GitAuto: [FEATURE] Handle OpenAI downtime and large requests (#151)
* Update README.md

* Update Src/AiCommitMessage/Services/GenerateCommitMessageService.cs

* Update Tests/AiCommitMessage.Tests/Services/GenerateCommitMessageServiceTests.cs

* Update GenerateCommitMessageService.cs

* Update GenerateCommitMessageService.cs

* Update README.md

* #149 fix - Update validation for staged changes size limit

* #149 feat - add new functionality for Azure and OpenAI generation

* #149 docs - added XML documentation comments to methods

---------

Co-authored-by: gitauto-ai[bot] <161652217+gitauto-ai[bot]@users.noreply.github.com>
Co-authored-by: gstraccini[bot] <150967461+gstraccini[bot]@users.noreply.github.com>
Co-authored-by: Guilherme Branco Stracini <guilherme@guilhermebranco.com.br>
3 people authored Jan 6, 2025
1 parent e624aac commit 924eac8
Showing 4 changed files with 128 additions and 38 deletions.
2 changes: 1 addition & 1 deletion .config/dotnet-tools.json
@@ -3,7 +3,7 @@
"isRoot": true,
"tools": {
"csharpier": {
"version": "0.30.4",
"version": "0.30.5",
"commands": [
"dotnet-csharpier"
],
6 changes: 5 additions & 1 deletion .vscode/settings.json
@@ -18,5 +18,9 @@
"titleBar.inactiveBackground": "#972b7099",
"titleBar.inactiveForeground": "#e7e7e799"
},
"peacock.color": "#972b70"
"peacock.color": "#972b70",
"sonarlint.connectedMode.project": {
"connectionId": "guibranco",
"projectKey": "guibranco_dotnet-aicommitmessage"
}
}
136 changes: 100 additions & 36 deletions Src/AiCommitMessage/Services/GenerateCommitMessageService.cs
@@ -1,5 +1,6 @@
using System.ClientModel;
using System.Diagnostics;
using System.Text;
using System.Text.Json;
using System.Text.RegularExpressions;
using AiCommitMessage.Options;
@@ -59,6 +60,13 @@ public string GenerateCommitMessage(GenerateCommitMessageOptions options)
return message;
}

if (Encoding.UTF8.GetByteCount(diff) > 10240)
{
throw new InvalidOperationException(
"🚫 The staged changes are too large to process. Please reduce the number of files or size of changes and try again."
);
}

if (string.IsNullOrEmpty(branch) && string.IsNullOrEmpty(diff))
{
throw new InvalidOperationException(
@@ -80,6 +88,16 @@ public string GenerateCommitMessage(GenerateCommitMessageOptions options)
return GenerateWithModel(model, formattedMessage, branch, message, options.Debug);
}

/// <summary>
/// Generates a commit message using the specified model.
/// </summary>
/// <param name="model">The name of the model to use for generating the commit message.</param>
/// <param name="formattedMessage">The formatted message to be used as input for the model.</param>
/// <param name="branch">The branch name associated with the commit.</param>
/// <param name="message">The original commit message.</param>
/// <param name="debug">A flag indicating whether to save debug information.</param>
/// <returns>The generated commit message.</returns>
/// <exception cref="NotSupportedException">Thrown when the specified model is not supported.</exception>
private static string GenerateWithModel(
string model,
string formattedMessage,
@@ -92,32 +110,74 @@ bool debug

if (model.Equals("llama-3-1-405B-Instruct", StringComparison.OrdinalIgnoreCase))
{
var endpoint = new Uri(EnvironmentLoader.LoadLlamaApiUrl());
var credential = new AzureKeyCredential(EnvironmentLoader.LoadLlamaApiKey());
text = GenerateUsingAzureAi(formattedMessage);
}
else if (model.Equals("gpt-4o-mini", StringComparison.OrdinalIgnoreCase))
{
text = GenerateUsingOpenAi(formattedMessage);
}
else
{
throw new NotSupportedException($"Model '{model}' is not supported.");
}

var client = new ChatCompletionsClient(
endpoint,
credential,
new AzureAIInferenceClientOptions()
);
text = ProcessGeneratedMessage(text, branch, message);

var requestOptions = new ChatCompletionsOptions
{
Messages =
{
new ChatRequestSystemMessage(Constants.SystemMessage),
new ChatRequestUserMessage(formattedMessage),
},
Temperature = 1.0f,
NucleusSamplingFactor = 1.0f,
MaxTokens = 1000,
Model = "Meta-Llama-3.1-405B-Instruct",
};

var response = client.Complete(requestOptions);
text = response.Value.Content;
if (!debug)
{
return text;
}
else if (model.Equals("gpt-4o-mini", StringComparison.OrdinalIgnoreCase))

SaveDebugInfo(text);

return text;
}

/// <summary>
/// Generates a commit message using the Azure AI API.
/// </summary>
/// <param name="formattedMessage">The formatted message to be sent to the Azure AI API.</param>
/// <returns>The generated commit message.</returns>
private static string GenerateUsingAzureAi(string formattedMessage)
{
string text;
var endpoint = new Uri(EnvironmentLoader.LoadLlamaApiUrl());
var credential = new AzureKeyCredential(EnvironmentLoader.LoadLlamaApiKey());

var client = new ChatCompletionsClient(
endpoint,
credential,
new AzureAIInferenceClientOptions()
);

var requestOptions = new ChatCompletionsOptions
{
Messages =
{
new ChatRequestSystemMessage(Constants.SystemMessage),
new ChatRequestUserMessage(formattedMessage),
},
Temperature = 1.0f,
NucleusSamplingFactor = 1.0f,
MaxTokens = 1000,
Model = "Meta-Llama-3.1-405B-Instruct",
};

var response = client.Complete(requestOptions);
text = response.Value.Content;
return text;
}

/// <summary>
/// Generates a commit message using the OpenAI API.
/// </summary>
/// <param name="formattedMessage">The formatted message to be sent to the OpenAI API.</param>
/// <returns>The generated commit message.</returns>
/// <exception cref="InvalidOperationException">Thrown when the OpenAI API is unavailable.</exception>
private static string GenerateUsingOpenAi(string formattedMessage)
{
string text;
try
{
var apiUrl = EnvironmentLoader.LoadOpenAiApiUrl();
var apiKey = EnvironmentLoader.LoadOpenAiApiKey();
@@ -135,23 +195,24 @@ bool debug

text = chatCompletion.Value.Content[0].Text;
}
else
{
throw new NotSupportedException($"Model '{model}' is not supported.");
}

text = ProcessGeneratedMessage(text, branch, message);

if (!debug)
catch (Exception ex) when (ex is HttpRequestException || ex is TaskCanceledException)
{
return text;
throw new InvalidOperationException(
"⚠️ OpenAI API is currently unavailable. Please try again later."
);
}

SaveDebugInfo(text);

return text;
}

/// <summary>
/// Processes the generated commit message, adjusting the "type - " prefix and appending
/// additional information such as the issue number or version bump command derived from the branch and original message.
/// </summary>
/// <param name="text">The message generated by the model.</param>
/// <param name="branch">The branch name associated with the commit.</param>
/// <param name="message">The original commit message.</param>
/// <returns>The processed commit message.</returns>
private static string ProcessGeneratedMessage(string text, string branch, string message)
{
if (text.Length >= 7 && text[..7] == "type - ")
@@ -186,6 +247,10 @@ private static string ProcessGeneratedMessage(string text, string branch, string
return text;
}

/// <summary>
/// Saves the provided debug information to a JSON file named "debug.json".
/// </summary>
/// <param name="text">The debug information to be saved.</param>
private static void SaveDebugInfo(string text)
{
var json = JsonSerializer.Serialize(new { DebugInfo = text });
@@ -210,8 +275,7 @@ private static GitProvider GetGitProvider()
UseShellExecute = false,
CreateNoWindow = true,
};
using var process = new Process();
process.StartInfo = processStartInfo;
using var process = new Process { StartInfo = processStartInfo };
process.Start();
var originUrl = process.StandardOutput.ReadToEnd();
process.WaitForExit();
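For reference, a minimal sketch of how a caller might surface the two new InvalidOperationException paths added in this file (the 10 KB staged-changes guard and the OpenAI-downtime fallback). Everything below is assumed for illustration only — the AiCommitMessage.Services namespace (inferred from the file path), the placeholder diff, and the console handling are not part of this commit.

using AiCommitMessage.Options;
using AiCommitMessage.Services; // namespace assumed from the Src/AiCommitMessage/Services path

// Hypothetical caller; construction and option values are placeholders.
var options = new GenerateCommitMessageOptions
{
    Branch = "feature/example",
    Diff = "diff --git a/README.md b/README.md", // placeholder staged diff
    Message = "chore: example",
};

try
{
    var text = new GenerateCommitMessageService().GenerateCommitMessage(options);
    Console.WriteLine(text);
}
catch (InvalidOperationException ex)
{
    // Reached by both the oversized-diff guard and the OpenAI-unavailable fallback.
    Console.Error.WriteLine(ex.Message);
    Environment.Exit(1);
}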
22 changes: 22 additions & 0 deletions Tests/AiCommitMessage.Tests/Services/GenerateCommitMessageServiceTests.cs
@@ -60,6 +60,28 @@ public void GenerateCommitMessage_Should_ReturnMessage_When_MergeConflictResolut
result.Should().Be("Merge branch 'feature/test' into main");
}

[Fact]
public void GenerateCommitMessage_Should_ThrowException_When_DiffExceedsLimit()
{
// Arrange
var options = new GenerateCommitMessageOptions
{
Branch = "feature/test",
Diff = new string('a', 10241), // 10 KB + 1 byte
Message = "Test message",
};

// Act
Action act = () => _service.GenerateCommitMessage(options);

// Assert
act.Should()
.Throw<InvalidOperationException>()
.WithMessage(
"🚫 The staged changes are too large to process. Please reduce the number of files or size of changes and try again."
);
}

//[Fact]
//public void GenerateCommitMessage_Should_IncludeBranchAndDiff_When_Provided()
//{
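One nuance behind the new guard and this test: the limit is measured in UTF-8 bytes rather than characters, so the 10,241 ASCII characters used above land exactly one byte over the 10,240-byte threshold, while multi-byte characters reach it with fewer characters. A small standalone illustration (assumed, not part of this commit):

using System.Text;

// ASCII characters encode to one byte each, so 10,241 of them exceed the 10,240-byte limit.
var ascii = new string('a', 10_241);
Console.WriteLine(Encoding.UTF8.GetByteCount(ascii));    // 10241

// 'é' encodes to two bytes in UTF-8, so 5,121 characters are already over the limit.
var accented = new string('é', 5_121);
Console.WriteLine(Encoding.UTF8.GetByteCount(accented)); // 10242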
