Skip to content

Commit

Permalink
add IHttpClientFactory, GetCompletionAsync
Browse files Browse the repository at this point in the history
  • Loading branch information
EslaMx7 authored and vermorel committed Apr 29, 2023
1 parent 411a40c commit 8cd945a
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 3 deletions.
1 change: 1 addition & 0 deletions src/Lokad.Prompting/ICompletionClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -7,4 +7,5 @@ public interface ICompletionClient
/// <summary>Returns the token count for <paramref name="content"/> under the client's configured encoding — presumably the tokenizer matching the completion model; confirm against the implementation.</summary>
int GetTokenCount(string content);

/// <summary>Synchronously requests a completion for <paramref name="prompt"/> and returns the completion text.</summary>
string GetCompletion(string prompt);
/// <summary>Asynchronously requests a completion for <paramref name="prompt"/> and returns the completion text.</summary>
Task<string> GetCompletionAsync(string prompt);
}
22 changes: 19 additions & 3 deletions src/Lokad.Prompting/OpenAIClient.cs
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,12 @@ public class OpenAIClient : ICompletionClient

readonly GptEncoding _encoding;

public OpenAIClient(string apiKey, Model? model = null, double temperature = 0.0)
readonly int _maxTokens = 2049;

public OpenAIClient(string apiKey, Model? model = null, double temperature = 0.0, IHttpClientFactory httpClientFactory = null)
{
_api = new OpenAIAPI(apiKey);
_api.HttpClientFactory = httpClientFactory; // HINT: allow creating custom HttpClient (can be used to increase default Timeout)
_model = model ?? Model.DavinciText;
_temperature = temperature;
_encoding = GptEncoding.GetEncoding("cl100k_base");
Expand All @@ -33,11 +36,24 @@ public int GetTokenCount(string content)

/// <summary>
/// Synchronously requests a completion for <paramref name="prompt"/>.
/// </summary>
/// <param name="prompt">The prompt text sent to the completions endpoint.</param>
/// <returns>The completion text produced by the model.</returns>
/// <remarks>
/// Blocks on the async API via <c>GetAwaiter().GetResult()</c>, which rethrows the
/// original exception directly (unlike <c>.Result</c>, which wraps it in an
/// <see cref="AggregateException"/>). Sync-over-async carries deadlock and
/// thread-pool-starvation risk; prefer <see cref="GetCompletionAsync"/> from
/// async call paths.
/// </remarks>
public string GetCompletion(string prompt)
{
    // Delegate to the async implementation so the CompletionRequest
    // construction (model, temperature, max_tokens) lives in one place.
    return GetCompletionAsync(prompt).GetAwaiter().GetResult();
}

/// <summary>
/// Asynchronously requests a completion for <paramref name="prompt"/>.
/// </summary>
/// <param name="prompt">The prompt text sent to the completions endpoint.</param>
/// <returns>The completion text produced by the model.</returns>
public async Task<string> GetCompletionAsync(string prompt)
{
    var completionResult = await _api.Completions
        .CreateCompletionAsync(
            new CompletionRequest(prompt,
                model: _model,
                temperature: _temperature,
                max_tokens: _maxTokens))
        .ConfigureAwait(false); // library code: no need to resume on the caller's sync context
    return completionResult.ToString();
}
}

0 comments on commit 8cd945a

Please sign in to comment.