main
bicijinlian 3 weeks ago
parent e2fb661366
commit 3653d23ee1

@@ -10,9 +10,10 @@ using Microsoft.Extensions.Hosting;
 using OllamaSharp;
-using OllamaStudy.Core;
 using OpenAI;
+using OpenAI.Chat;
+using OllamaStudy.Core;

 namespace OllamaStudy.UseExtensionsAI
 {
@@ -90,11 +91,21 @@ namespace OllamaStudy.UseExtensionsAI
                 var openAIClientOptions = new OpenAIClientOptions()
                 {
-                    Endpoint = new Uri("http://localhost:11434/v1")
+                    Endpoint = new Uri(new Uri(options.OllamaServerUrl), "v1")
                 };
                 return new OpenAIClient(new ApiKeyCredential("nokey"), openAIClientOptions);
+            })
+            .AddScoped<OpenAI.Chat.ChatClient>(provider =>
+            {
+                var options = provider.GetRequiredService<IOptionsMonitor<OllamaServerOption>>().CurrentValue;
+                var openAIClientOptions = new OpenAIClientOptions()
+                {
+                    Endpoint = new Uri(new Uri(options.OllamaServerUrl), "v1")
+                };
+                return new ChatClient(options.Model, new ApiKeyCredential("nokey"), openAIClientOptions);
             });
         }
         #endregion
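This hunk stops hard-coding the Ollama endpoint: `new Uri(new Uri(options.OllamaServerUrl), "v1")` resolves the `v1` segment against the configured base URL, and a scoped ChatClient bound to the configured model is chained onto the same registration. A minimal consumption sketch, assuming these registrations have been applied to an ordinary IServiceCollection; the `services` variable and the prompt text are illustrative, not taken from the commit:

using System;
using System.ClientModel;
using Microsoft.Extensions.DependencyInjection;
using OpenAI.Chat;

// "services" is assumed to be the IServiceCollection that received the registrations above.
using var provider = services.BuildServiceProvider();
using var scope = provider.CreateScope();

// Resolve the ChatClient registered for the configured Ollama model.
ChatClient chatClient = scope.ServiceProvider.GetRequiredService<ChatClient>();

// Same call pattern the SDK uses against OpenAI, here served by Ollama's OpenAI-compatible /v1 endpoint.
ClientResult<ChatCompletion> result = chatClient.CompleteChat("Hello");
Console.WriteLine(result.Value.Content[0].Text);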

@@ -5,39 +5,65 @@ using Xunit.Abstractions;
 using OpenAI;
 using OpenAI.Chat;

-namespace OllamaStudy.UseExtensionsAI
-{
+namespace OllamaStudy.UseExtensionsAI;
+
+/// <summary>
+/// Ollama is compatible with the OpenAI API, so it can be called directly with the OpenAI SDK.
+/// </summary>
 public class UseOpenAITest
 {
     private ITestOutputHelper _output;
+    private IOptionsMonitor<OllamaServerOption> _ollamaOptionsMonitor;
+    private OpenAIClient _defaultOpenAIClient;
+    private ChatClient _chatClient;

-    public UseOpenAITest(ITestOutputHelper outputHelper)
+    public UseOpenAITest(ITestOutputHelper outputHelper, OpenAIClient defaultOpenAIClient, IOptionsMonitor<OllamaServerOption> ollamaOptionsMonitor)
     {
         _output = outputHelper;
+        _defaultOpenAIClient = defaultOpenAIClient;
+        _ollamaOptionsMonitor = ollamaOptionsMonitor;
+        _chatClient = _defaultOpenAIClient.GetChatClient(_ollamaOptionsMonitor.CurrentValue.Model);
     }

+    /// <summary>
+    /// Get the various task-specific clients from the OpenAIClient
+    /// </summary>
     [Fact]
-    public void OpenAI_Test()
+    public void GetClients_Test()
     {
-        ChatClient openAIClient = new ChatClient
-        (
-            model: ModelSelecter.ModelWithTool,
-            credential: new ApiKeyCredential("localhost"),
-            options: new OpenAIClientOptions()
-            {
-                Endpoint = new Uri("http://localhost:11434/v1"),
-            }
-        );
-
-        var mesages = new List<ChatMessage>()
-        {
-            ChatMessage.CreateUserMessage(new ChatMessageContent("你好")),
-        };
-
-        ClientResult<ChatCompletion> result = openAIClient.CompleteChat(mesages);
-        _output.WriteLine(result.Value.Content[0].Text);
+        Assert.NotNull(_defaultOpenAIClient);
+
+        // Audio client
+        var audioClient = _defaultOpenAIClient.GetAudioClient(_ollamaOptionsMonitor.CurrentValue.Model);
+        Assert.NotNull(audioClient);
+
+        // Chat client
+        var chatClient = _defaultOpenAIClient.GetChatClient(_ollamaOptionsMonitor.CurrentValue.Model);
+        Assert.NotNull(chatClient);
+
+        // Completion client
+        var completionClient = _defaultOpenAIClient;
+        Assert.NotNull(completionClient);
+
+        // Model client
+        var modelClient = _defaultOpenAIClient.GetOpenAIModelClient();
+        Assert.NotNull(modelClient);
     }
+
+    #region Models
+    /// <summary>
+    /// List models test
+    /// </summary>
+    [Fact]
+    public void List_Models_Test()
+    {
+        var modelClient = _defaultOpenAIClient.GetOpenAIModelClient();
+
+        OpenAI.Models.OpenAIModelCollection openAIModelCollection = modelClient.GetModels().Value;
+        _output.WriteLine($"The Ollama server has {openAIModelCollection.Count()} models, including [{string.Join(",", openAIModelCollection)}]");
+    }
+    #endregion
 }
-}
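The rewritten test no longer news up its own ChatClient; it now receives OpenAIClient and IOptionsMonitor&lt;OllamaServerOption&gt; through its constructor, which plain xUnit does not supply on its own. A sketch of the kind of test-project wiring this implies, assuming a package such as Xunit.DependencyInjection provides the Startup convention; the section-free option values and the model name below are illustrative, not taken from the commit:

using System;
using System.ClientModel;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using OpenAI;

namespace OllamaStudy.UseExtensionsAI;

// Hypothetical Startup consumed by Xunit.DependencyInjection to build the test container.
public class Startup
{
    public void ConfigureServices(IServiceCollection services)
    {
        // Illustrative values; the real project presumably binds OllamaServerOption from configuration.
        services.Configure<OllamaServerOption>(o =>
        {
            o.OllamaServerUrl = "http://localhost:11434";
            o.Model = "llama3.1"; // any locally pulled model
        });

        // Same registration shape as the AddScoped<OpenAIClient> hunk above.
        services.AddScoped<OpenAIClient>(provider =>
        {
            var options = provider.GetRequiredService<IOptionsMonitor<OllamaServerOption>>().CurrentValue;
            var clientOptions = new OpenAIClientOptions()
            {
                Endpoint = new Uri(new Uri(options.OllamaServerUrl), "v1")
            };
            return new OpenAIClient(new ApiKeyCredential("nokey"), clientOptions);
        });
    }
}

With wiring like this in place, the test framework would resolve UseOpenAITest's constructor parameters from the container for each test class instance.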

@@ -1,11 +0,0 @@
-namespace OllamaStudy.UseSemanticKernel
-{
-    public class UnitTest1
-    {
-        [Fact]
-        public void Test1()
-        {
-
-        }
-    }
-}