Branch: main — commit 3653d23ee1 (parent e2fb661366) by bicijinlian, 2 weeks ago

@ -10,9 +10,10 @@ using Microsoft.Extensions.Hosting;
using OllamaSharp;
using OllamaStudy.Core;
using OpenAI;
using OpenAI.Chat;
using OllamaStudy.Core;
namespace OllamaStudy.UseExtensionsAI
{
@ -90,11 +91,21 @@ namespace OllamaStudy.UseExtensionsAI
var openAIClientOptions = new OpenAIClientOptions()
{
Endpoint = new Uri("http://localhost:11434/v1")
Endpoint = new Uri(new Uri(options.OllamaServerUrl),"v1")
};
return new OpenAIClient(new ApiKeyCredential("nokey"),openAIClientOptions);
})
.AddScoped<OpenAI.Chat.ChatClient>(provider =>
{
var options = provider.GetRequiredService<IOptionsMonitor<OllamaServerOption>>().CurrentValue;
var openAIClientOptions = new OpenAIClientOptions()
{
Endpoint = new Uri(new Uri(options.OllamaServerUrl),"v1")
};
return new ChatClient(options.Model,new ApiKeyCredential("nokey"),openAIClientOptions);
});
}
#endregion

@ -5,39 +5,65 @@ using Xunit.Abstractions;
using OpenAI;
using OpenAI.Chat;
namespace OllamaStudy.UseExtensionsAI;

/// <summary>
/// Ollama exposes an OpenAI-compatible endpoint, so the official OpenAI SDK
/// can be used to call it directly. These tests exercise that compatibility.
/// </summary>
public class UseOpenAITest
{
    private readonly ITestOutputHelper _output;
    private readonly IOptionsMonitor<OllamaServerOption> _ollamaOptionsMonitor;
    private readonly OpenAIClient _defaultOpenAIClient;
    private readonly ChatClient _chatClient;

    /// <summary>
    /// All collaborators are injected; the chat client is derived from the
    /// root <see cref="OpenAIClient"/> using the configured model name.
    /// </summary>
    public UseOpenAITest(ITestOutputHelper outputHelper, OpenAIClient defaultOpenAIClient, IOptionsMonitor<OllamaServerOption> ollamaOptionsMonitor)
    {
        _output = outputHelper;
        _defaultOpenAIClient = defaultOpenAIClient;
        _ollamaOptionsMonitor = ollamaOptionsMonitor;
        _chatClient = _defaultOpenAIClient.GetChatClient(_ollamaOptionsMonitor.CurrentValue.Model);
    }

    /// <summary>
    /// Obtain the various feature-specific clients from the root OpenAIClient.
    /// </summary>
    [Fact]
    public void GetClients_Test()
    {
        Assert.NotNull(_defaultOpenAIClient);

        // Audio client
        var audioClient = _defaultOpenAIClient.GetAudioClient(_ollamaOptionsMonitor.CurrentValue.Model);
        Assert.NotNull(audioClient);

        // Chat client
        var chatClient = _defaultOpenAIClient.GetChatClient(_ollamaOptionsMonitor.CurrentValue.Model);
        Assert.NotNull(chatClient);

        // Completion client: the SDK has no dedicated completion client here,
        // so the root client itself is used.
        var completionClient = _defaultOpenAIClient;
        Assert.NotNull(completionClient);

        // Model client
        var modelClient = _defaultOpenAIClient.GetOpenAIModelClient();
        Assert.NotNull(modelClient);
    }

    #region Models
    /// <summary>
    /// List the models available on the Ollama server via the OpenAI-compatible API.
    /// </summary>
    [Fact]
    public void List_Models_Test()
    {
        var modelClient = _defaultOpenAIClient.GetOpenAIModelClient();
        OpenAI.Models.OpenAIModelCollection openAIModelCollection = modelClient.GetModels().Value;
        _output.WriteLine($"Ollama服务中共有{openAIModelCollection.Count()}个模型,包括[{string.Join(",", openAIModelCollection)}]");
    }
    #endregion
}

@ -1,11 +0,0 @@
namespace OllamaStudy.UseSemanticKernel;

/// <summary>
/// Placeholder test class for the Semantic Kernel study project.
/// </summary>
public class UnitTest1
{
    /// <summary>
    /// Empty placeholder test; performs no work and always passes.
    /// </summary>
    [Fact]
    public void Test1()
    {
    }
}
Loading…
Cancel
Save