main
bicijinlian 4 days ago
parent 61320dc936
commit 9614ea2c9d

@@ -62,5 +62,6 @@ global using Xunit.Sdk;
global using Xunit.Extensions;
global using Xunit.Abstractions;
global using Xunit.DependencyInjection;
global using Moq;
global using OllamaStudy.Core;

@@ -19,6 +19,7 @@
<PackageReference Include="Microsoft.Extensions.AI" Version="9.7.1" />
<PackageReference Include="Microsoft.Extensions.AI.OpenAI" Version="9.7.1-preview.1.25365.4" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" />
<PackageReference Include="Moq" Version="4.20.72" />
<PackageReference Include="OllamaSharp" Version="5.3.4" />
<PackageReference Include="OllamaSharp.ModelContextProtocol" Version="5.3.4" />
<PackageReference Include="OpenAI" Version="2.3.0" />

@@ -1,10 +1,4 @@
using System.Net.Sockets;
using System.Threading.Tasks;
using OpenAI;
using OpenAI.Responses;
namespace OllamaStudy.UseExtensionsAI;
/// <summary>
/// Ollama exposes an OpenAI-compatible API, so it can be called directly with the OpenAI SDK
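The construction of _defaultOpenAIClient is not shown in this diff; a minimal sketch of pointing the OpenAI SDK at a local Ollama instance, assuming Ollama's default OpenAI-compatible endpoint http://localhost:11434/v1 and a locally pulled model such as gemma3:4b (Ollama does not validate the API key, so any placeholder works):

using System;
using System.ClientModel;
using OpenAI;
using OpenAI.Chat;

//Point the official OpenAI SDK at a local Ollama instance (assumed default address).
OpenAIClientOptions ollamaOptions = new()
{
    Endpoint = new Uri("http://localhost:11434/v1"),
};
//Ollama ignores the key, but the SDK requires a non-empty credential.
OpenAIClient ollamaClient = new(new ApiKeyCredential("ollama"), ollamaOptions);
//Chat against a locally pulled model via the OpenAI-compatible /v1/chat/completions route.
ChatClient chatClient = ollamaClient.GetChatClient("gemma3:4b");
ChatCompletion completion = chatClient.CompleteChat("Say 'this is a test.'");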
@@ -661,7 +655,8 @@ public class OpenAISdkTest
/// In this example, you have a JSON document containing monthly sales information for different products, and you want to build an assistant that can analyze it and answer questions about it (a hedged sketch of this flow follows this region's #endregion).
/// Use both the OpenAIFileClient from the OpenAI.Files namespace and the AssistantClient from the OpenAI.Assistants namespace.
/// </summary>
[Fact]
//[Fact]
[Fact(Skip = "Skipped because the local Ollama test environment does not support this feature")]
public void RAG_Assistant_ChatClient_Test()
{
#pragma warning disable OPENAI001
@@ -798,7 +793,7 @@ public class OpenAISdkTest
}
//It produces a result like this:
var output =
_ =
"""
[USER]: How well did product 113045 sell in February? Graph its trend over time.
@@ -824,9 +819,171 @@ public class OpenAISdkTest
#endregion
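The body of RAG_Assistant_ChatClient_Test is elided from this diff. A hedged sketch of the flow the summary describes, loosely following the upstream openai-dotnet RAG example (the model name, instructions, and sales document below are illustrative assumptions, and it targets the real OpenAI Assistants API rather than local Ollama):

using System;
using System.ClientModel;
using System.IO;
using System.Threading;
using OpenAI;
using OpenAI.Assistants;
using OpenAI.Files;

#pragma warning disable OPENAI001 //Assistants APIs are experimental in the OpenAI .NET library.
OpenAIClient openAIClient = new(new ApiKeyCredential(Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? string.Empty));
OpenAIFileClient fileClient = openAIClient.GetOpenAIFileClient();
AssistantClient assistantClient = openAIClient.GetAssistantClient();
//Upload an illustrative JSON sales document so the assistant can search it.
using Stream salesJson = BinaryData.FromString("""
{ "sales": [ { "product_id": 113045, "month": "2024-02", "units": 22 } ] }
""").ToStream();
OpenAIFile salesFile = fileClient.UploadFile(salesJson, "monthly_sales.json", FileUploadPurpose.Assistants);
//Create an assistant with file search (retrieval) and code interpreter (graphing),
//attaching the uploaded file through a new vector store.
Assistant assistant = assistantClient.CreateAssistant
(
    "gpt-4o",
    new AssistantCreationOptions()
    {
        Instructions = "You answer questions about the sales data and can graph it when asked.",
        Tools = { new FileSearchToolDefinition(), new CodeInterpreterToolDefinition() },
        ToolResources = new()
        {
            FileSearch = new() { NewVectorStores = { new VectorStoreCreationHelper([salesFile.Id]) } },
        },
    }
);
//Ask a question on a new thread, then poll the run until it reaches a terminal state.
ThreadRun run = assistantClient.CreateThreadAndRun
(
    assistant.Id,
    new ThreadCreationOptions() { InitialMessages = { "How well did product 113045 sell in February?" } }
);
do
{
    Thread.Sleep(TimeSpan.FromSeconds(1));
    run = assistantClient.GetRun(run.ThreadId, run.Id);
} while (!run.Status.IsTerminal);
#pragma warning restore OPENAI001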
#region How to use assistants with streaming and vision
//Demonstrates how to use the v2 Assistants API to provide image data to an assistant and then stream the run's responses.
//[Fact]
[Fact(Skip = "Skipped because the local Ollama test environment does not support this feature")]
public void Streaming_Vision_AssistantsClient()
{
#pragma warning disable OPENAI001
OpenAIClientOptions clientOptions = new()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1"),
};
//Pass clientOptions so the custom Endpoint is actually used; read the key from an environment variable (name here is illustrative) rather than hard-coding a secret.
OpenAIClient openAIClient = new(new ApiKeyCredential(Environment.GetEnvironmentVariable("OPENAI_API_KEY") ?? string.Empty), clientOptions);
OpenAIFileClient fileClient = openAIClient.GetOpenAIFileClient();
AssistantClient assistantClient = openAIClient.GetAssistantClient();
OpenAIFile pictureOfAppleFile = fileClient.UploadFile
(
Path.Combine("Assets", "images_apple.png"),
FileUploadPurpose.Vision
);
Uri linkToPictureOfOrange = new("https://raw.githubusercontent.com/openai/openai-dotnet/refs/heads/main/examples/Assets/images_orange.png");
//Next, create a new assistant with a vision-capable model (such as gpt-4o) and a thread that includes the referenced image information.
Assistant assistant = assistantClient.CreateAssistant
(
"gpt-4o",
new AssistantCreationOptions()
{
Instructions = "When asked a question, attempt to answer very concisely. Prefer one-sentence answers whenever feasible."
}
);
AssistantThread thread = assistantClient.CreateThread
(
new ThreadCreationOptions()
{
InitialMessages =
{
new ThreadInitializationMessage(
OpenAI.Assistants.MessageRole.User,
[
"Hello, assistant! Please compare these two images for me:",
MessageContent.FromImageFileId(pictureOfAppleFile.Id),
MessageContent.FromImageUri(linkToPictureOfOrange),
]),
}
}
);
CollectionResult<StreamingUpdate> streamingUpdates = assistantClient.CreateRunStreaming
(
thread.Id,
assistant.Id,
new RunCreationOptions()
{
AdditionalInstructions = "When possible, try to sneak in puns if you're asked to compare things.",
}
);
foreach (StreamingUpdate streamingUpdate in streamingUpdates)
{
if (streamingUpdate.UpdateKind == StreamingUpdateReason.RunCreated)
{
Console.WriteLine($"--- Run started! ---");
}
if (streamingUpdate is MessageContentUpdate contentUpdate)
{
Console.Write(contentUpdate.Text);
}
}
#pragma warning restore OPENAI001
}
#endregion
#region Advanced scenarios
/// <summary>
/// Using protocol methods
/// In addition to the client methods that work with strongly typed request and response objects, the .NET library also provides protocol methods that give more direct access to the REST API.
/// Protocol methods are "binary in, binary out": they accept BinaryContent as the request body and return BinaryData as the response body.
/// </summary>
[Fact]
public void Using_Protocol_Methods_Test()
{
//To use the protocol-method variant of ChatClient's CompleteChat method, pass the request body as BinaryContent.
//ChatClient client = new("gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
ChatClient client = _defaultOpenAIClient.GetChatClient(ModelSelecter.ModelWithToolAndThing);
BinaryData input = BinaryData.FromBytes("""
{
"model": "gemma3:4b",
"messages": [
{
"role": "user",
"content": "Say 'this is a test.'"
}
]
}
"""u8.ToArray());
using BinaryContent content = BinaryContent.Create(input);
ClientResult result = client.CompleteChat(content);
BinaryData output = result.GetRawResponse().Content;
using JsonDocument outputAsJson = JsonDocument.Parse(output.ToString());
string message = outputAsJson.RootElement
.GetProperty("choices"u8)[0]
.GetProperty("message"u8)
.GetProperty("content"u8)
.GetString() ?? "";
_output.WriteLine($"[ASSISTANT]: {message}");
//Note how GetRawResponse is then called on the resulting ClientResult, and the response body is retrieved as BinaryData through the PipelineResponse's Content property.
}
/// <summary>
/// Mocking the client
/// The OpenAI .NET library is designed to support mocking: every namespace has a corresponding model factory for this purpose, except OpenAI.Assistants and OpenAI.VectorStores, whose model factories are coming soon.
/// </summary>
[Fact]
public void MocK_Client_Test()
{
#pragma warning disable OPENAI001
Mock<AudioClient> mockClient = new();
// Construct the mocked ClientResult<T> with a null value (Value is stubbed below) and a mocked PipelineResponse.
Mock<ClientResult<AudioTranscription>> mockResult = new(null, Mock.Of<System.ClientModel.Primitives.PipelineResponse>());
AudioTranscription transcription = OpenAIAudioModelFactory.AudioTranscription(text: "I swear I saw an apple flying yesterday!");
// Set up mocks' properties and methods.
mockResult
.SetupGet(result => result.Value)
.Returns(transcription);
mockClient
.Setup
(client => client.TranscribeAudio
(
It.IsAny<string>(),
It.IsAny<AudioTranscriptionOptions>()
)
)
.Returns(mockResult.Object);
// Perform validation.
AudioClient client = mockClient.Object;
bool containsSecretWord = ContainsSecretWord(client, "<audioFilePath>", "apple");
Assert.True(containsSecretWord);
bool ContainsSecretWord(AudioClient client, string audioFilePath, string secretWord)
{
AudioTranscription transcription = client.TranscribeAudio(audioFilePath);
return transcription.Text.Contains(secretWord);
}
#pragma warning restore OPENAI001
}
#endregion
#region Private methods

@@ -7,10 +7,6 @@
<IsPackable>false</IsPackable>
</PropertyGroup>
<ItemGroup>
<Compile Remove="ModelSelecter.cs" />
</ItemGroup>
<ItemGroup>
<PackageReference Include="coverlet.collector" Version="6.0.4">
<PrivateAssets>all</PrivateAssets>
