namespace OllamaStudy.UseExtensionsAI;
/// <summary>
/// Ollama's OpenAI-compatible endpoint can be called directly with the OpenAI SDK.
/// </summary>
public class OpenAISdkTest
{
private ITestOutputHelper _output;
private IOptionsMonitor<OllamaServerOption> _ollamaOptionsMonitor;
private OpenAIClient _defaultOpenAIClient;
private ChatClient _singtonChatClient;
private OpenAIClient _uiUiApiClient;
private OpenAIClient _zipuApiClient;
public OpenAISdkTest
(
ITestOutputHelper outputHelper,
OpenAIClient defaultOpenAIClient,
IOptionsMonitor<OllamaServerOption> ollamaOptionsMonitor,
// [FromKeyedServices] is used on these parameters, so the corresponding services must be registered as keyed services in the DI container (see the registration sketch after the constructor).
[FromKeyedServices("OpenAIChatClient")]ChatClient singtonChatClient,
[FromKeyedServices("UiUiAPIClient")]OpenAIClient uiUiApiClient,
[FromKeyedServices("ZipuAPIClient")]OpenAIClient zipuApiClient
)
{
_output = outputHelper;
_defaultOpenAIClient = defaultOpenAIClient;
_ollamaOptionsMonitor = ollamaOptionsMonitor;
_singtonChatClient = singtonChatClient;
_uiUiApiClient = uiUiApiClient;
_zipuApiClient = zipuApiClient;
}
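// Illustrative registration sketch (not part of the original file): one way the clients injected above
// might be wired up in the test host's DI container. The endpoint URLs, service keys, model names, and
// API keys below are placeholders/assumptions; adjust them to match the project's actual fixture or startup code.
//
// services.AddSingleton(new OpenAIClient(new ApiKeyCredential("nokey"),
//     new OpenAIClientOptions { Endpoint = new Uri("http://localhost:11434/v1") }));
// services.AddKeyedSingleton("OpenAIChatClient",
//     (sp, _) => sp.GetRequiredService<OpenAIClient>().GetChatClient("gemma3:4b"));
// services.AddKeyedSingleton("UiUiAPIClient",
//     (sp, _) => new OpenAIClient(new ApiKeyCredential("<uiuiapi-key>"),
//         new OpenAIClientOptions { Endpoint = new Uri("https://sg.uiuiapi.com/v1") }));
// services.AddKeyedSingleton("ZipuAPIClient",
//     (sp, _) => new OpenAIClient(new ApiKeyCredential("<zhipu-key>"),
//         new OpenAIClientOptions { Endpoint = new Uri("<zhipu-openai-compatible-endpoint>") }));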
#region Using the client library
/// <summary>
/// Obtain the various feature-specific clients from an OpenAIClient.
/// </summary>
[Fact]
public void GetClients_Test()
{
#pragma warning disable OPENAI001
Assert.NotNull(_defaultOpenAIClient);
// Audio client
var audioClient = _defaultOpenAIClient.GetAudioClient(_ollamaOptionsMonitor.CurrentValue.Model);
Assert.NotNull(audioClient);
// Chat client
var chatClient = _defaultOpenAIClient.GetChatClient(_ollamaOptionsMonitor.CurrentValue.Model);
Assert.NotNull(chatClient);
// Embedding client
var embeddingClient = _defaultOpenAIClient.GetEmbeddingClient(_ollamaOptionsMonitor.CurrentValue.Model);
Assert.NotNull(embeddingClient);
// Image client
var imageClient = _defaultOpenAIClient.GetImageClient(_ollamaOptionsMonitor.CurrentValue.Model);
Assert.NotNull(imageClient);
// Moderation client
var moderationClient = _defaultOpenAIClient.GetModerationClient(_ollamaOptionsMonitor.CurrentValue.Model);
Assert.NotNull(moderationClient);
// File client
var openAIFileClient = _defaultOpenAIClient.GetOpenAIFileClient();
Assert.NotNull(openAIFileClient);
// Model client
var modelClient = _defaultOpenAIClient.GetOpenAIModelClient();
Assert.NotNull(modelClient);
// Assistant client (experimental)
var assistantClient = _defaultOpenAIClient.GetAssistantClient();
Assert.NotNull(assistantClient);
// Batch client (experimental)
var batchClient = _defaultOpenAIClient.GetBatchClient();
Assert.NotNull(batchClient);
// Evaluation client (experimental)
var evaluationClient = _defaultOpenAIClient.GetEvaluationClient();
Assert.NotNull(evaluationClient);
// Fine-tuning client (experimental)
var FineTuningClient = _defaultOpenAIClient.GetFineTuningClient();
Assert.NotNull(FineTuningClient);
// Response client (experimental)
var openAIResponseClient = _defaultOpenAIClient.GetOpenAIResponseClient(_ollamaOptionsMonitor.CurrentValue.Model);
Assert.NotNull(openAIResponseClient);
// Realtime client (experimental)
#pragma warning disable OPENAI002
var realtimeClient = _defaultOpenAIClient.GetRealtimeClient();
Assert.NotNull(realtimeClient);
#pragma warning restore OPENAI002
// Vector store client (experimental)
var vectorStoreClient = _defaultOpenAIClient.GetVectorStoreClient();
Assert.NotNull(vectorStoreClient);
#pragma warning restore OPENAI001
}
/// <summary>
/// Custom endpoint URL and API key (OpenAIClient).
/// </summary>
[Fact]
public void Custom_OpenAIClient_Test()
{
var option = new OpenAIClientOptions()
{
OrganizationId = "TianyiJituan",
ProjectId = "StudyProject",
Endpoint = new Uri("http://localhost:11434/v1")
};
// The local Ollama server does not require an API key (any placeholder value works).
var openAIClient = new OpenAIClient(new ApiKeyCredential("nokey"), option);
var chatClient = openAIClient.GetChatClient(_ollamaOptionsMonitor.CurrentValue.Model);
Assert.NotNull(openAIClient);
Assert.NotNull(chatClient);
}
/// <summary>
/// Custom endpoint URL and API key (ChatClient).
/// </summary>
[Fact]
public void Custom_ChatClient_Test()
{
var option = new OpenAIClientOptions()
{
OrganizationId = "TianyiJituan",
ProjectId = "StudyProject",
UserAgentApplicationId = "StudyAgentApp",
Endpoint = new Uri("http://localhost:11434/v1"),
};
var chatClient = new ChatClient(_ollamaOptionsMonitor.CurrentValue.Model,new ApiKeyCredential("nokey"),option);
Assert.NotNull(chatClient);
}
/// <summary>
/// Using the async API.
/// Every client method has an asynchronous variant on the same client class.
/// </summary>
[Fact]
public async Task UseAsyncAPI_Test()
{
ChatClient chatClient = _defaultOpenAIClient.GetChatClient(_ollamaOptionsMonitor.CurrentValue.Model);
ClientResult<ChatCompletion> result = await chatClient.CompleteChatAsync("你好,请问河南的省会是什么?");
var responseText = result.Value.Content.First().Text;
_output.WriteLine(responseText);
Assert.NotNull(result);
Assert.Contains("郑州",responseText);
}
#endregion
#region How to use dependency injection
/// <summary>
/// OpenAI clients are thread-safe and can safely be registered as singletons in DI.
/// This maximizes resource efficiency and HTTP connection reuse.
/// </summary>
[Fact]
public void Singleton_ChatClient_Test()
{
var result = _singtonChatClient.CompleteChat("你好");
var responseText = result.Value.Content.First().Text;
_output.WriteLine(responseText);
Assert.NotNull(result);
}
#endregion
#region How to use chat completions with streaming
/// <summary>
/// With the synchronous streaming API, response tokens arrive immediately instead of waiting for the model to finish.
/// </summary>
[Fact]
public void Streaming_ChatClient_Test()
{
CollectionResult<StreamingChatCompletionUpdate> result = _singtonChatClient.CompleteChatStreaming("你好");
var stringBuilder = new StringBuilder(500);
foreach (StreamingChatCompletionUpdate completionUpdate in result)
{
if (completionUpdate.ContentUpdate.Count > 0)
{
stringBuilder.Append(completionUpdate.ContentUpdate[0].Text);
}
}
_output.WriteLine(stringBuilder.ToString());
}
/// <summary>
/// Using the asynchronous streaming API.
/// </summary>
[Fact]
public async Task Singleton_Async_ChatClient_Test()
{
var result = _singtonChatClient.CompleteChatStreamingAsync("你好");
var stringBuilder = new StringBuilder(500);
await foreach (StreamingChatCompletionUpdate completionUpdate in result)
{
if (completionUpdate.ContentUpdate.Count > 0)
{
stringBuilder.Append(completionUpdate.ContentUpdate[0].Text);
}
}
_output.WriteLine(stringBuilder.ToString());
}
#endregion
#region How to use chat completions with tools and function calling
/// <summary>
/// Calling tools and functions.
/// </summary>
[Fact]
public void Use_FunctionCalling_ChatClient_Test()
{
ChatTool getCurrentLocationTool = ChatTool.CreateFunctionTool
(
functionName: nameof(GetCurrentLocation),
functionDescription: "Get the user's current location"
);
ChatTool getCurrentWeatherTool = ChatTool.CreateFunctionTool
(
functionName: nameof(GetCurrentWeather),
functionDescription: "Get the current weather in a given location",
functionParameters: BinaryData.FromBytes("""
{
"type": "object",
"properties": {
"location": {
"type": "string",
"description": "The city and state, e.g. Boston, MA"
},
"unit": {
"type": "string",
"enum": [ "celsius", "fahrenheit" ],
"description": "The temperature unit to use. Infer this from the specified location."
}
},
"required": [ "location" ]
}
"""u8.ToArray())
);
List<OpenAI.Chat.ChatMessage> messages = [new UserChatMessage("What's the weather like in Beijing today?"),];
ChatCompletionOptions options = new()
{
Tools = { getCurrentLocationTool, getCurrentWeatherTool },
};
bool requiresAction = false;
do // In essence, this loop performs the function calls manually.
{
requiresAction = false;
ChatCompletion completion = _singtonChatClient.CompleteChat(messages, options);
switch (completion.FinishReason)
{
case OpenAI.Chat.ChatFinishReason.Stop:
{
// Add the assistant message to the conversation history.
messages.Add(new AssistantChatMessage(completion));
// Print the conversation.
foreach (var message in messages)
{
_output.WriteLine(message.Content.First().Text);
}
break;
}
case OpenAI.Chat.ChatFinishReason.ToolCalls:
{
// First, add the assistant message with tool calls to the conversation history.
messages.Add(new AssistantChatMessage(completion));
// Then, add a new tool message for each tool call that is resolved.
foreach (ChatToolCall toolCall in completion.ToolCalls)
{
switch (toolCall.FunctionName)
{
case nameof(GetCurrentLocation):
{
string toolResult = GetCurrentLocation();
messages.Add(new ToolChatMessage(toolCall.Id, toolResult));
break;
}
case nameof(GetCurrentWeather):
{
// The arguments that the model wants to use to call the function are specified as a
// stringified JSON object based on the schema defined in the tool definition. Note that
// the model may hallucinate arguments too. Consequently, it is important to do the
// appropriate parsing and validation before calling the function.
using JsonDocument argumentsJson = JsonDocument.Parse(toolCall.FunctionArguments);
bool hasLocation = argumentsJson.RootElement.TryGetProperty("location", out JsonElement location);
bool hasUnit = argumentsJson.RootElement.TryGetProperty("unit", out JsonElement unit);
if (!hasLocation)
{
throw new ArgumentNullException(nameof(location), "The location argument is required.");
}
string toolResult = hasUnit
? GetCurrentWeather(location.GetString() ?? "", unit.GetString() ?? "")
: GetCurrentWeather(location.GetString() ?? "");
messages.Add(new ToolChatMessage(toolCall.Id, toolResult));
break;
}
default:
{
// Handle other unexpected calls.
throw new NotImplementedException();
}
}
}
requiresAction = true;
break;
}
case OpenAI.Chat.ChatFinishReason.Length:
throw new NotImplementedException("Incomplete model output due to MaxTokens parameter or token limit exceeded.");
case OpenAI.Chat.ChatFinishReason.ContentFilter:
throw new NotImplementedException("Omitted content due to a content filter flag.");
case OpenAI.Chat.ChatFinishReason.FunctionCall:
throw new NotImplementedException("Deprecated in favor of tool calls.");
default:
throw new NotImplementedException(completion.FinishReason.ToString());
}
} while (requiresAction);
}
#endregion
#region How to use chat completions with structured outputs
[Fact]
public void StructuredOutputs_ChatClient_Test()
{
List<OpenAI.Chat.ChatMessage> messages =[new UserChatMessage("How can I solve 8x + 7 = -23?"),];
ChatCompletionOptions options = new()
{
ResponseFormat = OpenAI.Chat.ChatResponseFormat.CreateJsonSchemaFormat(
jsonSchemaFormatName: "math_reasoning",
jsonSchema: BinaryData.FromBytes("""
{
"type": "object",
"properties": {
"steps": {
"type": "array",
"items": {
"type": "object",
"properties": {
"explanation": { "type": "string" },
"output": { "type": "string" }
},
"required": ["explanation", "output"],
"additionalProperties": false
}
},
"final_answer": { "type": "string" }
},
"required": ["steps", "final_answer"],
"additionalProperties": false
}
"""u8.ToArray()),
jsonSchemaIsStrict: true)
};
ChatCompletion completion = _singtonChatClient.CompleteChat(messages, options);
using JsonDocument structuredJson = JsonDocument.Parse(completion.Content[0].Text);
_output.WriteLine($"Final answer: {structuredJson.RootElement.GetProperty("final_answer")}");
_output.WriteLine("Reasoning steps:");
foreach (JsonElement stepElement in structuredJson.RootElement.GetProperty("steps").EnumerateArray())
{
_output.WriteLine($" - Explanation: {stepElement.GetProperty("explanation")}");
_output.WriteLine($" Output: {stepElement.GetProperty("output")}");
}
}
#endregion
#region How to use chat completions with audio
/// <summary>
/// Generate speech (text to speech).
/// </summary>
[Fact(Skip = "Skipped: the local Ollama test environment does not support the OpenAI audio API")]
//[Fact]
public void GenerateSpeech_AudioClient_Test()
{
var aiClientOption = new OpenAIClientOptions()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1")
};
AudioClient client = new("tts-1-1106", new ApiKeyCredential("sk-4azuOUkbzNGP22pQkND8ad1vZl7ladwBQyqGKlWWZyxYgX1L"), aiClientOption);
string input = """
使湿
""";
BinaryData speech = client.GenerateSpeech(input, GeneratedSpeechVoice.Alloy);
using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.mp3");
speech.ToStream().CopyTo(stream);
}
/// <summary>
/// Speech to text (audio transcription).
/// </summary>
[Fact(Skip = "Skipped: the local Ollama test environment does not support the OpenAI audio API")]
//[Fact]
public void AudioToText_AudioClient_Test()
{
var aiClientOption = new OpenAIClientOptions()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1")
};
AudioClient client = new("whisper-1", new ApiKeyCredential("sk-4azuOUkbzNGP22pQkND8ad1vZl7ladwBQyqGKlWWZyxYgX1L"), aiClientOption);
string audioFilePath = Path.Combine(Environment.CurrentDirectory, "Assets", "yuxia.mp3");
AudioTranscription transcription = client.TranscribeAudio(audioFilePath);
_output.WriteLine($"{transcription.Text}");
}
#endregion
#region How to use responses with streaming and reasoning
[Fact(Skip = "Skipped: not supported by the local Ollama test environment")]
public void Responses_With_Streaming_Reasoning_ChatClient_Test()
{
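// Illustrative sketch only; the original test body is empty and the test is skipped.
// The Responses streaming members used below (CreateResponseStreamingAsync, StreamingResponseUpdate,
// StreamingResponseOutputTextDeltaUpdate) are assumptions based on the OpenAI .NET Responses API and
// should be verified against the installed SDK version; the method would also need to become async Task.
//
// OpenAIResponseClient client = _defaultOpenAIClient.GetOpenAIResponseClient(_ollamaOptionsMonitor.CurrentValue.Model);
// var buffer = new StringBuilder();
// await foreach (StreamingResponseUpdate update in client.CreateResponseStreamingAsync("Explain in one sentence why the sky is blue."))
// {
//     if (update is StreamingResponseOutputTextDeltaUpdate delta)
//     {
//         buffer.Append(delta.Delta);
//     }
// }
// _output.WriteLine(buffer.ToString());
//
// Reasoning effort could additionally be configured through ResponseCreationOptions; the exact option names vary by SDK version.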
}
#endregion
#region How to use responses with file search
//[Fact]
[Fact(Skip = "Skipped: not supported by the local Ollama test environment")]
public async Task Responses_With_FileSearch_Test()
{
#pragma warning disable OPENAI001
OpenAIResponseClient client = new
(
"gpt-4o-mini",
new ApiKeyCredential("sk-4azuOUkbzNGP22pQkND8ad1vZl7ladwBQyqGKlWWZyxYgX1L"),
new OpenAIClientOptions()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1")
}
);
// IDs of existing vector stores (the files were vectorized ahead of time).
ResponseTool fileSearchTool = ResponseTool.CreateFileSearchTool(vectorStoreIds: ["sssssssss"]);
OpenAIResponse response = await client.CreateResponseAsync
(
userInputText: "According to available files, what's the secret number?",
new ResponseCreationOptions()
{
Tools = { fileSearchTool }
}
);
foreach (ResponseItem outputItem in response.OutputItems)
{
if (outputItem is FileSearchCallResponseItem fileSearchCall)
{
Console.WriteLine($"[file_search] ({fileSearchCall.Status}): {fileSearchCall.Id}");
foreach (string query in fileSearchCall.Queries)
{
Console.WriteLine($" - {query}");
}
}
else if (outputItem is MessageResponseItem message)
{
Console.WriteLine($"[{message.Role}] {message.Content.FirstOrDefault()?.Text}");
}
}
#pragma warning restore OPENAI001
}
#endregion
#region How to use responses with web search
[Fact(Skip = "Skipped: not supported by the local Ollama test environment")]
//[Fact]
public async Task WebSearch_ChatClient_Test()
{
#pragma warning disable OPENAI001
OpenAIResponseClient client = new
(
"gpt-4o-mini",
new ApiKeyCredential("sk-4azuOUkbzNGP22pQkND8ad1vZl7ladwBQyqGKlWWZyxYgX1L"),
new OpenAIClientOptions()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1")
}
);
OpenAIResponse response = await client.CreateResponseAsync
(
userInputText: "What's a happy news headline from today?",
new ResponseCreationOptions()
{
Tools = { ResponseTool.CreateWebSearchTool() },
}
);
foreach (ResponseItem item in response.OutputItems)
{
if (item is WebSearchCallResponseItem webSearchCall)
{
Console.WriteLine($"[Web search invoked]({webSearchCall.Status}) {webSearchCall.Id}");
}
else if (item is MessageResponseItem message)
{
Console.WriteLine($"[{message.Role}] {message.Content?.FirstOrDefault()?.Text}");
}
}
#pragma warning restore OPENAI001
}
#endregion
#region How to generate text embeddings
[Fact]
public void Embedding_ChatClient_Test()
{
EmbeddingClient embeddingClient = _defaultOpenAIClient.GetEmbeddingClient(ModelSelecter.ModelWithEmbedding);
string description = "Best hotel in town if you like luxury hotels. They have an amazing infinity pool, a spa,"
+ " and a really helpful concierge. The location is perfect -- right downtown, close to all the tourist"
+ " attractions. We highly recommend this hotel.";
OpenAIEmbedding embedding = embeddingClient.GenerateEmbedding(description);
ReadOnlyMemory<float> vector = embedding.ToFloats();
Assert.True(vector.Length>0);
}
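/// <summary>
/// Added for illustration (not in the original test class): generated embeddings are typically compared
/// with cosine similarity, e.g. CosineSimilarity(embeddingA.ToFloats().Span, embeddingB.ToFloats().Span).
/// This is a minimal, self-contained sketch of that calculation.
/// </summary>
private static double CosineSimilarity(ReadOnlySpan<float> a, ReadOnlySpan<float> b)
{
double dot = 0, normA = 0, normB = 0;
int length = Math.Min(a.Length, b.Length);
for (int i = 0; i < length; i++)
{
dot += a[i] * b[i];
normA += a[i] * a[i];
normB += b[i] * b[i];
}
// Small epsilon guards against division by zero for degenerate (all-zero) vectors.
return dot / (Math.Sqrt(normA) * Math.Sqrt(normB) + 1e-12);
}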
#endregion
#region How to generate images
[Fact(Skip = "Skipped: not supported by the local Ollama test environment")]
//[Fact]
public void Image_ChatClient_Test()
{
ImageClient imageClient = new
(
"dall-e-3",
new ApiKeyCredential("sk-4azuOUkbzNGP22pQkND8ad1vZl7ladwBQyqGKlWWZyxYgX1L"),
new OpenAIClientOptions()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1")
}
);
string prompt = """
The concept for a living room that blends Scandinavian simplicity with Japanese minimalism for a serene and cozy atmosphere.
It's a space that invites relaxation and mindfulness, with natural light and fresh air.
Using neutral tones, including colors like white, beige, gray, and black, that create a sense of harmony.
Featuring sleek wood furniture with clean lines and subtle curves to add warmth and elegance. Plants and flowers in ceramic pots adding color and life to a space.
They can serve as focal points, creating a connection with nature. Soft textiles and cushions in organic fabrics adding comfort and softness to a space.
They can serve as accents, adding contrast and texture.
""";
ImageGenerationOptions options = new()
{
Quality = GeneratedImageQuality.High,
Size = GeneratedImageSize.W1792xH1024,
Style = GeneratedImageStyle.Vivid,
ResponseFormat = GeneratedImageFormat.Bytes
};
GeneratedImage image = imageClient.GenerateImage(prompt, options);
BinaryData bytes = image.ImageBytes;
using FileStream stream = File.OpenWrite($"{Guid.NewGuid()}.png");
bytes.ToStream().CopyTo(stream);
}
#endregion
#region How to transcribe audio
[Fact(Skip = "Skipped: not supported by the local Ollama test environment")]
//[Fact]
public void Transcribe_Audio_AudioClient_Test()
{
AudioClient client = new
(
"whisper-1",
new ApiKeyCredential("sk-4azuOUkbzNGP22pQkND8ad1vZl7ladwBQyqGKlWWZyxYgX1L"),
new OpenAIClientOptions()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1")
}
);
string audioFilePath = Path.Combine("Assets", "dongdong.mp3");
AudioTranscriptionOptions options = new()
{
ResponseFormat = AudioTranscriptionFormat.Verbose,
TimestampGranularities = AudioTimestampGranularities.Word | AudioTimestampGranularities.Segment,
};
AudioTranscription transcription = client.TranscribeAudio(audioFilePath, options);
_output.WriteLine("Transcription:");
_output.WriteLine($"{transcription.Text}");
_output.WriteLine("");
_output.WriteLine($"Words:");
foreach (TranscribedWord word in transcription.Words)
{
_output.WriteLine($" {word.Word,15} : {word.StartTime.TotalMilliseconds,5:0} - {word.EndTime.TotalMilliseconds,5:0}");
}
_output.WriteLine("");
_output.WriteLine($"Segments:");
foreach (TranscribedSegment segment in transcription.Segments)
{
_output.WriteLine($" {segment.Text,90} : {segment.StartTime.TotalMilliseconds,5:0} - {segment.EndTime.TotalMilliseconds,5:0}");
}
}
#endregion
#region How to use assistants with retrieval-augmented generation (RAG)
/// <summary>
/// In this example, you have a JSON document containing monthly sales figures for different products, and you want to build an assistant that can analyze it and answer questions about it.
/// Use both the OpenAIFileClient from the OpenAI.Files namespace and the AssistantClient from the OpenAI.Assistants namespace.
/// </summary>
//[Fact]
[Fact(Skip = "Skipped: not supported by the local Ollama test environment")]
public void RAG_Assistant_ChatClient_Test()
{
#pragma warning disable OPENAI001
OpenAIClientOptions clientOptions = new()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1"),
};
OpenAIClient openAIClient = new(new ApiKeyCredential("sk-4azuOUkbzNGP22pQkND8ad1vZl7ladwBQyqGKlWWZyxYgX1L"), clientOptions);
OpenAIFileClient fileClient = openAIClient.GetOpenAIFileClient();
AssistantClient assistantClient = openAIClient.GetAssistantClient();
using Stream document = BinaryData.FromBytes("""
{
"description": "This document contains the sale history data for Contoso products.",
"sales": [
{
"month": "January",
"by_product": {
"113043": 15,
"113045": 12,
"113049": 2
}
},
{
"month": "February",
"by_product": {
"113045": 22
}
},
{
"month": "March",
"by_product": {
"113045": 16,
"113055": 5
}
}
]
}
"""u8.ToArray()).ToStream();
// Upload this document to OpenAI with OpenAIFileClient.UploadFile. Be sure to use FileUploadPurpose.Assistants so the assistant can access it later.
OpenAIFile salesFile = fileClient.UploadFile
(
document,
"monthly_sales.json",
FileUploadPurpose.Assistants
);
// Create a new assistant, using an AssistantCreationOptions instance to customize it.
AssistantCreationOptions assistantOptions = new()
{
Name = "Example: Contoso sales RAG",
Instructions ="You are an assistant that looks up sales data and helps visualize the information based on user queries. When asked to generate a graph, chart, or other visualization, use the code interpreter tool to do so.",
Tools =
{
new FileSearchToolDefinition(),
new CodeInterpreterToolDefinition(),
},
ToolResources = new()
{
FileSearch = new()
{
NewVectorStores =
{
new VectorStoreCreationHelper([salesFile.Id]),
}
}
},
};
Assistant assistant = assistantClient.CreateAssistant("gpt-4o", assistantOptions);
// Next, create a new thread. For illustration, include an initial user message asking about sales of a given product, then start it with AssistantClient.CreateThreadAndRun:
ThreadCreationOptions threadOptions = new()
{
InitialMessages = { "How well did product 113045 sell in February? Graph its trend over time." }
};
ThreadRun threadRun = assistantClient.CreateThreadAndRun(assistant.Id, threadOptions);
// Poll the run's status until it is no longer queued or in progress:
do
{
Thread.Sleep(TimeSpan.FromSeconds(1));
threadRun = assistantClient.GetRun(threadRun.ThreadId, threadRun.Id);
} while (!threadRun.Status.IsTerminal);
// If everything went well, the run's terminal status will be RunStatus.Completed.
// Finally, use AssistantClient.GetMessages to retrieve the messages associated with this thread, which now include the assistant's response to the initial user message.
// For illustration, print the messages to the console and save any images the assistant generated to local storage:
CollectionResult<ThreadMessage> messages = assistantClient.GetMessages(threadRun.ThreadId, new MessageCollectionOptions() { Order = MessageCollectionOrder.Ascending });
foreach (ThreadMessage message in messages)
{
Console.Write($"[{message.Role.ToString().ToUpper()}]: ");
foreach (MessageContent contentItem in message.Content)
{
if (!string.IsNullOrEmpty(contentItem.Text))
{
Console.WriteLine($"{contentItem.Text}");
if (contentItem.TextAnnotations.Count > 0)
{
Console.WriteLine();
}
// Include annotations, if any.
foreach (TextAnnotation annotation in contentItem.TextAnnotations)
{
if (!string.IsNullOrEmpty(annotation.InputFileId))
{
Console.WriteLine($"* File citation, file ID: {annotation.InputFileId}");
}
if (!string.IsNullOrEmpty(annotation.OutputFileId))
{
Console.WriteLine($"* File output, new file ID: {annotation.OutputFileId}");
}
}
}
if (!string.IsNullOrEmpty(contentItem.ImageFileId))
{
OpenAIFile imageInfo = fileClient.GetFile(contentItem.ImageFileId);
BinaryData imageBytes = fileClient.DownloadFile(contentItem.ImageFileId);
using FileStream stream = File.OpenWrite($"{imageInfo.Filename}.png");
imageBytes.ToStream().CopyTo(stream);
Console.WriteLine($"<image: {imageInfo.Filename}.png>");
}
}
Console.WriteLine();
}
// This produces output along these lines:
_ =
"""
[USER]: How well did product 113045 sell in February? Graph its trend over time.
[ASSISTANT]: Product 113045 sold 22 units in February4:0monthly_sales.json.
Now, I will generate a graph to show its sales trend over time.
* File citation, file ID: file-hGOiwGNftMgOsjbynBpMCPFn
[ASSISTANT]: <image: 015d8e43-17fe-47de-af40-280f25452280.png>
The sales trend for Product 113045 over the past three months shows that:
- In January, 12 units were sold.
- In February, 22 units were sold, indicating significant growth.
- In March, sales dropped slightly to 16 units.
The graph above visualizes this trend, showing a peak in sales during February.
""";
#pragma warning restore OPENAI001
}
#endregion
#region How to use assistants with streaming and vision
// Demonstrates how to provide image data to an assistant using the v2 Assistants API and then stream the run's response.
//[Fact]
[Fact(Skip = "Skipped: not supported by the local Ollama test environment")]
public void Streaming_Vision_AssistantsClient()
{
#pragma warning disable OPENAI001
OpenAIClientOptions clientOptions = new()
{
Endpoint = new Uri("https://sg.uiuiapi.com/v1"),
};
OpenAIClient openAIClient = new(new ApiKeyCredential("sk-4azuOUkbzNGP22pQkND8ad1vZl7ladwBQyqGKlWWZyxYgX1L"), clientOptions);
OpenAIFileClient fileClient = openAIClient.GetOpenAIFileClient();
AssistantClient assistantClient = openAIClient.GetAssistantClient();
OpenAIFile pictureOfAppleFile = fileClient.UploadFile
(
Path.Combine("Assets", "images_apple.png"),
FileUploadPurpose.Vision
);
Uri linkToPictureOfOrange = new("https://raw.githubusercontent.com/openai/openai-dotnet/refs/heads/main/examples/Assets/images_orange.png");
// Next, create a new assistant with a vision-capable model (such as gpt-4o) and a thread whose initial message references the image data.
Assistant assistant = assistantClient.CreateAssistant
(
"gpt-4o",
new AssistantCreationOptions()
{
Instructions = "When asked a question, attempt to answer very concisely. Prefer one-sentence answers whenever feasible."
}
);
AssistantThread thread = assistantClient.CreateThread
(
new ThreadCreationOptions()
{
InitialMessages =
{
new ThreadInitializationMessage(
OpenAI.Assistants.MessageRole.User,
[
"Hello, assistant! Please compare these two images for me:",
MessageContent.FromImageFileId(pictureOfAppleFile.Id),
MessageContent.FromImageUri(linkToPictureOfOrange),
]),
}
}
);
CollectionResult<StreamingUpdate> streamingUpdates = assistantClient.CreateRunStreaming
(
thread.Id,
assistant.Id,
new RunCreationOptions()
{
AdditionalInstructions = "When possible, try to sneak in puns if you're asked to compare things.",
}
);
foreach (StreamingUpdate streamingUpdate in streamingUpdates)
{
if (streamingUpdate.UpdateKind == StreamingUpdateReason.RunCreated)
{
Console.WriteLine($"--- Run started! ---");
}
if (streamingUpdate is MessageContentUpdate contentUpdate)
{
Console.Write(contentUpdate.Text);
}
}
#pragma warning restore OPENAI001
}
#endregion
#region Advanced scenarios
/// <summary>
/// Using protocol methods.
/// In addition to the client methods that use strongly typed request and response objects, the .NET library also provides protocol methods for more direct access to the REST API.
/// Protocol methods are "binary in, binary out": they accept BinaryContent as the request body and provide BinaryData as the response body.
/// </summary>
[Fact]
public void Using_Protocol_Methods_Test()
{
// To use the protocol-method variant of ChatClient.CompleteChat, pass the request body as BinaryContent:
//ChatClient client = new("gpt-4o", Environment.GetEnvironmentVariable("OPENAI_API_KEY"));
ChatClient client = _defaultOpenAIClient.GetChatClient(ModelSelecter.ModelWithToolAndThing);
BinaryData input = BinaryData.FromBytes("""
{
"model": "gemma3:4b",
"messages": [
{
"role": "user",
"content": "Say 'this is a test.'"
}
]
}
"""u8.ToArray());
using BinaryContent content = BinaryContent.Create(input);
ClientResult result = client.CompleteChat(content);
BinaryData output = result.GetRawResponse().Content;
using JsonDocument outputAsJson = JsonDocument.Parse(output.ToString());
string message = outputAsJson.RootElement
.GetProperty("choices"u8)[0]
.GetProperty("message"u8)
.GetProperty("content"u8)
.GetString() ?? "";
_output.WriteLine($"[ASSISTANT]: {message}");
// Note how GetRawResponse is then called on the resulting ClientResult, and the response body is retrieved as BinaryData through the PipelineResponse.Content property.
}
/// <summary>
/// Mocking the clients.
/// The OpenAI .NET library is designed to support mocking: every namespace has a corresponding model factory, except for the OpenAI.Assistants and OpenAI.VectorStores namespaces, whose model factories are coming soon.
/// </summary>
[Fact]
public void Mock_Client_Test()
{
#pragma warning disable OPENAI001
Mock<AudioClient> mockClient = new();
Mock<ClientResult<AudioTranscription>> mockResult = new(new List<object>(), Mock.Of<System.ClientModel.Primitives.PipelineResponse>());
AudioTranscription transcription = OpenAIAudioModelFactory.AudioTranscription(text: "I swear I saw an apple flying yesterday!");
// Set up mocks' properties and methods.
mockResult
.SetupGet(result => result.Value)
.Returns(transcription);
mockClient
.Setup
(client => client.TranscribeAudio
(
It.IsAny<string>(),
It.IsAny<AudioTranscriptionOptions>()
)
)
.Returns(mockResult.Object);
// Perform validation.
AudioClient client = mockClient.Object;
bool containsSecretWord = ContainsSecretWord(client, "<audioFilePath>", "apple");
Assert.True(containsSecretWord);
bool ContainsSecretWord(AudioClient client, string audioFilePath, string secretWord)
{
AudioTranscription transcription = client.TranscribeAudio(audioFilePath);
return transcription.Text.Contains(secretWord);
}
#pragma warning restore OPENAI001
}
#endregion
#region Private methods
private static string GetCurrentLocation()
{
// Call the location API here.
return "San Francisco";
}
private static string GetCurrentWeather(string location, string unit = "celsius")
{
// Call the weather API here.
return $"31 {unit}";
}
#endregion
#region Tests against other platforms
[Fact]
public void UiUiAPI_Test()
{
var chatClient = _uiUiApiClient.GetChatClient("gpt-3.5-turbo");
var r = chatClient.CompleteChat("你好");
_output.WriteLine(r.Value.Content.First().Text);
}
[Fact]
public void Zipu_Test()
{
var chatClient = _zipuApiClient.GetChatClient("GLM-4.5-Flash");
var r = chatClient.CompleteChat("你好");
_output.WriteLine(r.Value.Content.First().Text);
}
#endregion
}