|
|
using OllamaStudy.Core.OllamaResponse;
|
|
|
using OllamaStudy.UseHttpClient.Tools;
|
|
|
|
|
|
namespace OllamaStudy.UseHttpClientTest;
|
|
|
|
|
|
/// <summary>
|
|
|
/// Ollama API 测试
|
|
|
/// </summary>
|
|
|
public class OllamaApiTest
|
|
|
{
|
|
|
// xUnit sink for writing output that shows up in the test runner.
private readonly ITestOutputHelper _testOutput;

// Default Ollama server settings (base URL, default model) loaded from configuration.
private OllamaServerOption defaultOllamaOption = OllamaConfig.GetOllamaServerOption();

// Shared client for all requests in this test class; xUnit creates a fresh
// instance of the class (and thus a fresh client) per test.
// NOTE(review): neither the class nor this client is disposed — consider
// implementing IDisposable on the class or using IHttpClientFactory.
private HttpClient httpClient;

/// <summary>
/// Per-test setup: captures the xUnit output helper and builds an
/// <see cref="HttpClient"/> pointed at the configured Ollama server,
/// preferring JSON responses.
/// </summary>
public OllamaApiTest(ITestOutputHelper testOutput)
{
    _testOutput = testOutput;

    httpClient = new HttpClient()
    {
        BaseAddress = new Uri(defaultOllamaOption.OllamaServerUrl)
    };

    httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json"));
}
|
|
|
|
|
|
#region 生成补全
|
|
|
|
|
|
/// <summary>
/// POST /api/generate with the full set of (mostly documented) request
/// parameters, consumed as a streaming response.
/// </summary>
[Fact]
public async Task Completion_Request_FullParameters_Test()
{
    var requestData = new
    {
        // Model name in "model:tag" format; the tag is optional and defaults to "latest".
        //model = option.Model,
        model = "qwen2.5-coder:0.5b",

        // The prompt to generate a response for — the key input.
        prompt = """
            天空为什么是蓝色的?
            """,

        // (Optional) Text after the model response; only dedicated models
        // (e.g. qwen2.5-coder:3b) support it — others throw.
        //suffix = " return result",

        // (Optional) Base64-encoded images (for multimodal models such as LLaVA).
        //images = new string[] { "base64", "base64"},

        // (Optional) Should the model think before responding?
        // think = true,

        /* Advanced options */

        // (Optional) Response format: "json" or a JSON schema.
        //format = "json",

        // (Optional) Additional model parameters.
        options = new
        {
            // Context window size used when generating the next token.
            num_ctx = 2048,

            // How far back the model looks to prevent repetition (0 = off, -1 = num_ctx).
            repeat_last_n = 60,

            // Penalty strength for repetition; lower is more lenient.
            repeat_penalty = 1.1f,

            // Higher values make answers more creative.
            temperature = 0.8f,

            // Random seed; a fixed value reproduces the same text for the same prompt.
            seed = 0,

            // Stop sequences; generation stops and returns when the pattern is met.
            //stop = "设置就异常",

            // Max tokens to predict (-1 = unlimited generation).
            num_predict = -1,

            // Higher (e.g. 100) = more diverse answers; lower (e.g. 10) = more conservative. Default 40.
            top_k = 40,

            // Used with top_k; higher (e.g. 0.95) = more diverse text. Default 0.9.
            top_p = 0.9f,
        },

        // (Optional) Stream the response; set false to disable streaming.
        stream = true,

        // (Optional) System message (overrides the Modelfile).
        //system = "Modelfile 文件格式",

        // (Optional) Prompt template (overrides the Modelfile), in Modelfile format.
        //template = "",

        // (Optional) If true, no formatting is applied to the prompt (use when
        // you supply a fully templated prompt yourself).
        raw = false,

        // (Optional) How long the model stays loaded after the request (default 5m).
        keep_alive = "5m",

        // (Optional, deprecated) The context returned by a previous /generate call.
        //context = "你好",
    };

    using var request = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };

    // HttpCompletionOption.ResponseHeadersRead is the key to streaming: return
    // as soon as headers arrive instead of buffering the whole body. Only the
    // SendAsync overloads accept this option.
    // FIX: dispose the response so the connection is released deterministically.
    using var response = await httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    response.EnsureSuccessStatusCode();

    // Streaming responses are newline-delimited JSON; read and print line by line.
    using var responseStream = await response.Content.ReadAsStreamAsync();
    using var reader = new StreamReader(responseStream);

    while (!reader.EndOfStream)
    {
        var line = await reader.ReadLineAsync();
        _testOutput.WriteLine(line);
    }
}
|
|
|
|
|
|
/// <summary>
/// Minimal streaming POST /api/generate request (streaming is the default).
/// </summary>
[Fact]
public async Task Completion_Request_Streaming_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        prompt = """天空为什么是蓝色的?""",
    };

    using var request = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };

    // ResponseHeadersRead lets us consume the body while it streams in.
    // FIX: dispose the response so the connection is released deterministically.
    using var response = await httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    response.EnsureSuccessStatusCode();

    using var responseStream = await response.Content.ReadAsStreamAsync();
    using var reader = new StreamReader(responseStream);

    // Streaming responses are newline-delimited JSON objects.
    while (!reader.EndOfStream)
    {
        var line = await reader.ReadLineAsync();
        _testOutput.WriteLine(line);
    }
}
|
|
|
|
|
|
/// <summary>
/// Non-streaming POST /api/generate: the whole reply arrives as one JSON body.
/// </summary>
[Fact]
public async Task Completion_Request_NoStreaming_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        prompt = """天空为什么是蓝色的?""",

        // Set stream = false to disable streaming.
        stream = false,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };

    // FIX: dispose the response so the connection is released deterministically.
    using var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    // Deserialize the single JSON document.
    var responseObject = await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject);
    Assert.Equal(defaultOllamaOption.Model, responseObject.Model);
    _testOutput.WriteLine(responseObject.Response);
}
|
|
|
|
|
|
/// <summary>
/// POST /api/generate with a suffix (fill-in-the-middle completion);
/// only supported by specific models such as qwen2.5-coder.
/// </summary>
[Fact]
public async Task Completion_Request_WithSuffix_Test()
{
    var requestData = new
    {
        // Only specific models (e.g. qwen2.5-coder:0.5b) support suffix.
        model = ModelSelecter.ModelWithSuffixAndImage,

        prompt = """def compute_gcd(a, b):""",

        // (Optional) Text that should follow the model's completion.
        suffix = " return result",

        options = new { temperature = 0 },
        stream = false,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };

    // FIX: dispose the response so the connection is released deterministically.
    using var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Response);

    _testOutput.WriteLine(responseObject.Response);
}
|
|
|
|
|
|
/// <summary>
/// POST /api/generate constrained by a JSON schema via the format field.
/// </summary>
[Fact]
public async Task Completion_Request_StructuredOutputs_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        prompt = """Ollama is 22 years old and is busy saving the world. Respond using JSON""",
        stream = false,

        // JSON schema the response must conform to.
        format = new
        {
            type = "object",
            properties = new { age = new { type = "integer" }, available = new { type = "boolean" } },
            required = new string[] { "age", "available" }
        }
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };

    // FIX: dispose the response so the connection is released deterministically.
    using var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Response);

    _testOutput.WriteLine(responseObject.Response);
}
|
|
|
|
|
|
/// <summary>
/// POST /api/generate in JSON mode (format = "json"): the model is asked to
/// produce well-formed JSON without a specific schema.
/// </summary>
[Fact]
public async Task Completion_Request_JsonMode_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        prompt = """一天中不同时间的天空是什么颜色的?使用JSON进行响应!""",
        stream = false,
        format = "json",
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };

    // FIX: dispose the response so the connection is released deterministically.
    using var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Response);

    _testOutput.WriteLine(responseObject.Response);
}
|
|
|
|
|
|
/// <summary>
/// POST /api/generate with a base64-encoded image attached; requires a
/// vision-capable model. The image payload is a small embedded PNG fixture.
/// </summary>
[Fact]
public async Task Completion_Request_WithImages_Test()
{
    // NOTE(review): "requetData" is a typo for "requestData" used throughout the file.
    var requetData = new
    {
        // Only specific (vision-capable) models support images.
        model = ModelSelecter.ModelWithSuffixAndImage,
        prompt = """这张照片里有什么东西?""",
        stream = false,

        // Must be a model that supports machine vision, e.g. qwen2.5vl:3b.
        images = new string[] { "iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/zRaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97WK2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhECdBGXZZMWldY5BHm5xgAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZOndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUjKiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43LhxoyPo7Lf1kNt7oO8wWAbNwaYjIv5lhyS7kRf96dvm5Jah8vfvX3flyhX35cuX6HfzFHOToS1H4BenCaHvO8pr8iDuwoUL7tevX+b5ZdbBair0xkFIlFDlW4ZknEClsp/TzXyAKVOmmHWFVSbDNw1l1+4f90U6IY/q4V27dpnE9bJ+v87QEydjqx/UamVVPRG+mwkNTYN+9tjkwzEx+atCm/X9WvWtDtAb68Wy9LXa1UmvCDDIpPkyOQ5ZwSzJ4jMrvFcr0rSjOUh+GcT4LSg5ugkW1Io0/SCDQBojh0hPlaJdah+tkVYrnTZowP8iq1F1TgMBBauufyB33x1v+NWFYmT5KmppgHC+NkAgbmRkpD3yn9QIseXymoTQFGQmIOKTxiZIWpvAatenVqRVXf2nTrAWMsPnKrMZHz6bJq5jvce6QK8J1cQNgKxlJapMPdZSR64/UivS9NztpkVEdKcrs5alhhWP9NeqlfWopzhZScI6QxseegZRGeg5a8C3Re1Mfl1ScP36ddcUaMuv24iOJtz7sbUjTS4qBvKmstYJoUauiuD3k5qhyr7QdUHMeCgLa1Ear9NquemdXgmum4fvJ6w1lqsuDhNrg1qSpleJK7K3TF0Q2jSd94uSZ60kK1e3qyVpQK6PVWXp2/FC3mp6jBhKKOiY2h3gtUV64TWM6wDETRPLDfSakXmH3w8g9Jlug8ZtTt4kVF0kLUYYmCCtD/DrQ5YhMGbA9L3ucdjh0y8kOHW5gU/VEEmJTcL4Pz/f7mgoAbYkAAAAAElFTkSuQmCC" }
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requetData)
    };

    // NOTE(review): the response message is not disposed here (other tests in
    // this file share the same pattern).
    var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    // Deserialize the non-streaming JSON body.
    var responseObject = await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Response);

    _testOutput.WriteLine(responseObject.Response);
}
|
|
|
|
|
|
/// <summary>
/// Raw mode: bypass the template system and supply a fully templated prompt
/// yourself via raw = true. Note that raw mode does not return a context.
/// </summary>
/// <returns></returns>
[Fact]
public async Task Completion_Request_RawMode_Test()
{
    var requestData = new
    {
        model = ModelSelecter.ModelWithRawmodel,
        prompt = """[INST] why is the sky blue? [/INST]""",
        stream = false,
        raw = true,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };

    // FIX: dispose the response so the connection is released deterministically.
    using var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Response);

    _testOutput.WriteLine(responseObject.Response);
}
|
|
|
|
|
|
/// <summary>
/// Reproducible output: the same fixed seed plus temperature 0 should yield
/// identical responses for the same prompt across two requests.
/// </summary>
/// <returns></returns>
[Fact]
public async Task Completion_Request_ReproducibleOutputs_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        prompt = """列出世界四大洋""",
        stream = false,
        think = false,

        // Fixed seed + zero temperature make generation deterministic.
        options = new { seed = 123, temperature = 0 },
    };

    // First request.
    using var requestMessage1 = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };
    // FIX: dispose both responses so connections are released deterministically.
    using var responseMessage1 = await httpClient.SendAsync(requestMessage1);
    responseMessage1.EnsureSuccessStatusCode();
    var responseObject1 = await responseMessage1.Content.ReadFromJsonAsync<GenerateResponse>();

    // Second request with an identical payload.
    using var requestMessage2 = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(requestData)
    };
    using var responseMessage2 = await httpClient.SendAsync(requestMessage2);
    responseMessage2.EnsureSuccessStatusCode();
    var responseObject2 = await responseMessage2.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject1);
    Assert.NotNull(responseObject1.Response);
    Assert.NotNull(responseObject2);
    Assert.NotNull(responseObject2.Response);

    // Deterministic settings => identical text.
    Assert.Equal(responseObject1.Response, responseObject2.Response);

    _testOutput.WriteLine(responseObject1.Response);
    _testOutput.WriteLine(responseObject2.Response);
}
|
|
|
#endregion
|
|
|
|
|
|
#region 生成对话补全
|
|
|
|
|
|
/// <summary>
/// Streaming chat request test (POST /api/chat; streaming is the default).
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_Streaming_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        messages = new[] { new { role = "user", content = "天空为什么是蓝的?" } },
        think = false
    };

    using var request = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        Content = JsonContent.Create(requestData)
    };

    // ResponseHeadersRead is the key to streaming; only SendAsync accepts it.
    // FIX: dispose the response so the connection is released deterministically.
    using var response = await httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    response.EnsureSuccessStatusCode();

    using var responseStream = await response.Content.ReadAsStreamAsync();
    using var reader = new StreamReader(responseStream);

    // Streaming responses are newline-delimited JSON objects.
    while (!reader.EndOfStream)
    {
        var line = await reader.ReadLineAsync();
        _testOutput.WriteLine(line);
    }
}
|
|
|
|
|
|
/// <summary>
/// Non-streaming chat request test: the whole reply arrives as one JSON body.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_NoStreaming_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        messages = new[] { new { role = "user", content = "天空为什么是蓝的?" } },
        think = false,
        stream = false,
    };

    using var request = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        Content = JsonContent.Create(requestData)
    };

    // FIX: dispose the response so the connection is released deterministically.
    using var response = await httpClient.SendAsync(request);
    response.EnsureSuccessStatusCode();

    // Read the raw body once, then deserialize it, so the full JSON can also
    // be written to the test output.
    var content = await response.Content.ReadAsStringAsync();

    var responseObject = System.Text.Json.JsonSerializer.Deserialize<ChatResponse>(content);

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Message?.Content);
    Assert.True(responseObject.Done);

    _testOutput.WriteLine(content);
}
|
|
|
|
|
|
/// <summary>
/// Structured-output chat request test: constrain the reply with a JSON schema.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_StructuredOutputs_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        messages = new[]
        {
            new { role="user", content = "Ollama is 22 years old and busy saving the world. Return a JSON object with the age and availability."}
        },
        stream = false,

        // JSON schema the reply must conform to.
        format = new
        {
            type = "object",
            properties = new { age = new { type = "integer" }, available = new { type = "boolean" } },
            required = new string[] { "age", "available" }
        }

        //options = new { temperature = 0},
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        Content = JsonContent.Create(requestData)
    };

    // FIX: dispose the response so the connection is released deterministically.
    using var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    var content = await responseMessage.Content.ReadAsStringAsync();

    var responseObject = System.Text.Json.JsonSerializer.Deserialize<ChatResponse>(content);
    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Message);

    _testOutput.WriteLine(responseObject.Message.Content);
}
|
|
|
|
|
|
/// <summary>
/// Chat request with conversation history test: prior user/assistant turns
/// are sent so the model answers in context.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_WithHistory_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        messages = new[]
        {
            new { role="user", content = "why is the sky blue?"},
            new { role="assistant", content = "due to rayleigh scattering."},
            new { role="user", content = "how is that different than mie scattering?"}
        },
        stream = false,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        Content = JsonContent.Create(requestData)
    };

    // FIX: dispose the response so the connection is released deterministically.
    using var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    var content = await responseMessage.Content.ReadAsStringAsync();

    var responseObject = System.Text.Json.JsonSerializer.Deserialize<ChatResponse>(content);
    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Message);

    _testOutput.WriteLine(content);
}
|
|
|
|
|
|
/// <summary>
/// Chat request with a base64 image attached; requires a vision-capable model.
/// The embedded PNG fixture is the Ollama logo.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_WithImages_Test()
{
    var requetData = new
    {
        model = ModelSelecter.ModelWithSuffixAndImage,
        messages = new[]
        {
            new
            {
                role = "user",
                content = "这张图片中包含什么内容?",
                // Must be a model that supports machine vision, e.g. qwen2.5vl:3b
                // (the image below is the Ollama logo).
                images = new string[] { "iVBORw0KGgoAAAANSUhEUgAAAG0AAABmCAYAAADBPx+VAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAA3VSURBVHgB7Z27r0zdG8fX743i1bi1ikMoFMQloXRpKFFIqI7LH4BEQ+NWIkjQuSWCRIEoULk0gsK1kCBI0IhrQVT7tz/7zZo888yz1r7MnDl7z5xvsjkzs2fP3uu71nNfa7lkAsm7d++Sffv2JbNmzUqcc8m0adOSzZs3Z+/XES4ZckAWJEGWPiCxjsQNLWmQsWjRIpMseaxcuTKpG/7HP27I8P79e7dq1ars/yL4/v27S0ejqwv+cUOGEGGpKHR37tzJCEpHV9tnT58+dXXCJDdECBE2Ojrqjh071hpNECjx4cMHVycM1Uhbv359B2F79+51586daxN/+pyRkRFXKyRDAqxEp4yMlDDzXG1NPnnyJKkThoK0VFd1ELZu3TrzXKxKfW7dMBQ6bcuWLW2v0VlHjx41z717927ba22U9APcw7Nnz1oGEPeL3m3p2mTAYYnFmMOMXybPPXv2bNIPpFZr1NHn4HMw0KRBjg9NuRw95s8PEcz/6DZELQd/09C9QGq5RsmSRybqkwHGjh07OsJSsYYm3ijPpyHzoiacg35MLdDSIS/O1yM778jOTwYUkKNHWUzUWaOsylE00MyI0fcnOwIdjvtNdW/HZwNLGg+sR1kMepSNJXmIwxBZiG8tDTpEZzKg0GItNsosY8USkxDhD0Rinuiko2gfL/RbiD2LZAjU9zKQJj8RDR0vJBR1/Phx9+PHj9Z7REF4nTZkxzX4LCXHrV271qXkBAPGfP/atWvu/PnzHe4C97F48eIsRLZ9+3a3f/9+87dwP1JxaF7/3r17ba+5l4EcaVo0lj3SBq5kGTJSQmLWMjgYNei2GPT1MuMqGTDEFHzeQSP2wi/jGnkmPJ/nhccs44jvDAxpVcxnq0F6eT8h4ni/iIWpR5lPyA6ETkNXoSukvpJAD3AsXLiwpZs49+fPn5ke4j10TqYvegSfn0OnafC+Tv9ooA/JPkgQysqQNBzagXY55nO/oa1F7qvIPWkRL12WRpMWUvpVDYmxAPehxWSe8ZEXL20sadYIozfmNch4QJPAfeJgW3rNsnzphBKNJM2KKODo1rVOMRYik5ETy3ix4qWNI81qAAirizgMIc+yhTytx0JWZuNI03qsrgWlGtwjoS9XwgUhWGyhUaRZZQNNIEwCiXD16tXcAHUs79co0vSD8rrJCIW98pzvxpAWyyo3HYwqS0+H0BjStClcZJT5coMm6D2LOF8TolGJtK9fvyZpyiC5ePFi9nc/oJU4eiEP0jVoAnHa9wyJycITMP78+eMeP37sXrx44d6+fdt6f82aNdkx1pg9e3Zb5W+RSRE+n+VjksQWifvVaTKFhn5O8my63K8Qabdv33b379/PiAP//vuvW7BggZszZ072/+TJk91YgkafPn166zXB1rQHFvouAWHq9z3SEevSUerqCn2/dDCeta2jxYbr69evk4MHDyY7d+7MjhMnTiTPnz9Pfv/+nfQT2ggpO2dMF8cghuoM7Ygj5iWCqRlGFml0QC/ftGmTmzt3rmsaKDsgBSPh0/8yPeLLBihLkOKJc0jp8H8vUzcxIA1k6QJ/c78tWEyj5P3o4u9+jywNPdJi5rAH9x0KHcl4Hg570eQp3+vHXGyrmEeigzQsQsjavXt38ujRo44LQuDDhw+TW7duRS1HGgMxhNXHgflaNTOsHyKvHK5Ijo2jbFjJBQK9YwFd6RVMzfgRBmEfP37suBBm/p49e1qjEP2mwTViNRo0VJWH1deMXcNK08uUjVUu7s/zRaL+oLNxz1bpANco4npUgX4G2eFbpDFyQoQxojBCpEGSytmOH8qrH5Q9vuzD6ofQylkCUmh8DBAr+q8JCyVNtWQIidKQE9wNtLSQnS4jDSsxNHogzFuQBw4cyM61UKVsjfr3ooBkPSqqQHesUPWVtzi9/vQi1T+rJj7WiTz4Pt/l3LxUkr5P2VYZaZ4URpsE+st/dujQoaBBYokbrz/8TJNQYLSonrPS9kUaSkPeZyj1AWSj+d+VBoy1pIWVNed8P0Ll/ee5HdGRhrHhR5GGN0r4LGZBaj8oFDJitBTJzIZgFcmU0Y8ytWMZMzJOaXUSrUs5RxKnrxmbb5YXO9VGUhtpXldhEUogFr3IzIsvlpmdosVcGVGXFWp2oU9kLFL3dEkSz6NHEY1sjSRdIuDFWEhd8KxFqsRi1uM/nz9/zpxnwlESONdg6dKlbsaMGS4EHFHtjFIDHwKOo46l4TxSuxgDzi+rE2jg+BaFruOX4HXa0Nnf1lwAPufZeF8/r6zD97WK2qFnGjBxTw5qNGPxT+5T/r7/7RawFC3j4vTp09koCxkeHjqbHJqArmH5UrFKKksnxrK7FuRIs8STfBZv+luugXZ2pR/pP9Ois4z+TiMzUUkUjD0iEi1fzX8GmXyuxUBRcaUfykV0YZnlJGKQpOiGB76x5GeWkWWJc3mOrK6S7xdND+W5N6XyaRgtWJFe13GkaZnKOsYqGdOVVVbGupsyA/l7emTLHi7vwTdirNEt0qxnzAvBFcnQF16xh/TMpUuXHDowhlA9vQVraQhkudRdzOnK+04ZSP3DUhVSP61YsaLtd/ks7ZgtPcXqPqEafHkdqa84X6aCeL7YWlv6edGFHb+ZFICPlljHhg0bKuk0CSvVznWsotRu433alNdFrqG45ejoaPCaUkWERpLXjzFL2Rpllp7PJU2a/v7Ab8N05/9t27Z16KUqoFGsxnI9EosS2niSYg9SpU6B4JgTrvVW1flt1sT+0ADIJU2maXzcUTraGCRaL1Wp9rUMk16PMom8QhruxzvZIegJjFU7LLCePfS8uaQdPny4jTTL0dbee5mYokQsXTIWNY46kuMbnt8Kmec+LGWtOVIl9cT1rCB0V8WqkjAsRwta93TbwNYoGKsUSChN44lgBNCoHLHzquYKrU6qZ8lolCIN0Rh6cP0Q3U6I6IXILYOQI513hJaSKAorFpuHXJNfVlpRtmYBk1Su1obZr5dnKAO+L10Hrj3WZW+E3qh6IszE37F6EB+68mGpvKm4eb9bFrlzrok7fvr0Kfv727dvWRmdVTJHw0qiiCUSZ6wCK+7XL/AcsgNyL74DQQ730sv78Su7+t/A36MdY0sW5o40ahslXr58aZ5HtZB8GH64m9EmMZ7FpYw4T6QnrZfgenrhFxaSiSGXtPnz57e9TkNZLvTjeqhr734CNtrK41L40sUQckmj1lGKQ0rC37x544r8eNXRpnVE3ZZY7zXo8NomiO0ZUCj2uHz58rbXoZ6gc0uA+F6ZeKS/jhRDUq8MKrTho9fEkihMmhxtBI1DxKFY9XLpVcSkfoi8JGnToZO5sU5aiDQIW716ddt7ZLYtMQlhECdBGXZZMWldY5BHm5xgAroWj4C0hbYkSc/jBmggIrXJWlZM6pSETsEPGqZOndr2uuuR5rF169a2HoHPdurUKZM4CO1WTPqaDaAd+GFGKdIQkxAn9RuEWcTRyN2KSUgiSgF5aWzPTeA/lN5rZubMmR2bE4SIC4nJoltgAV/dVefZm72AtctUCJU2CMJ327hxY9t7EHbkyJFseq+EJSY16RPo3Dkq1kkr7+q0bNmyDuLQcZBEPYmHVdOBiJyIlrRDq41YPWfXOxUysi5fvtyaj+2BpcnsUV/oSoEMOk2CQGlr4ckhBwaetBhjCwH0ZHtJROPJkyc7UjcYLDjmrH7ADTEBXFfOYmB0k9oYBOjJ8b4aOYSe7QkKcYhFlq3QYLQhSidNmtS2RATwy8YOM3EQJsUjKiaWZ+vZToUQgzhkHXudb/PW5YMHD9yZM2faPsMwoc7RciYJXbGuBqJ1UIGKKLv915jsvgtJxCZDubdXr165mzdvtr1Hz5LONA8jrUwKPqsmVesKa49S3Q4WxmRPUEYdTjgiUcfUwLx589ySJUva3oMkP6IYddq6HMS4o55xBJBUeRjzfa4Zdeg56QZ43LhxoyPo7Lf1kNt7oO8wWAbNwaYjIv5lhyS7kRf96dvm5Jah8vfvX3flyhX35cuX6HfzFHOToS1H4BenCaHvO8pr8iDuwoUL7tevX+b5ZdbBair0xkFIlFDlW4ZknEClsp/TzXyAKVOmmHWFVSbDNw1l1+4f90U6IY/q4V27dpnE9bJ+v87QEydjqx/UamVVPRG+mwkNTYN+9tjkwzEx+atCm/X9WvWtDtAb68Wy9LXa1UmvCDDIpPkyOQ5ZwSzJ4jMrvFcr0rSjOUh+GcT4LSg5ugkW1Io0/SCDQBojh0hPlaJdah+tkVYrnTZowP8iq1F1TgMBBauufyB33x1v+NWFYmT5KmppgHC+NkAgbmRkpD3yn9QIseXymoTQFGQmIOKTxiZIWpvAatenVqRVXf2nTrAWMsPnKrMZHz6bJq5jvce6QK8J1cQNgKxlJapMPdZSR64/UivS9NztpkVEdKcrs5alhhWP9NeqlfWopzhZScI6QxseegZRGeg5a8C3Re1Mfl1ScP36ddcUaMuv24iOJtz7sbUjTS4qBvKmstYJoUauiuD3k5qhyr7QdUHMeCgLa1Ear9NquemdXgmum4fvJ6w1lqsuDhNrg1qSpleJK7K3TF0Q2jSd94uSZ60kK1e3qyVpQK6PVWXp2/FC3mp6jBhKKOiY2h3gtUV64TWM6wDETRPLDfSakXmH3w8g9Jlug8ZtTt4kVF0kLUYYmCCtD/DrQ5YhMGbA9L3ucdjh0y8kOHW5gU/VEEmJTcL4Pz/f7mgoAbYkAAAAAElFTkSuQmCC"}
            }
        },

        stream = false,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        Content = JsonContent.Create(requetData)
    };

    // NOTE(review): the response message is not disposed here (other tests in
    // this file share the same pattern).
    var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    // Deserialize the non-streaming JSON body.
    var responseObject = await responseMessage.Content.ReadFromJsonAsync<ChatResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.Message?.Content);

    _testOutput.WriteLine(responseObject.Message.Content);
}
|
|
|
|
|
|
/// <summary>
/// Reproducible chat output test: the same fixed seed plus temperature 0
/// should yield identical replies across two requests.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_ReproducibleOutputs_Test()
{
    var requestData = new
    {
        model = defaultOllamaOption.Model,
        messages = new[]
        {
            new
            {
                role = "user",
                content = "你好",
            }
        },
        think = false,
        stream = false,

        // Fixed seed + zero temperature make generation deterministic.
        options = new { seed = 123, temperature = 0 },
    };

    // First request.
    using var requestMessage1 = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        Content = JsonContent.Create(requestData)
    };
    // FIX: dispose both responses so connections are released deterministically.
    using var responseMessage1 = await httpClient.SendAsync(requestMessage1);
    responseMessage1.EnsureSuccessStatusCode();
    var responseObject1 = await responseMessage1.Content.ReadFromJsonAsync<ChatResponse>();

    // Second request with an identical payload.
    using var requestMessage2 = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        Content = JsonContent.Create(requestData)
    };
    using var responseMessage2 = await httpClient.SendAsync(requestMessage2);
    responseMessage2.EnsureSuccessStatusCode();
    var responseObject2 = await responseMessage2.Content.ReadFromJsonAsync<ChatResponse>();

    Assert.NotNull(responseObject1);
    Assert.NotNull(responseObject1.Message?.Content);
    Assert.NotNull(responseObject2);
    Assert.NotNull(responseObject2.Message?.Content);

    // Deterministic settings => identical text.
    Assert.Equal(responseObject1.Message.Content, responseObject2.Message.Content);

    _testOutput.WriteLine(responseObject1.Message.Content);
    _testOutput.WriteLine(responseObject2.Message.Content);
}
|
|
|
|
|
|
/// <summary>
/// Tool-calling chat request test: advertise a function to the model, execute
/// the requested tool locally, then send the result back for a final answer.
/// </summary>
/// <returns></returns>
[Fact]
public async Task ChatRequest_WithTools_Test()
{
    List<Message> chatMessages = new List<Message>()
    {
        new Message(){ role = "user", content = "获取北京市当前的时间" }
    };

    // Tool (function) definitions advertised to the model.
    var chatTools = new[]
    {
        new
        {
            type = "function",
            function = new
            {
                name = "GetCurrentTime",
                description = "获取指定城市的当前时间",
                parameters = new
                {
                    type = "object",
                    properties = new
                    {
                        city = new
                        {
                            type = "string",
                            description = "城市名称"
                        }
                    },
                    // FIX: "required" belongs inside the JSON-schema "parameters"
                    // object; the original placed it on the function itself,
                    // where the server ignores it.
                    required = new string[] { "city" }
                },
            }
        }
    };

    var requestData = new
    {
        model = ModelSelecter.ModelWithTool,
        messages = chatMessages,
        stream = false,
        tools = chatTools,
        tool_choice = "auto",
    };

    // Step 1: send the request carrying the tool definitions.
    using var requestMessage1 = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        Content = JsonContent.Create(requestData)
    };
    using var responseMessage1 = await httpClient.SendAsync(requestMessage1);
    responseMessage1.EnsureSuccessStatusCode();

    // Step 2: the response carries the tool-call information.
    var responseObject1 = await responseMessage1.Content.ReadFromJsonAsync<ChatResponse>();
    Assert.NotNull(responseObject1?.Message?.ToolCalls);

    List<Tool_Calls> tool_Calls = new List<Tool_Calls>();
    foreach (var call in responseObject1.Message.ToolCalls)
    {
        tool_Calls.Add(new Tool_Calls()
        {
            function = new Function()
            {
                name = call.Function!.Name,
                arguments = call.Function.Arguments!,
            },
        });
    }

    // Echo the assistant turn (with its tool calls) back into the history.
    chatMessages.Add(new Message()
    {
        role = responseObject1.Message.Role,
        content = responseObject1.Message.Content,
        tool_calls = tool_Calls,
    });

    // Step 3: execute the requested tool locally. A real project would
    // dispatch dynamically (attributes/reflection); this demo hard-wires
    // the single known function.
    DateTime callCurrentDate = DateTime.Now;
    if (responseObject1.Message.ToolCalls.Count > 0)
    {
        var toolCall = responseObject1.Message.ToolCalls[0];
        if (toolCall.Function?.Name == "GetCurrentTime")
        {
            Dictionary<string, string>? parameters = toolCall.Function.Arguments;

            var city = parameters?["city"] ?? "上海";

            // Invoke the local tool function.
            callCurrentDate = DateTools.GetCurrentTime(city);
        }
    }

    // Append the tool result to the history as a "tool" message.
    var c = @"{Tool:" + responseObject1.Message.ToolCalls[0].Function!.Name + "(city:" + responseObject1.Message.ToolCalls[0].Function?.Arguments!["city"] + ")" + "Result:" + callCurrentDate.ToString("yyyy-MM-dd HH:mm:ss") + "}";
    chatMessages.Add(new Message()
    {
        role = "tool",
        content = c,
        tool_calls = tool_Calls,
    });

    // Step 4: send the tool result back so the model can produce its final reply.
    var requestData2 = new
    {
        model = ModelSelecter.ModelWithTool,
        messages = chatMessages,
        stream = false,
    };

    using var requestMessage2 = new HttpRequestMessage(HttpMethod.Post, "/api/chat")
    {
        // BUG FIX: the original posted the first payload (requetData) again —
        // re-sending the tools and tool_choice — which is why the follow-up
        // never behaved as intended. Post the follow-up payload instead.
        Content = JsonContent.Create(requestData2)
    };
    using var responseMessage2 = await httpClient.SendAsync(requestMessage2);
    responseMessage2.EnsureSuccessStatusCode();

    // The model should now answer using the tool result.
    var lastContentText = await responseMessage2.Content.ReadAsStringAsync();

    _testOutput.WriteLine(lastContentText);
}
|
|
|
#endregion
|
|
|
|
|
|
#region 列出本地模型
|
|
|
/// <summary>
/// Verifies that the Ollama server reports at least one locally installed model.
/// </summary>
[Fact]
public async Task List_Local_Models_Test()
{
    // GET /api/tags returns the list of locally available models.
    var responseModels = await httpClient.GetFromJsonAsync<ListModelResponse>("/api/tags");

    Assert.NotNull(responseModels);
    Assert.True(responseModels.Models.Count > 0);

    _testOutput.WriteLine($"Ollama服务中,共有 {responseModels.Models.Count} 个模型");
}
|
|
|
|
|
|
/// <summary>
/// Checks that at least one local model's name starts with the given prefix
/// (case-insensitive).
/// </summary>
/// <param name="modelName">Model-name prefix to look for, e.g. "llama".</param>
[Theory]
[InlineData("llama")]
[InlineData("qwen")]
public async Task Models_Exsitis_Test(string modelName)
{
    var responseModels = await httpClient.GetFromJsonAsync<ListModelResponse>("/api/tags");

    Assert.NotNull(responseModels);

    // Find the first model whose name matches the requested prefix.
    var match = responseModels.Models
        .FirstOrDefault(m => m.Model.StartsWith(modelName, StringComparison.OrdinalIgnoreCase));
    Assert.NotNull(match);

    _testOutput.WriteLine($"Ollama服务中,共有 {responseModels.Models.Count} 个模型, 包含{modelName}");
}
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
#region 列出运行中的模型
|
|
|
/// <summary>
/// Lists the models currently loaded into memory (GET /api/ps) and logs the count.
/// </summary>
[Fact]
public async Task List_Running_Models_Test()
{
    // /api/ps reports running (loaded) models rather than installed ones.
    var responseModels = await httpClient.GetFromJsonAsync<ListModelResponse>("/api/ps");

    Assert.NotNull(responseModels);

    _testOutput.WriteLine($"Ollama 服务中,共有 {responseModels.Models.Count} 个,正运行中的模型");
}
|
|
|
#endregion
|
|
|
|
|
|
#region 拉取模型
|
|
|
/// <summary>
/// Pulls the configured model from the model library (POST /api/pull) and writes
/// each streamed progress line to the test output.
/// </summary>
[Fact]
public async Task Pull_Models_Test()
{
    var requetData = new
    {
        model = defaultOllamaOption.Model,

        // (Optional) Allow insecure connections to the library.
        // Only use this during development when pulling from your own library.
        insecure = true,

        // (Optional) If false, the response is returned as a single response object
        // instead of a stream of objects.
        stream = true,
    };

    using var request = new HttpRequestMessage(HttpMethod.Post, "/api/pull")
    {
        Content = JsonContent.Create(requetData)
    };

    // ResponseHeadersRead lets us start consuming the body as a stream
    // before the (potentially long) pull finishes.
    var response = await httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead);
    response.EnsureSuccessStatusCode();

    using var responseStream = await response.Content.ReadAsStreamAsync();
    using var reader = new StreamReader(responseStream);

    // Read the streaming response line by line.
    // Note: StreamReader.EndOfStream performs a blocking synchronous read on
    // network streams, so loop on ReadLineAsync() returning null instead.
    string? line;
    while ((line = await reader.ReadLineAsync()) is not null)
    {
        _testOutput.WriteLine(line);
    }
}
|
|
|
#endregion
|
|
|
|
|
|
#region 显示模型详情
|
|
|
/// <summary>
/// Shows details for the default model (POST /api/show), logs the raw JSON,
/// and checks the response deserializes.
/// </summary>
[Fact]
public async Task Show_Models_Test()
{
    var requetData = new
    {
        model = defaultOllamaOption.Model,

        // (Optional) When true, the response returns full data for verbose fields.
        verbose = false,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/show")
    {
        Content = JsonContent.Create(requetData)
    };

    var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    // Log the raw body (this variable was previously read but never used).
    var responseText = await responseMessage.Content.ReadAsStringAsync();
    _testOutput.WriteLine(responseText);

    // The buffered content can be read a second time for deserialization.
    var responseObject = await responseMessage.Content.ReadFromJsonAsync<ResponseModelInfo>();

    Assert.NotNull(responseObject);
    _testOutput.WriteLine(responseObject.ToString());
}
|
|
|
#endregion
|
|
|
|
|
|
#region 加载模型
|
|
|
/// <summary>
/// Loads the default model into memory by posting a generate request without a
/// prompt; the test expects an empty Response field for a load-only request.
/// </summary>
[Fact]
public async Task Load_Model_Test()
{
    // A generate request carrying only the model name loads the model.
    var payload = new
    {
        model = defaultOllamaOption.Model,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(payload)
    };

    var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject);
    Assert.True(string.IsNullOrEmpty(responseObject.Response));
}
|
|
|
|
|
|
/// <summary>
/// Requests generation with a model name that cannot exist and expects the
/// request pipeline to throw.
/// </summary>
[Fact]
public async Task Load_NonExistent_Model_Test()
{
    // A fresh GUID is effectively guaranteed not to match any installed model.
    var payload = new
    {
        model = Guid.NewGuid().ToString(),
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(payload)
    };

    // Local function replaces the Func<Task> delegate; semantics are identical.
    async Task SendRequest()
    {
        var responseMessage = await httpClient.SendAsync(requestMessage);
        responseMessage.EnsureSuccessStatusCode();

        await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();
    }

    await Assert.ThrowsAnyAsync<Exception>(SendRequest);
}
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
#region 卸载模型
|
|
|
/// <summary>
/// Unloads the default model by posting a generate request with keep_alive = 0
/// and expects the server to report done_reason "unload".
/// </summary>
[Fact]
public async Task UnLoad_Model_Test()
{
    var payload = new
    {
        model = defaultOllamaOption.Model,

        // keep_alive = 0 asks the server to evict the model immediately.
        keep_alive = 0,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(payload)
    };

    var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();

    Assert.NotNull(responseObject);
    Assert.Equal("unload", responseObject.done_reason);

    _testOutput.WriteLine($"响应内容:{System.Text.Json.JsonSerializer.Serialize(responseObject)}");
}
|
|
|
|
|
|
/// <summary>
/// Attempts to unload a model name that cannot exist and expects the request
/// pipeline to throw.
/// </summary>
[Fact]
public async Task UnLoad_NonExistent_Model_Test()
{
    // Random GUID: guaranteed-unknown model name.
    var payload = new
    {
        model = Guid.NewGuid().ToString(),
        keep_alive = 0,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/generate")
    {
        Content = JsonContent.Create(payload)
    };

    // Local function replaces the Func<Task> delegate; semantics are identical.
    async Task SendRequest()
    {
        var responseMessage = await httpClient.SendAsync(requestMessage);
        responseMessage.EnsureSuccessStatusCode();

        await responseMessage.Content.ReadFromJsonAsync<GenerateResponse>();
    }

    await Assert.ThrowsAnyAsync<Exception>(SendRequest);
}
|
|
|
|
|
|
#endregion
|
|
|
|
|
|
#region 生成嵌入向量
|
|
|
|
|
|
/// <summary>
/// Generates an embedding vector for a single input string (POST /api/embed).
/// </summary>
/// <returns></returns>
[Fact]
public async Task Embed_Request_Test()
{
    var payload = new
    {
        model = ModelSelecter.ModelWithEmbedding,
        input = "为什么天空是蓝色的?",

        stream = false,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/embed")
    {
        Content = JsonContent.Create(payload)
    };

    var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    // Log the raw JSON, then deserialize the buffered content.
    var responseContent = await responseMessage.Content.ReadAsStringAsync();
    _testOutput.WriteLine(responseContent);

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<EmbedResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.embeddings);
}
|
|
|
|
|
|
/// <summary>
/// Generates embedding vectors for multiple input strings in a single
/// request (POST /api/embed with an input array).
/// </summary>
/// <returns></returns>
[Fact]
public async Task MultipleInput_Embed_Request_Test()
{
    var payload = new
    {
        model = ModelSelecter.ModelWithEmbedding,
        // Several inputs can be embedded in one call by passing an array.
        input = new[] { "为什么天空是蓝色的?", "北京现在的温度是多少?" },

        stream = false,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/embed")
    {
        Content = JsonContent.Create(payload)
    };

    var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    // Log the raw JSON, then deserialize the buffered content.
    var responseContent = await responseMessage.Content.ReadAsStringAsync();
    _testOutput.WriteLine(responseContent);

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<EmbedResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.embeddings);
}
|
|
|
#endregion
|
|
|
|
|
|
#region 生成单个嵌入向量(被取代)
|
|
|
|
|
|
/// <summary>
/// Generates a single embedding vector via the legacy /api/embeddings endpoint.
/// [Obsolete — superseded by /api/embed]
/// </summary>
/// <returns></returns>
[Fact]
public async Task Request_WithEmbeddings_Test()
{
    // The legacy endpoint takes "prompt" instead of "input".
    var payload = new
    {
        model = ModelSelecter.ModelWithEmbedding,
        prompt = "这是一篇关于羊驼的文章...",

        stream = false,
    };

    using var requestMessage = new HttpRequestMessage(HttpMethod.Post, "/api/embeddings")
    {
        Content = JsonContent.Create(payload)
    };

    var responseMessage = await httpClient.SendAsync(requestMessage);
    responseMessage.EnsureSuccessStatusCode();

    // Log the raw JSON, then deserialize the buffered content.
    var responseContent = await responseMessage.Content.ReadAsStringAsync();
    _testOutput.WriteLine(responseContent);

    var responseObject = await responseMessage.Content.ReadFromJsonAsync<EmbeddingsResponse>();

    Assert.NotNull(responseObject);
    Assert.NotNull(responseObject.embedding);
}
|
|
|
#endregion
|
|
|
|
|
|
#region 版本信息
|
|
|
/// <summary>
/// Reads the server version (GET /api/version) and checks it parses to a
/// System.Version with a non-zero component.
/// </summary>
[Fact]
public async Task Get_Version_Test()
{
    var responseOllamaVersion = await httpClient.GetFromJsonAsync<ResponseOllamaVersion>("/api/version");

    var version = responseOllamaVersion?.ToSystemVersion();
    _testOutput.WriteLine($"Ollama 版本为 {responseOllamaVersion?.Version}");

    Assert.NotNull(version);
    // At least one of major/minor/build must be positive for a real version.
    Assert.True((version.Major + version.Minor + version.Build) > 0);
}
|
|
|
|
|
|
#endregion
|
|
|
}
|