.net 9
ollama 测试模型 qwen3 4B
Server 端依赖包 ModelContextProtocol.AspNetCore (0.3.0-preview.4)
客户端依赖包 ModelContextProtocol.AspNetCore (0.2.0-preview.3),ModelContextProtocol.Core (0.2.0-preview.3), OllamaSharp , OllamaSharp.ModelContextProtocol
本示例 Ollama包5.3.6
客户端使用低版本 ModelContextProtocol 是因为 Ollama 最新包 5.3.6 中调用工具需要用到 ModelContextProtocol.Core.Protocol.CallToolResponse 类。该类在 0.2.0-preview.3 之后的版本中被移除，由 CallToolResult 替代。
本地模型建议 1.5B 以上。1.5B 的大部分模型不支持直接把 tools 传给模型，但可以要求模型按指定格式生成调用，再自行解析。不过 1.5B 的生成跟抽风一样，一会好一会坏；而我在 4B 上测了很多次都很稳定。
服务端工具代码
using System.ComponentModel;
using ModelContextProtocol.Server;

namespace test_server.tools;

/// <summary>
/// MCP server tool that searches a directory tree for files by name.
/// Discovered automatically via <c>WithToolsFromAssembly()</c>.
/// </summary>
[McpServerToolType]
public class FileTool
{
    /// <summary>
    /// Recursively searches <paramref name="baseDirectory"/> for files whose
    /// names contain <paramref name="fileName"/> as a substring.
    /// </summary>
    /// <param name="baseDirectory">Root directory to search under.</param>
    /// <param name="fileName">Substring to match in file names.</param>
    /// <returns>
    /// A human-readable (Chinese) message listing the matches, or explaining
    /// why nothing was found. Errors are reported as the exception message
    /// rather than thrown, so the model always gets a usable string back.
    /// </returns>
    [McpServerTool(Name = "文件查找"), Description("在指定的目录中查找文件")]
    public string FindFileInDirectory([Description("查找文件的基础目录")] string baseDirectory
        , [Description("需要查找的文件名")] string fileName)
    {
        try
        {
            // FIX: an empty fileName would make the search pattern "**" and
            // match every file under baseDirectory — never what the caller wants.
            if (string.IsNullOrWhiteSpace(fileName))
            {
                return "需要查找的文件名不能为空!";
            }
            if (!Directory.Exists(baseDirectory))
            {
                return $"查询的目录{baseDirectory}不存在!";
            }
            var fs = Directory.GetFiles(baseDirectory, $"*{fileName}*", SearchOption.AllDirectories);
            if (fs.Length == 0)
            {
                return $"在目录{baseDirectory}中没有找到文件名包含{fileName}的任何文件!";
            }
            return $"在目录{baseDirectory}中找到文件名包含{fileName}的文件{fs.Length}个 :\r\n{string.Join("\r\n", fs)}";
        }
        catch (Exception ex)
        {
            // Log full details (type, message, stack trace) locally; return
            // only the message to the model so the tool call still completes.
            Console.WriteLine(ex);
            return ex.Message;
        }
    }
}
服务端启动代码(Program.cs)
using Microsoft.AspNetCore.Builder;
using Microsoft.Extensions.DependencyInjection;

// Compose the MCP server: HTTP transport, with tools discovered by scanning
// this assembly for [McpServerToolType] classes.
var builder = WebApplication.CreateBuilder(args);

builder.Services
    .AddMcpServer()
    .WithHttpTransport()
    .WithToolsFromAssembly();

var app = builder.Build();

// Plain liveness endpoint, unrelated to MCP itself.
app.MapGet("/run", () => "Hello MCP Server!");

// Expose the MCP endpoints the client transport connects to.
app.MapMcp();

app.Run();
客户端调用(Program.cs)
// Connect to the MCP server over SSE and enumerate its tools.
var sseTran = new SseClientTransport(new SseClientTransportOptions
{
    Name = "Test Tools",
    Endpoint = new Uri("http://localhost:5000")
});

await using var mcpClient = await McpClientFactory.CreateAsync(sseTran);
var mcpTools = await mcpClient.ListToolsAsync();

// Wrap each MCP tool so OllamaSharp can route tool calls back through the MCP client.
var tools = new List<OllamaSharp.ModelContextProtocol.Server.McpClientTool>();
foreach (var tool in mcpTools)
{
    Console.WriteLine($"{tool.Name} ({tool.Description})");
    tools.Add(new OllamaSharp.ModelContextProtocol.Server.McpClientTool(tool, mcpClient));
}

// FIX: `uri` was used below but never declared — the snippet did not compile.
// Default local Ollama endpoint; adjust if your instance runs elsewhere.
var uri = new Uri("http://localhost:11434");

// A custom HttpClient is only needed to raise the timeout: local model
// generation can be slow, and the default timeout is too short.
var hc = new HttpClient
{
    Timeout = TimeSpan.FromMinutes(5),
    BaseAddress = uri
};
var ollama = new OllamaApiClient(hc, "qwen3:4b");
ollama.Config.Uri = uri;

// NOTE(review): these options are constructed but never passed to the Chat
// below, so they currently have no effect — confirm whether OllamaSharp's
// Chat/SendAsync should receive them.
var chatOptions = new ChatOptions
{
    Tools = [.. mcpTools],
    Temperature = 0.6f,
    AllowMultipleToolCalls = true,
    ToolMode = ChatToolMode.Auto,
    MaxOutputTokens = 256, // without a cap the model rambles badly (qwen3 4B)
};

var chat = new Chat(ollama);

// FIX: set the console encoding once, not on every loop iteration.
System.Console.InputEncoding = System.Text.Encoding.UTF8;

while (true)
{
    Console.Write("User >");
    // FIX: ReadLine() returns null on EOF; string? + IsNullOrWhiteSpace
    // handles both null and blank input explicitly.
    string? content = Console.ReadLine();
    if (string.IsNullOrWhiteSpace(content))
    {
        continue;
    }
    Console.Write("AI >");
    await foreach (var answerToken in chat.SendAsync(content, tools))
    {
        Console.Write(answerToken);
    }
}
