前言
C#手寫Ollama服務交互,實現本地模型對話
最近使用C#調用OllamaSharp庫實現Ollama本地對話,然后思考著能否自己實現這個功能。經過一番查找,并查看OllamaSharp源碼后,發現確實可以。其實就是開啟Ollama服務后,發送HTTP請求,獲取返回結果,再進行一些數據處理。
基本流程
1、啟動Ollama服務進程。
2、創建HttpClient對象。
3、創建請求體(參數:模型名稱、提示語、是否流式生成)。
4、將請求體序列化為JSON。
5、創建HTTP請求內容。
6、發送POST請求,并確保請求成功。
7、讀取并返回響應內容,并解析響應字符串。
8、返回結果。
//創建請求體:模型名稱、提示語、是否流式生成
var request = new RequestModel { Model = model, Prompt = prompt, Stream = false };
// 將請求體序列化為JSON
var json = JsonSerializer.Serialize(request);
// 創建HTTP請求內容
var content = new StringContent(json, Encoding.UTF8, "application/json");
// 發送POST請求
var response = await _httpClient.PostAsync("/api/generate", content);
// 確保請求成功
response.EnsureSuccessStatusCode();
// 讀取并返回響應內容
string responseString = await response.Content.ReadAsStringAsync();
// 解析響應字符串
ResponseModel results = JsonSerializer.Deserialize<ResponseModel>(responseString);
// 返回結果
return results.Response;
項目結構
OllamaClient :實現基本的對話請求、獲取模型列表功能。
Model :創建模型結果的一些參數
RequestModel:請求參數模型
ResponseModel:結果參數模型,用于解析返回的結果。
MainWindow:用戶界面
MainWindowViewModel:界面交互業務處理
案例
模型加載
發送聊天
代碼
OllamaSharp
Ollama客戶端 OllamaClient
public class OllamaClient
{
    /// <summary>Cached result of the most recent <see cref="ListLocalModelsAsync"/> call.</summary>
    public IEnumerable<Model> ModelList { get; set; }

    private readonly HttpClient _httpClient;

    /// <summary>
    /// Creates a client bound to the given Ollama base address and runs
    /// "ollama list" once, which starts the Ollama background service if it
    /// is not already running.
    /// </summary>
    public OllamaClient(string baseAddress = "http://localhost:11434")
    {
        _httpClient = new HttpClient
        {
            BaseAddress = new Uri(baseAddress)
        };
        ExecuteCommand("ollama list"); // side effect: starts the Ollama service
    }

    /// <summary>
    /// Generates a complete (non-streamed) answer for the prompt via POST /api/generate.
    /// </summary>
    /// <param name="model">Model name, e.g. "deepseek-r1:1.5b".</param>
    /// <param name="prompt">Prompt text to send.</param>
    /// <returns>The full response text.</returns>
    /// <exception cref="InvalidOperationException">Wraps any HTTP failure; the original HttpRequestException is preserved as InnerException.</exception>
    public async Task<string> GenerateTextAsync(string model, string prompt)
    {
        try
        {
            // Request body: model name, prompt, non-streaming
            var request = new RequestModel
            {
                Model = model,
                Prompt = prompt,
                Stream = false
            };
            // Serialize the request body to JSON
            var json = JsonSerializer.Serialize(request);
            // Build the HTTP content with an explicit JSON content type
            var content = new StringContent(json, Encoding.UTF8, "application/json");
            // Send the POST request and fail fast on a non-success status
            var response = await _httpClient.PostAsync("/api/generate", content);
            response.EnsureSuccessStatusCode();
            // Read and parse the response body
            string responseString = await response.Content.ReadAsStringAsync();
            ResponseModel results = JsonSerializer.Deserialize<ResponseModel>(responseString);
            return results.Response;
        }
        catch (HttpRequestException e)
        {
            // FIX: the original threw a bare `new Exception(...)`, discarding the
            // inner exception and stack. Keep the cause as InnerException instead.
            throw new InvalidOperationException($"Request failed: {e.Message}", e);
        }
    }

    /// <summary>
    /// Streams the answer token-by-token. Ollama sends one JSON object per
    /// line; each line is parsed and its Response fragment yielded.
    /// </summary>
    /// <param name="model">Model name.</param>
    /// <param name="prompt">Prompt text to send.</param>
    public async IAsyncEnumerable<string> StreamGenerateTextAsync(string model, string prompt)
    {
        // Request body: model name, prompt, streaming enabled
        var request = new RequestModel
        {
            Model = model,
            Prompt = prompt,
            Stream = true
        };
        var json = JsonSerializer.Serialize(request);
        var content = new StringContent(json, Encoding.UTF8, "application/json");
        using var response = await _httpClient.PostAsync("/api/generate", content);
        response.EnsureSuccessStatusCode();
        // Read the response as a stream and parse it line by line
        using var stream = await response.Content.ReadAsStreamAsync();
        using var reader = new StreamReader(stream);
        while (!reader.EndOfStream)
        {
            var line = await reader.ReadLineAsync();
            if (!string.IsNullOrEmpty(line))
            {
                var partial = JsonSerializer.Deserialize<ResponseModel>(line);
                yield return partial.Response;
            }
        }
    }

    /// <summary>
    /// Fetches the locally installed models from GET /api/tags, caches them
    /// in <see cref="ModelList"/> and returns them.
    /// </summary>
    public async Task<IEnumerable<Model>> ListLocalModelsAsync()
    {
        HttpResponseMessage responseMessage = await _httpClient.GetAsync("/api/tags").ConfigureAwait(false);
        responseMessage.EnsureSuccessStatusCode();
        string response = await responseMessage.Content.ReadAsStringAsync();
        LocalModels localModel = JsonSerializer.Deserialize<LocalModels>(response);
        // FIX: removed the original `await Task.Delay(3000)` — it served no
        // purpose and added a fixed 3-second stall to every model-list call.
        ModelList = localModel.Models;
        return localModel.Models;
    }

    /// <summary>
    /// Runs a command through cmd.exe (used to start the Ollama service).
    /// </summary>
    /// <param name="command">The command line passed to `cmd.exe /C`.</param>
    /// <returns>True when the process exits with code 0; false on failure.</returns>
    public static bool ExecuteCommand(string command)
    {
        ProcessStartInfo processStartInfo = new ProcessStartInfo
        {
            FileName = "cmd.exe",            // launch via the shell
            Arguments = $"/C {command}",     // /C runs the command then exits
            UseShellExecute = true,          // use the OS shell to start it
            CreateNoWindow = false,          // show the console window
        };
        try
        {
            using Process process = Process.Start(processStartInfo);
            process.WaitForExit();
            // FIX: read ExitCode BEFORE disposal — the original called
            // process.Close() first, which makes ExitCode throw
            // InvalidOperationException instead of returning the code.
            return process.ExitCode == 0;
        }
        catch (Exception ex)
        {
            Debug.WriteLine($"發生錯誤: {ex.Message}");
            return false;
        }
    }
}
請求模型:RequestModel
/// <summary>
/// Request payload for the Ollama POST /api/generate endpoint.
/// </summary>
public class RequestModel
{
    /// <summary>Model name, e.g. "deepseek-r1:1.5b".</summary>
    [JsonPropertyName("model")]
    public string Model { get; set; }

    /// <summary>Prompt text sent to the model.</summary>
    [JsonPropertyName("prompt")]
    public string Prompt { get; set; }

    /// <summary>True to stream the answer line by line; false for a single response.</summary>
    [JsonPropertyName("stream")]
    public bool Stream { get; set; }
}
響應模型:ResponseModel
/// <summary>
/// One response object from /api/generate. For a streamed request the server
/// emits one of these per line; for a non-streamed request, a single object.
/// Field names map 1:1 to the snake_case JSON keys Ollama returns.
/// </summary>
public class ResponseModel
{
    /// <summary>Name of the model that produced this response.</summary>
    [JsonPropertyName("model")]
    public string Model { get; set; }

    /// <summary>Creation timestamp as reported by the server.</summary>
    [JsonPropertyName("created_at")]
    public string CreatedTime { get; set; }

    /// <summary>The generated text (a fragment when streaming).</summary>
    [JsonPropertyName("response")]
    public string Response { get; set; }

    /// <summary>True on the final object of a generation.</summary>
    [JsonPropertyName("done")]
    public bool Done { get; set; }

    /// <summary>Why generation stopped (only set when Done is true).</summary>
    [JsonPropertyName("done_reason")]
    public string Done_Reason { get; set; }

    /// <summary>Conversation context tokens, usable for follow-up requests.</summary>
    [JsonPropertyName("context")]
    public List<int> Context { get; set; }

    /// <summary>Total wall time of the request, in nanoseconds.</summary>
    [JsonPropertyName("total_duration")]
    public long TotalDuration { get; set; }

    /// <summary>Model load time, in nanoseconds.</summary>
    [JsonPropertyName("load_duration")]
    public long LoadDuration { get; set; }

    /// <summary>Number of prompt tokens evaluated.</summary>
    [JsonPropertyName("prompt_eval_count")]
    public long PromptEvalCount { get; set; }

    /// <summary>Time spent evaluating the prompt, in nanoseconds.</summary>
    [JsonPropertyName("prompt_eval_duration")]
    public long PromptEvalDuration { get; set; }

    /// <summary>Number of output tokens generated.</summary>
    [JsonPropertyName("eval_count")]
    public long EvalCount { get; set; }

    /// <summary>Time spent generating output, in nanoseconds.</summary>
    [JsonPropertyName("eval_duration")]
    public long EvalDuration { get; set; }
}
結果模型:LocalModels | Model
/// <summary>
/// Envelope for the GET /api/tags response: the server wraps the installed
/// model list in a top-level "models" array.
/// </summary>
public class LocalModels
{
    /// <summary>The locally installed models.</summary>
    [JsonPropertyName("models")]
    public IEnumerable<Model> Models { get; set; }
}
/// <summary>
/// One installed model, as listed by GET /api/tags.
/// </summary>
public class Model
{
    /// <summary>Display name of the model.</summary>
    [JsonPropertyName("name")]
    public string Name { get; set; }

    /// <summary>Full model identifier, e.g. "deepseek-r1:1.5b".</summary>
    [JsonPropertyName("model")]
    public string ModelName { get; set; }

    /// <summary>Last modification time of the model on disk.</summary>
    [JsonPropertyName("modified_at")]
    public DateTime ModifiedAt { get; set; }

    /// <summary>Size on disk, in bytes.</summary>
    [JsonPropertyName("size")]
    public long Size { get; set; }

    /// <summary>Content digest identifying the model blob.</summary>
    [JsonPropertyName("digest")]
    public string Digest { get; set; }

    /// <summary>Nested detail block describing the model build.</summary>
    [JsonPropertyName("details")]
    public ModelDetails Details { get; set; }
}

/// <summary>
/// Detail block of a model entry from /api/tags.
/// </summary>
public class ModelDetails
{
    /// <summary>Parent model this one was derived from (may be empty).</summary>
    [JsonPropertyName("parent_model")]
    public string ParentModel { get; set; }

    /// <summary>On-disk format, e.g. "gguf".</summary>
    [JsonPropertyName("format")]
    public string Format { get; set; }

    /// <summary>Model family, e.g. "llama".</summary>
    [JsonPropertyName("family")]
    public string Family { get; set; }

    /// <summary>All families the model belongs to.</summary>
    [JsonPropertyName("families")]
    public List<string> Families { get; set; }

    /// <summary>Parameter count as a string, e.g. "1.5B".</summary>
    [JsonPropertyName("parameter_size")]
    public string ParameterSize { get; set; }

    /// <summary>Quantization level, e.g. "Q4_K_M".</summary>
    [JsonPropertyName("quantization_level")]
    public string QuantizationLevel { get; set; }
}
簡單的界面
MainWindow
<!-- Bind the view model as the window's DataContext; x:Name exposes it to code-behind. -->
<Window.DataContext><local:MainWindowViewModel x:Name="ViewModel"/>
</Window.DataContext>
<!-- Layout: row 0 = model picker, row 1 = conversation output, row 2 = input box + send button. -->
<Grid><Grid.RowDefinitions><RowDefinition Height="50"/><RowDefinition Height="*"/><RowDefinition Height="300"/></Grid.RowDefinitions><Grid Grid.Row="0"><WrapPanel VerticalAlignment="Center" Margin="5"><Label Content="模型列表" Margin="5"/><ComboBox Width="200" Margin="5" Name="ModelListBox"ItemsSource="{Binding ModelCollection}"SelectedItem="{Binding SelectedModel}"/></WrapPanel></Grid><Grid Grid.Row="1"><TextBox x:Name="OutputBox" Text="{Binding OutputText}"ScrollViewer.HorizontalScrollBarVisibility="Visible"ScrollViewer.VerticalScrollBarVisibility="Visible"/></Grid><Grid Grid.Row="2"><Grid.RowDefinitions><RowDefinition Height="*"/><RowDefinition Height="50"/></Grid.RowDefinitions><TextBox Grid.Row="0" x:Name="InputBox" Background="#AAAAAA"Text="{Binding InputText}"TextWrapping="WrapWithOverflow"ScrollViewer.VerticalScrollBarVisibility="Auto"ScrollViewer.HorizontalScrollBarVisibility="Auto" ></TextBox><WrapPanel Grid.Row="1" HorizontalAlignment="Right" VerticalAlignment="Center" Margin="5"><Button Grid.Row="1" Width="100" Height="30" x:Name="Btn_Submit" Command="{Binding SendQuestionCommand}">發送</Button></WrapPanel></Grid>
</Grid>
MainWindowViewModel
public class MainWindowViewModel : PropertyChangedBase
{
    #region Fields & properties

    private string _inputText = "";                        // text the user is typing
    private string _outputText = "";                       // accumulated conversation log
    private OllamaClient _ollama;                          // HTTP client for the Ollama service
    private string _selectedModel = "deepseek-r1:1.5b";    // currently selected model
    private ObservableCollection<string> _modelCollection; // names of installed models

    /// <summary>Model names shown in the ComboBox.</summary>
    public ObservableCollection<string> ModelCollection
    {
        get => _modelCollection;
        set
        {
            if (_modelCollection != value)
            {
                _modelCollection = value;
                OnPropertyChanged();
            }
        }
    }

    /// <summary>The model used for generation.</summary>
    public string SelectedModel
    {
        get => _selectedModel;
        set
        {
            if (_selectedModel != value)
            {
                _selectedModel = value;
                OnPropertyChanged();
            }
        }
    }

    private OllamaClient Ollama { get => _ollama; }

    /// <summary>Conversation text bound to the output pane.</summary>
    public string OutputText
    {
        get => _outputText;
        set
        {
            if (_outputText != value)
            {
                _outputText = value;
                OnPropertyChanged();
            }
        }
    }

    /// <summary>Text bound to the input box.</summary>
    public string InputText
    {
        get => _inputText;
        set
        {
            if (_inputText != value)
            {
                _inputText = value;
                OnPropertyChanged();
            }
        }
    }

    /// <summary>Command bound to the send button.</summary>
    public ICommand SendQuestionCommand { get; set; }

    #endregion

    public MainWindowViewModel()
    {
        Initialize();
    }

    /// <summary>
    /// Synchronous setup. The model list is loaded fire-and-forget:
    /// the original blocked on Task.Result here, which can deadlock the
    /// UI thread's SynchronizationContext and froze startup regardless.
    /// </summary>
    private void Initialize()
    {
        _ollama = new OllamaClient();
        _modelCollection = new ObservableCollection<string>();
        SelectedModel = "deepseek-r1:1.5b";
        SendQuestionCommand = new ParameterlessCommand(OnSendQuestion);
        _ = LoadModelsAsync();
    }

    /// <summary>Fetches the installed models and lists them in the output pane.</summary>
    private async Task LoadModelsAsync()
    {
        try
        {
            var models = await Ollama.ListLocalModelsAsync();
            AppendLine($"模型列表;{Environment.NewLine}");
            foreach (var model in models)
            {
                ModelCollection.Add(model.ModelName);
                AppendLine($"{model.ModelName},{FormatFileSize(model.Size)}\r\n");
            }
        }
        catch (Exception ex)
        {
            AppendText($"Error: {ex.Message}");
        }
    }

    /// <summary>
    /// Formats a byte count as a human-readable size.
    /// Uses floating-point division so fractional values survive — the
    /// original divided a long, so "{bytes:0.##}" never showed decimals.
    /// </summary>
    private string FormatFileSize(long bytes)
    {
        string[] sizes = { "B", "KB", "MB", "GB", "TB" };
        double size = bytes;
        int order = 0;
        while (size >= 1024 && order < sizes.Length - 1)
        {
            order++;
            size /= 1024;
        }
        return $"{size:0.##} {sizes[order]}";
    }

    /// <summary>
    /// Sends the current input and streams the model's answer into the
    /// output pane. async void is acceptable only because this is a
    /// top-level command handler; all exceptions are caught locally.
    /// </summary>
    public async void OnSendQuestion()
    {
        try
        {
            AppendLine($"【用戶】{InputText}\r\n\r\n");
            AppendLine($"【AI】\r\n\r\n");
            await foreach (var answerToken in Ollama.StreamGenerateTextAsync(SelectedModel, InputText))
            {
                AppendText(answerToken);
            }
            AppendLine($"\r\n");
        }
        catch (Exception ex)
        {
            // FIX: the original interpolated string was split across a raw
            // newline, which is not valid in a non-verbatim literal.
            AppendText($"Error: {ex.Message}");
        }
    }

    /// <summary>Appends raw text to the output pane. (No awaits — the original's `async void` was a warning-level bug.)</summary>
    private void AppendText(string text)
    {
        Debug.Print($"{text}");
        OutputText += text;
    }

    /// <summary>Appends a line of text to the output pane.</summary>
    private void AppendLine(string text)
    {
        Debug.Print($"{text}");
        OutputText += $"{text}\r\n";
    }
}
/// <summary>
/// Minimal INotifyPropertyChanged base class for view models.
/// </summary>
public class PropertyChangedBase : INotifyPropertyChanged
{
    /// <summary>Raised whenever a bound property value changes.</summary>
    public event PropertyChangedEventHandler PropertyChanged;

    /// <summary>
    /// Raises <see cref="PropertyChanged"/>; when called from a property
    /// setter the name is filled in automatically by the compiler.
    /// </summary>
    protected void OnPropertyChanged([CallerMemberName] string propertyName = null)
        => PropertyChanged?.Invoke(this, new PropertyChangedEventArgs(propertyName));
}
總結
案例代碼實現了與Ollama的HTTP交互,通過使用HttpClient、JSON序列化和錯誤處理,提供了一個簡潔的異步文本生成接口。適合直接調用本地Ollama服務的場景,更多功能,可以后續拓展。