C#测试调用LM Studio服务接口
使用LM Studio部署本地DeepSeek模型后,除了在LM Studio的聊天界面与大模型对话之外,还可以调用LM Studio提供的服务接口自编程序调用。如下图所示,在开发者界面点击Settings,在弹出的窗口中启动“在网络中提供服务”,同时在标“4”处设置Status为Running,此时就可以调用LM Studio服务接口了。标“5”处显示了服务基础地址,标“6”处显示了可用服务地址。
本文测试在Winform中调用LM Studio服务接口与大模型对话的基本用法,一开始先测试了是否能够用Ater.DeepSeek.AspNetCore包调用本地模型,结果不行。于是决定使用之前用的Flurl包基于HTTP通信调用服务接口。
主要测试获取模型列表(v1/models)和聊天补全(v1/chat/completions)两种服务,前者不需要输入参数,后者的话LM Studio中给出了输入参数示例,在postman中也能看到返回结果样式。聊天补全接口的调用示例如下:
curl http://localhost:1234/v1/chat/completions \
-H "Content-Type: application/json" \
-d '{
"model": "deepseek-r1-distill-qwen-1.5b",
"messages": [
{ "role": "system", "content": "Always answer in rhymes. Today is Thursday" },
{ "role": "user", "content": "What day is it today?" }
],
"temperature": 0.7,
"max_tokens": -1,
"stream": false
}'
在项目中定义对应的类(对话结果返回类只定义了部分内容,没有定义全),主要代码如下所示:
// Metadata for one model entry returned by the /v1/models endpoint.
public class LMStudioModel
{
    // Model identifier, e.g. "deepseek-r1-distill-qwen-1.5b".
    public string Id { get; set; } = "";

    // Object type tag reported by the API (typically "model").
    public string Object { get; set; } = "";

    // Owner reported by the API; bound to the JSON key "owned_by"
    // through case-insensitive deserialization.
    public string Owned_by { get; set; } = "";
}
// Envelope for the /v1/models response: an object tag plus the model list.
public class LMStudioModels
{
    // Object type tag (typically "list").
    public string Object { get; set; } = "";

    // Models currently available in LM Studio.
    public List<LMStudioModel> Data { get; set; } = new List<LMStudioModel>();
}
// One message in a conversation, used both in requests and in answers.
// Lowercase property names deliberately match the JSON wire format
// ("role"/"content") expected by the chat completions endpoint.
public class ChatMessage
{
    // Sender role: "system", "user" or "assistant".
    public string role { get; set; } = "";

    // Message text.
    public string content { get; set; } = "";
}
// Request body for POST /v1/chat/completions.
// Lowercase property names deliberately match the JSON wire format.
public class ChatQuestion
{
    // Model id to use, e.g. "deepseek-r1-distill-qwen-1.5b".
    public string model { get; set; } = string.Empty;

    // false = return the whole answer at once; true = server-sent stream.
    public bool stream { get; set; } = false;

    // -1 lets the server decide the token limit (per the LM Studio example).
    public int max_tokens { get; set; } = -1;

    // Sampling temperature; 0.7 matches the LM Studio sample request,
    // which the original class omitted.
    public double temperature { get; set; } = 0.7;

    // Conversation history sent to the model.
    public List<ChatMessage> messages { get; set; } = new List<ChatMessage>();
}
// A single completion choice inside the chat response.
public class AnswerChoice
{
    // Position of this choice in the returned list.
    public int Index { get; set; }

    // Why generation stopped, e.g. "stop" or "length"; bound to the
    // JSON key "finish_reason" via case-insensitive deserialization.
    public string Finish_reason { get; set; } = "";

    // The assistant's message; lowercase to mirror the JSON key "message".
    public ChatMessage message { get; set; } = new ChatMessage();
}
// Top-level chat completions response (partial mapping; JSON fields not
// declared here, such as "usage", are simply ignored by the deserializer).
public class ChatAnswer
{
    public string Id { get; set; } = "";

    public string Object { get; set; } = "";

    // Creation timestamp as reported by the server.
    public long Created { get; set; }

    // Model that produced the answer.
    public string Model { get; set; } = "";

    // One entry per completion choice; typically a single element.
    public List<AnswerChoice> Choices { get; set; } = new List<AnswerChoice>();
}
// LM Studio service endpoints (host/port as configured in the screenshots).
private string m_modelUrl = "http://192.168.11.102:1234/v1/models";
private string m_chatUrl = "http://192.168.11.102:1234/v1/chat/completions";
// Fetch the available model list and fill the combo box.
// await replaces the original .Result: blocking on a Task inside a
// WinForms event handler freezes the UI thread and risks a deadlock
// (the enclosing handler must be declared async).
string retResult = await m_modelUrl.GetStringAsync();
JsonSerializerOptions options = new JsonSerializerOptions();
options.PropertyNameCaseInsensitive = true;
m_models = JsonSerializer.Deserialize<LMStudioModels>(retResult, options);
// Deserialize can return null on empty/invalid JSON; guard before iterating.
if (m_models != null)
{
    foreach (var model in m_models.Data)
    {
        comboBox1.Items.Add(model.Id);
    }
}
// Send the user's question to /v1/chat/completions and show the answer.
// Guard against no model being selected: Data[-1] would throw.
if (comboBox1.SelectedIndex < 0)
{
    return;
}
ChatQuestion question = new ChatQuestion();
ChatMessage message = new ChatMessage();
message.role = "user";
message.content = txtChat.Text;
question.messages.Add(message);
question.model = m_models.Data[comboBox1.SelectedIndex].Id;
// await replaces the original .Result: blocking the UI thread while Flurl
// posts the request freezes the form (the enclosing handler must be async).
string result = await m_chatUrl.PostJsonAsync(question).ReceiveString();
// Renamed from "options" to avoid a duplicate-local clash when this
// snippet shares a scope with the model-list code above.
JsonSerializerOptions chatOptions = new JsonSerializerOptions();
chatOptions.PropertyNameCaseInsensitive = true;
ChatAnswer answer = JsonSerializer.Deserialize<ChatAnswer>(result, chatOptions);
// Show the parsed answer; fall back to the raw JSON if parsing yields
// nothing (the original set the raw text first, then overwrote it).
txtResult.Text = answer?.Choices.FirstOrDefault()?.message.content ?? result;
最后是程序运行效果截图: