spring-ai-alibaba 第一章:Ollama 集成
1、pom文件内容如下
<dependencies>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.ai</groupId>
<artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
</dependency>
</dependencies>
2、application.yml 内容如下
server:
  port: 10005

spring:
  application:
    name: spring-ai-alibaba-ollama-chat-model-example
  ai:
    ollama:
      base-url: http://localhost:11434
      chat:
        options:
          model: llama3.2:1b
3、使用ChatClient 实现模型调用
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.cloud.ai.example.chat.ollama.controller;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.http.MediaType;
import reactor.core.publisher.Flux;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.SimpleLoggerAdvisor;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Ollama chat example built on the fluent {@link ChatClient} API.
 *
 * <p>Exposes a synchronous endpoint ({@code /client/simple/chat}) and an SSE
 * streaming endpoint ({@code /client/stream/chat}). Options configured on the
 * builder here take precedence over the values in {@code application.yml}.
 *
 * @author yuluo
 * @author <a href="mailto:yuluo08290126@gmail.com">yuluo</a>
 */
@RestController
@RequestMapping("/client")
public class OllamaChatClientController {

	private static final String DEFAULT_PROMPT = "你好,介绍下你自己!请用中文回答。";

	// Fixed typo: was misspelled "ollamaiChatClient".
	private final ChatClient ollamaChatClient;

	public OllamaChatClientController(ChatModel chatModel) {
		// ChatClient parameters can be configured at construction time,
		// see {@link org.springframework.ai.chat.client.ChatClient}.
		this.ollamaChatClient = ChatClient.builder(chatModel)
				// Log request/response exchanges through an Advisor.
				.defaultAdvisors(new SimpleLoggerAdvisor())
				// Default ChatModel options; these override application.yml settings.
				.defaultOptions(OllamaOptions.builder()
						.topP(0.7)
						.model("deepseek-r1:1.5b")
						.build())
				.build();
	}

	/**
	 * Simple synchronous call: blocks until the full completion is available.
	 *
	 * @return the model's full reply as a single String
	 */
	@GetMapping("/simple/chat")
	public String simpleChat() {
		return ollamaChatClient.prompt(DEFAULT_PROMPT).call().content();
	}

	/**
	 * Streaming call: emits tokens as Server-Sent Events for a typewriter effect.
	 *
	 * @param response servlet response, used to force UTF-8 so Chinese text is not garbled
	 * @return a Flux of partial content chunks
	 */
	@GetMapping(value = "/stream/chat", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
	public Flux<String> streamChat(HttpServletResponse response) {
		response.setCharacterEncoding("UTF-8");
		return ollamaChatClient.prompt(DEFAULT_PROMPT).stream().content();
	}

}
测试同步如下
测试SSE流式如下
4、使用ChatModel代码如下
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.cloud.ai.example.chat.ollama.controller;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.http.MediaType;
import reactor.core.publisher.Flux;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
/**
 * Ollama chat example built directly on the low-level {@link ChatModel} API.
 *
 * <p>Demonstrates three usage styles: a plain synchronous call, an SSE
 * streaming call, and a call with programmatically supplied
 * {@link OllamaOptions} that override {@code application.yml}.
 *
 * @author yuluo
 * @author <a href="mailto:yuluo08290126@gmail.com">yuluo</a>
 */
@RestController
@RequestMapping("/model")
public class OllamaChatModelController {

	private static final String DEFAULT_PROMPT = "你好,介绍下你自己吧。请用中文回答。";

	private final ChatModel chatModel;

	public OllamaChatModelController(ChatModel chatModel) {
		this.chatModel = chatModel;
	}

	/**
	 * Plainest possible invocation — no LLM options injected.
	 *
	 * @return the model's complete reply as a String
	 */
	@GetMapping("/simple/chat")
	public String simpleChat() {
		Prompt prompt = new Prompt(DEFAULT_PROMPT);
		return chatModel.call(prompt).getResult().getOutput().getText();
	}

	/**
	 * Streaming invocation — yields the output token-by-token for a
	 * typewriter effect on the client side.
	 *
	 * @param response servlet response, used to force UTF-8 so Chinese text is not garbled
	 * @return Flux of partial text chunks
	 */
	@GetMapping(value = "/stream/chat", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
	public Flux<String> streamChat(HttpServletResponse response) {
		// Force UTF-8 to avoid mojibake in the SSE stream.
		response.setCharacterEncoding("UTF-8");
		return chatModel.stream(new Prompt(DEFAULT_PROMPT))
				.map(chatResponse -> chatResponse.getResult().getOutput().getText());
	}

	/**
	 * Invocation with programmatic {@link OllamaOptions}. These options take
	 * precedence over any LLM settings declared in application.yml.
	 *
	 * @return the model's complete reply as a String
	 */
	@GetMapping("/custom/chat")
	public String customChat() {
		OllamaOptions options = OllamaOptions.builder()
				.topP(0.7)
				.model("deepseek-r1:1.5b")
				.temperature(0.8)
				.build();
		Prompt prompt = new Prompt(DEFAULT_PROMPT, options);
		return chatModel.call(prompt).getResult().getOutput().getText();
	}

}
测试SSE流式如下
指定 chat 模型:通过 OllamaOptions.builder().model("...") 可以在代码中指定要使用的 chat 模型,其优先级高于 application.yml 中 spring.ai.ollama.chat.options.model 的配置。