Learning Spring AI from Scratch: Calling Large Models (OpenAI / Ollama) from Java and Spring Boot
Environment Setup
- JDK 17+ (install it yourself; I am using JDK 21)
- IntelliJ IDEA
- Ollama installed and running; see the following posts (a quick connectivity check follows this list):
  - Deploying a large model locally: no GPU needed to run local models (personally tested)
  - Calling a large model from the Ollama web front end: knowledge Q&A and model switching
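Before wiring up Spring AI, it is worth checking that the Ollama HTTP API is reachable. The following is a minimal, optional sketch (not from the original post) that calls Ollama's /api/tags endpoint, which lists the locally available models; the host and port match the base URL used later in application.properties, so adjust them to your environment.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class OllamaPing {
    public static void main(String[] args) throws Exception {
        // /api/tags lists the models available in the local Ollama instance
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://192.168.1.59:11434/api/tags"))
                .GET()
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // Expect HTTP 200 and a JSON body listing models such as llama3.1:latest
        System.out.println(response.statusCode());
        System.out.println(response.body());
    }
}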
Create the Project
Maven pom.xml configuration
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>3.3.5</version>
        <relativePath/> <!-- lookup parent from repository -->
    </parent>
    <groupId>com.example</groupId>
    <artifactId>springai</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <name>springai</name>
    <description>springai</description>
    <properties>
        <java.version>21</java.version>
    </properties>
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.springframework.ai</groupId>
                <artifactId>spring-ai-bom</artifactId>
                <version>1.0.0-SNAPSHOT</version>
                <type>pom</type>
                <scope>import</scope>
            </dependency>
        </dependencies>
    </dependencyManagement>
    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-actuator</artifactId>
        </dependency>
        <!-- Uncomment to call the OpenAI (ChatGPT) API instead of Ollama -->
        <!--<dependency>-->
        <!--    <groupId>org.springframework.ai</groupId>-->
        <!--    <artifactId>spring-ai-openai-spring-boot-starter</artifactId>-->
        <!--</dependency>-->
        <dependency>
            <groupId>org.springframework.ai</groupId>
            <artifactId>spring-ai-ollama-spring-boot-starter</artifactId>
        </dependency>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>
    <build>
        <plugins>
            <plugin>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-maven-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
    <repositories>
        <repository>
            <id>spring-milestones</id>
            <name>Spring Milestones</name>
            <url>https://repo.spring.io/milestone</url>
            <snapshots>
                <enabled>false</enabled>
            </snapshots>
        </repository>
        <repository>
            <id>spring-snapshots</id>
            <name>Spring Snapshots</name>
            <url>https://repo.spring.io/snapshot</url>
            <releases>
                <enabled>false</enabled>
            </releases>
        </repository>
    </repositories>
</project>
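For completeness, the project also needs a standard Spring Boot entry point. A minimal sketch is shown below; the class name SpringaiApplication is assumed here (Spring Initializr generates an equivalent class for this artifact automatically).

package com.example.springai;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

// Standard Spring Boot entry point; component scanning picks up the
// configuration and controller classes shown later in this post.
@SpringBootApplication
public class SpringaiApplication {
    public static void main(String[] args) {
        SpringApplication.run(SpringaiApplication.class, args);
    }
}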
Configure the Ollama API base URL
For other configuration options, see the official documentation: https://docs.spring.io/spring-ai/reference/api/chat/ollama-chat.html
spring.application.name=springai
# If you want to call the ChatGPT (OpenAI) API instead, configure your OpenAI key here and uncomment the OpenAI starter in the pom
spring.ai.openai.api-key=34223242432424324
# Ollama base URL
spring.ai.ollama.base-url=http://192.168.1.59:11434
spring.ai.ollama.init.pull-model-strategy=never
spring.ai.ollama.init.timeout=60s
spring.ai.ollama.init.max-retries=1
# Local model served by Ollama: llama3.1:latest
spring.ai.ollama.chat.options.model=llama3.1:latest
spring.ai.ollama.chat.options.temperature=0.7
spring.ai.ollama.embedding.enabled=true
spring.ai.ollama.embedding.options.model=llama3.1-instruct:latest
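Not part of the original post, but useful for verifying the configuration above: a minimal sketch of a startup smoke test that injects the auto-configured OllamaChatModel and sends one prompt when the application starts (the class name StartupCheckConfig is assumed).

package com.example.springai.config;

import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.boot.CommandLineRunner;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class StartupCheckConfig {

    // Sends a single test prompt at startup so misconfiguration
    // (wrong base-url, missing model, etc.) shows up immediately in the logs.
    @Bean
    CommandLineRunner ollamaSmokeTest(OllamaChatModel chatModel) {
        return args -> System.out.println(
                "Ollama check: " + chatModel.call("Say hello in one short sentence."));
    }
}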
When calling the model in streaming mode you may hit a timeout error: Resolved [org.springframework.web.context.request.async.AsyncRequestTimeoutException]
Set the async request timeout to 300 seconds:
package com.example.springai.config;

import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.AsyncSupportConfigurer;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

@Configuration
public class WebConfig implements WebMvcConfigurer {

    @Override
    public void configureAsyncSupport(AsyncSupportConfigurer configurer) {
        // Avoids: Resolved [org.springframework.web.context.request.async.AsyncRequestTimeoutException]
        configurer.setDefaultTimeout(300000); // async request timeout: 300 seconds (300,000 ms)
    }
}
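Alternatively, if you prefer configuration over code, the same default async timeout can be set with Spring Boot's spring.mvc.async.request-timeout property in application.properties (for example, spring.mvc.async.request-timeout=300s).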
Call the Ollama chat API
package com.example.springai.controller;

import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

import java.util.Map;

@RestController
public class OllamaController {

    private final OllamaChatModel chatModel;

    @Autowired
    public OllamaController(OllamaChatModel chatModel) {
        this.chatModel = chatModel;
    }

    // Blocking call: returns the whole answer in a single response
    @GetMapping("/ai/generate")
    public Map<String, String> generate(@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
        return Map.of("generation", this.chatModel.call(message));
    }

    // Streaming call: returns the answer chunk by chunk
    @GetMapping("/ai/generateStream")
    public Flux<ChatResponse> generateStream(@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
        Prompt prompt = new Prompt(new UserMessage(message));
        return this.chatModel.stream(prompt);
    }
}
Result of the blocking call (/ai/generate):
Result of the streaming call (/ai/generateStream):
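As an alternative to injecting OllamaChatModel directly, Spring AI also provides the higher-level ChatClient fluent API. The sketch below is not from the original post: the class name ChatClientController and the /ai/chat and /ai/chatStream paths are made up for illustration, and it builds a ChatClient on top of whichever ChatModel is auto-configured (here, the Ollama model configured above).

package com.example.springai.controller;

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

@RestController
public class ChatClientController {

    private final ChatClient chatClient;

    public ChatClientController(ChatModel chatModel) {
        // Wrap the auto-configured ChatModel (Ollama in this setup) in a ChatClient
        this.chatClient = ChatClient.builder(chatModel).build();
    }

    // Blocking call: returns the full answer as plain text
    @GetMapping("/ai/chat")
    public String chat(@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
        return this.chatClient.prompt().user(message).call().content();
    }

    // Streaming call: emits the answer as plain-text chunks while it is generated
    @GetMapping("/ai/chatStream")
    public Flux<String> chatStream(@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
        return this.chatClient.prompt().user(message).stream().content();
    }
}

With this variant the streaming endpoint returns plain text chunks (Flux<String>) instead of full ChatResponse objects, which is often easier to consume from a browser or simple client.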