
build(spring-ai): Spring AI 1.0.0-M3
clementgig committed Oct 14, 2024
1 parent 9b80ccd commit eb3207c
Showing 7 changed files with 12 additions and 11 deletions.
2 changes: 1 addition & 1 deletion pom.xml
@@ -21,7 +21,7 @@
<maven.compiler.target>21</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

- <spring-ai.version>1.0.0-M2</spring-ai.version>
+ <spring-ai.version>1.0.0-M3</spring-ai.version>
<spring-shell.version>3.3.3</spring-shell.version>
<lombok.version>1.18.34</lombok.version>
</properties>
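Beyond the version bump in `pom.xml`, the remaining files adapt to an API change in Spring AI 1.0.0-M3: the `OllamaOptions` sampling values such as temperature are now doubles rather than floats, which is why every `0.8f`/`0.9f` literal below loses its `f` suffix. A minimal sketch of the M3-style call used throughout the updated files (the import path is assumed from the Spring AI Ollama module layout):

```java
import org.springframework.ai.ollama.api.OllamaOptions;

// 1.0.0-M2 accepted a Float literal such as 0.8f here; with 1.0.0-M3
// temperature is a Double, so a plain double literal is passed instead.
OllamaOptions options = OllamaOptions.create()
        .withModel("mistral:7b")
        .withTemperature(0.8);
```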
5 changes: 3 additions & 2 deletions workshop/exercise-1.md
@@ -17,6 +17,7 @@ In the LLMService constructor, set chatClient with the result of calling `build(

```java
private final ChatClient chatClient;
+ private final SystemMessage systemMessage;

public LLMService(ChatClient.Builder builder, @Value("classpath:/prompt-system.md") Resource promptSystem) {
this.systemMessage = new SystemMessage(promptSystem);
@@ -29,12 +30,12 @@ public LLMService(ChatClient.Builder builder, @Value("classpath:/prompt-system.m
Update the `prompt-system.md` file in `src/main/resources` folder with the following content:

```markdown
- Please answer the question asked and provide the shortest possible response without extra text, using formal English language.
+ Please answer the question asked and provide the shortest possible response without extra text nor line-breaks, using formal English language.
```

### Part 3 - Create query options object

- Create a `OllamaOptions` attribute and initialize it in the constructor by using `OllamaOptions.create()` method and set model to `mistral:7b` and temperature to `0.8f`.
+ Create a `OllamaOptions` attribute and initialize it in the constructor by using `OllamaOptions.create()` method and set model to `mistral:7b` and temperature to `0.8`.

### Part 4 - Implement the model query in streaming mode

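The exercise-1.md diff is truncated before Part 4, so the streaming query itself is not visible here. For orientation only, a minimal sketch of how Parts 3 and 4 might fit together under 1.0.0-M3, assuming the fluent `ChatClient` streaming API (`prompt(...).stream().content()`) and Reactor's `Flux.toStream()` bridge; the workshop's actual code is in `workshop/solution/exercise-1/LLMService.java` below and may differ:

```java
import java.util.List;
import java.util.stream.Stream;

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.SystemMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.api.OllamaOptions;

// Hypothetical, simplified variant of the exercise-1 service, for illustration.
public class LLMServiceSketch {

    private final ChatClient chatClient;
    private final SystemMessage systemMessage;
    private final OllamaOptions options;

    public LLMServiceSketch(ChatClient.Builder builder, SystemMessage systemMessage) {
        this.systemMessage = systemMessage;
        this.chatClient = builder.build();
        // Part 3: query options, with the M3-style double temperature
        this.options = OllamaOptions.create()
                .withModel("mistral:7b")
                .withTemperature(0.8);
    }

    // Part 4: stream the model's answer chunk by chunk
    Stream<String> getResponse(final Message userMessage) {
        Prompt prompt = new Prompt(List.of(systemMessage, userMessage), options);
        return chatClient.prompt(prompt)
                .stream()
                .content()    // Flux<String> of partial responses
                .toStream();  // blocking bridge to java.util.stream.Stream
    }
}
```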
2 changes: 1 addition & 1 deletion workshop/exercise-4.md
@@ -127,7 +127,7 @@ Message message = promptTemplate.createMessage(Map.of("context", context, "quest
Prompt prompt = new Prompt(List.of(systemMessage, message),
OllamaOptions.create()
.withModel("mistral:7b")
- .withTemperature(0.9f));
+ .withTemperature(0.9));

System.out.println("Preparing the answer...");

4 changes: 2 additions & 2 deletions workshop/solution/exercise-1/LLMService.java
@@ -23,15 +23,15 @@
public class LLMService {

private final ChatClient chatClient;
- private final OllamaOptions options;
private final SystemMessage systemMessage;
+ private final OllamaOptions options;

public LLMService(ChatClient.Builder builder, @Value("classpath:/prompt-system.md") Resource promptSystem) {
this.systemMessage = new SystemMessage(promptSystem);
this.chatClient = builder.build();
this.options = OllamaOptions.create()
.withModel("mistral:7b")
- .withTemperature(0.8f);
+ .withTemperature(0.8);
}

private Stream<String> getResponse(final Message userMessage) {
4 changes: 2 additions & 2 deletions workshop/solution/exercise-2/LLMService.java
@@ -23,16 +23,16 @@
public class LLMService {

private final ChatClient chatClient;
- private final OllamaOptions options;
private final SystemMessage systemMessage;
+ private final OllamaOptions options;
private final List<Message> history;

public LLMService(ChatClient.Builder builder, @Value("classpath:/prompt-system.md") Resource promptSystem) {
this.systemMessage = new SystemMessage(promptSystem);
this.chatClient = builder.build();
this.options = OllamaOptions.create()
.withModel("mistral:7b")
- .withTemperature(0.8f);
+ .withTemperature(0.8);
this.history = new ArrayList<>();
}

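The exercise-2 solution introduces a `history` list, but its use falls outside the visible hunk. As a loose illustration only (a common conversation-memory pattern, not necessarily the workshop's code), previous turns can be folded into each new prompt; this hypothetical helper assumes it sits inside the LLMService shown above:

```java
// Hypothetical helper, not part of this commit: include prior turns in the prompt.
private Stream<String> getResponseWithHistory(final Message userMessage) {
    List<Message> conversation = new ArrayList<>();
    conversation.add(systemMessage);
    conversation.addAll(history);   // earlier user (and assistant) messages
    conversation.add(userMessage);

    history.add(userMessage);       // remember this turn for the next call

    Prompt prompt = new Prompt(conversation, options);
    return chatClient.prompt(prompt)
            .stream()
            .content()
            .toStream();
}
```

Appending the assistant's reply back into `history` is omitted here because the response is streamed.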
4 changes: 2 additions & 2 deletions workshop/solution/exercise-3/LLMService.java
@@ -23,8 +23,8 @@
public class LLMService {

private final ChatClient chatClient;
- private final OllamaOptions options;
private final SystemMessage systemMessage;
+ private final OllamaOptions options;
private final List<Message> history;
private final DataService dataService;
private final PromptTemplate userPromptTemplate;
Expand All @@ -34,7 +34,7 @@ public LLMService(ChatClient.Builder builder, @Value("classpath:/prompt-system.m
this.chatClient = builder.build();
this.options = OllamaOptions.create()
.withModel("mistral:7b")
- .withTemperature(0.8f);
+ .withTemperature(0.8);
this.history = new ArrayList<>();
this.dataService = dataService;
this.userPromptTemplate = new PromptTemplate("""
2 changes: 1 addition & 1 deletion workshop/solution/exercise-4/RAGService.java
@@ -47,7 +47,7 @@ public Stream<String> getResponse(final String question) {
Prompt prompt = new Prompt(List.of(systemMessage, message),
OllamaOptions.create()
.withModel("mistral:7b")
- .withTemperature(0.9f));
+ .withTemperature(0.9));

System.out.println("Preparing the answer...");

