-
Notifications
You must be signed in to change notification settings - Fork 59
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Introduce Mistral AI support #32
Open
ThomasVitale
wants to merge
1
commit into
main
Choose a base branch
from
mistral-ai-support
base: main
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Open
Changes from all commits
Commits
File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <!-- Inherits dependency management and shared build configuration from the aggregator POM. -->
    <parent>
        <groupId>dev.langchain4j</groupId>
        <artifactId>langchain4j-spring</artifactId>
        <version>0.32.0</version>
        <relativePath>../pom.xml</relativePath>
    </parent>

    <artifactId>langchain4j-spring-boot-autoconfigure</artifactId>
    <name>LangChain4j Spring Boot Autoconfigure</name>

    <dependencies>

        <!-- Annotation Processor -->

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-autoconfigure-processor</artifactId>
            <optional>true</optional>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-configuration-processor</artifactId>
            <optional>true</optional>
        </dependency>

        <!-- API -->

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
            <scope>compile</scope>
        </dependency>

        <!-- Optional -->
        <!-- Marked optional so each auto-configuration activates only when the
             corresponding module is present on the consuming application's classpath. -->

        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-spring-core</artifactId>
            <version>${project.parent.version}</version>
            <optional>true</optional>
        </dependency>

        <dependency>
            <groupId>dev.langchain4j</groupId>
            <artifactId>langchain4j-spring-mistral-ai</artifactId>
            <version>${project.parent.version}</version>
            <optional>true</optional>
        </dependency>

        <!-- Test Implementation -->

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>

    </dependencies>

    <licenses>
        <license>
            <name>Apache-2.0</name>
            <url>https://www.apache.org/licenses/LICENSE-2.0.txt</url>
            <distribution>repo</distribution>
            <comments>A business-friendly OSS license</comments>
        </license>
    </licenses>

</project>
83 changes: 83 additions & 0 deletions
83
...ev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfiguration.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,83 @@ | ||
package dev.langchain4j.spring.boot.autoconfigure.models.mistralai; | ||
|
||
import dev.langchain4j.model.mistralai.MistralAiChatModel; | ||
import dev.langchain4j.model.mistralai.MistralAiEmbeddingModel; | ||
import org.slf4j.Logger; | ||
import org.slf4j.LoggerFactory; | ||
import org.springframework.boot.autoconfigure.AutoConfiguration; | ||
import org.springframework.boot.autoconfigure.condition.ConditionalOnClass; | ||
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; | ||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; | ||
import org.springframework.boot.autoconfigure.web.client.RestClientAutoConfiguration; | ||
import org.springframework.boot.context.properties.EnableConfigurationProperties; | ||
import org.springframework.context.annotation.Bean; | ||
|
||
/** | ||
* Auto-configuration for Mistral AI clients and models. | ||
*/ | ||
@AutoConfiguration(after = {RestClientAutoConfiguration.class}) | ||
@ConditionalOnClass({ MistralAiChatModel.class }) | ||
@ConditionalOnProperty(prefix = MistralAiProperties.CONFIG_PREFIX, name = "enabled", havingValue = "true", matchIfMissing = true) | ||
@EnableConfigurationProperties({ MistralAiProperties.class, MistralAiChatProperties.class, MistralAiEmbeddingProperties.class }) | ||
public class MistralAiAutoConfiguration { | ||
|
||
private static final Logger logger = LoggerFactory.getLogger(MistralAiAutoConfiguration.class); | ||
|
||
@Bean | ||
@ConditionalOnMissingBean | ||
@ConditionalOnProperty(prefix = MistralAiChatProperties.CONFIG_PREFIX, name = "enabled", havingValue = "true", matchIfMissing = true) | ||
MistralAiChatModel mistralAiChatModel(MistralAiProperties properties, MistralAiChatProperties chatProperties) { | ||
var chatModel = MistralAiChatModel.builder() | ||
// Client Config | ||
.apiKey(properties.getClient().getApiKey()) | ||
.baseUrl(properties.getClient().getBaseUrl().toString()) | ||
.timeout(properties.getClient().getReadTimeout()) | ||
.maxRetries(properties.getClient().getMaxRetries()) | ||
.logRequests(properties.getClient().isLogRequests()) | ||
.logResponses(properties.getClient().isLogResponses()) | ||
// Model Options | ||
.modelName(chatProperties.getModel()) | ||
.temperature(chatProperties.getTemperature()) | ||
.topP(chatProperties.getTopP()) | ||
.maxTokens(chatProperties.getMaxTokens()) | ||
.safePrompt(chatProperties.isSafePrompt()) | ||
.randomSeed(chatProperties.getRandomSeed()) | ||
.build(); | ||
|
||
warnAboutSensitiveInformationExposure(properties.getClient(), MistralAiChatModel.class.getTypeName()); | ||
|
||
return chatModel; | ||
} | ||
|
||
@Bean | ||
@ConditionalOnMissingBean | ||
@ConditionalOnProperty(prefix = MistralAiEmbeddingProperties.CONFIG_PREFIX, name = "enabled", havingValue = "true", matchIfMissing = true) | ||
MistralAiEmbeddingModel mistralAiEmbeddingModel(MistralAiProperties properties, MistralAiEmbeddingProperties embeddingProperties) { | ||
var embeddingModel = MistralAiEmbeddingModel.builder() | ||
// Client Config | ||
.apiKey(properties.getClient().getApiKey()) | ||
.baseUrl(properties.getClient().getBaseUrl().toString()) | ||
.timeout(properties.getClient().getReadTimeout()) | ||
.maxRetries(properties.getClient().getMaxRetries()) | ||
.logRequests(properties.getClient().isLogRequests()) | ||
.logResponses(properties.getClient().isLogResponses()) | ||
// Model Options | ||
.modelName(embeddingProperties.getModel()) | ||
.build(); | ||
|
||
warnAboutSensitiveInformationExposure(properties.getClient(), MistralAiEmbeddingModel.class.getTypeName()); | ||
|
||
return embeddingModel; | ||
} | ||
|
||
private static void warnAboutSensitiveInformationExposure(MistralAiProperties.Client client, String modelClassName) { | ||
if (client.isLogRequests()) { | ||
logger.warn("You have enabled logging for the entire model request in {}, with the risk of exposing sensitive or private information. Please, be careful!", modelClassName); | ||
} | ||
|
||
if (client.isLogResponses()) { | ||
logger.warn("You have enabled logging for the entire model response in {}, with the risk of exposing sensitive or private information. Please, be careful!", modelClassName); | ||
} | ||
} | ||
|
||
} |
100 changes: 100 additions & 0 deletions
100
...a/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiChatProperties.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,100 @@ | ||
package dev.langchain4j.spring.boot.autoconfigure.models.mistralai; | ||
|
||
import dev.langchain4j.model.mistralai.MistralAiChatModelName; | ||
import org.springframework.boot.context.properties.ConfigurationProperties; | ||
|
||
/** | ||
* Configuration properties for Mistral AI chat models. | ||
*/ | ||
@ConfigurationProperties(prefix = MistralAiChatProperties.CONFIG_PREFIX) | ||
public class MistralAiChatProperties { | ||
|
||
public static final String CONFIG_PREFIX = "langchain4j.mistralai.chat"; | ||
|
||
/** | ||
* Whether to enable the Mistral AI chat models. | ||
*/ | ||
private boolean enabled = true; | ||
|
||
/** | ||
* ID of the model to use. | ||
*/ | ||
private String model = MistralAiChatModelName.OPEN_MISTRAL_7B.toString(); | ||
/** | ||
* What sampling temperature to use, between 0.0 and 1.0. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or "top_p" but not both. | ||
*/ | ||
private Double temperature = 0.7; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Do we need to set default values here? (except for the mandatory params (model)). |
||
/** | ||
* Nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or "temperature" but not both. | ||
*/ | ||
private Double topP = 1.0; | ||
/** | ||
* The maximum number of tokens to generate in the completion. The token count of your prompt plus "max_tokens" cannot exceed the model's context length. | ||
*/ | ||
private Integer maxTokens; | ||
/** | ||
* Whether to inject a safety prompt before all conversations. | ||
*/ | ||
private boolean safePrompt = false; | ||
/** | ||
* The seed to use for random sampling. If set, different calls will generate deterministic results. | ||
*/ | ||
private Integer randomSeed; | ||
|
||
public boolean isEnabled() { | ||
return enabled; | ||
} | ||
|
||
public void setEnabled(boolean enabled) { | ||
this.enabled = enabled; | ||
} | ||
|
||
public String getModel() { | ||
return model; | ||
} | ||
|
||
public void setModel(String model) { | ||
this.model = model; | ||
} | ||
|
||
public Double getTemperature() { | ||
return temperature; | ||
} | ||
|
||
public void setTemperature(Double temperature) { | ||
this.temperature = temperature; | ||
} | ||
|
||
public Double getTopP() { | ||
return topP; | ||
} | ||
|
||
public void setTopP(Double topP) { | ||
this.topP = topP; | ||
} | ||
|
||
public Integer getMaxTokens() { | ||
return maxTokens; | ||
} | ||
|
||
public void setMaxTokens(Integer maxTokens) { | ||
this.maxTokens = maxTokens; | ||
} | ||
|
||
public boolean isSafePrompt() { | ||
return safePrompt; | ||
} | ||
|
||
public void setSafePrompt(boolean safePrompt) { | ||
this.safePrompt = safePrompt; | ||
} | ||
|
||
public Integer getRandomSeed() { | ||
return randomSeed; | ||
} | ||
|
||
public void setRandomSeed(Integer randomSeed) { | ||
this.randomSeed = randomSeed; | ||
} | ||
|
||
} |
40 changes: 40 additions & 0 deletions
40
.../langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiEmbeddingProperties.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change | ||||
---|---|---|---|---|---|---|
@@ -0,0 +1,40 @@ | ||||||
package dev.langchain4j.spring.boot.autoconfigure.models.mistralai; | ||||||
|
||||||
import dev.langchain4j.model.mistralai.MistralAiEmbeddingModelName; | ||||||
import org.springframework.boot.context.properties.ConfigurationProperties; | ||||||
|
||||||
/** | ||||||
* Configuration properties for Mistral AI chat models. | ||||||
*/ | ||||||
@ConfigurationProperties(prefix = MistralAiEmbeddingProperties.CONFIG_PREFIX) | ||||||
public class MistralAiEmbeddingProperties { | ||||||
|
||||||
public static final String CONFIG_PREFIX = "langchain4j.mistralai.embedding"; | ||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Suggested change
|
||||||
|
||||||
/** | ||||||
* Whether to enable the Mistral AI embedding models. | ||||||
*/ | ||||||
private boolean enabled = true; | ||||||
|
||||||
/** | ||||||
* ID of the model to use. | ||||||
*/ | ||||||
private String model = MistralAiEmbeddingModelName.MISTRAL_EMBED.toString(); | ||||||
|
||||||
public boolean isEnabled() { | ||||||
return enabled; | ||||||
} | ||||||
|
||||||
public void setEnabled(boolean enabled) { | ||||||
this.enabled = enabled; | ||||||
} | ||||||
|
||||||
public String getModel() { | ||||||
return model; | ||||||
} | ||||||
|
||||||
public void setModel(String model) { | ||||||
this.model = model; | ||||||
} | ||||||
|
||||||
} |
Oops, something went wrong.
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
nit: