diff --git a/langchain4j-spring-boot-autoconfigure/pom.xml b/langchain4j-spring-boot-autoconfigure/pom.xml
new file mode 100644
index 00000000..ec95f0e9
--- /dev/null
+++ b/langchain4j-spring-boot-autoconfigure/pom.xml
@@ -0,0 +1,76 @@
+
+
+ 4.0.0
+
+
+ dev.langchain4j
+ langchain4j-spring
+ 0.32.0
+ ../pom.xml
+
+
+ langchain4j-spring-boot-autoconfigure
+ LangChain4j Spring Boot Autoconfigure
+
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-autoconfigure-processor
+ true
+
+
+
+ org.springframework.boot
+ spring-boot-configuration-processor
+ true
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter
+ compile
+
+
+
+
+
+ dev.langchain4j
+ langchain4j-spring-core
+ ${project.parent.version}
+ true
+
+
+
+ dev.langchain4j
+ langchain4j-spring-mistral-ai
+ ${project.parent.version}
+ true
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
+
+
+
+ Apache-2.0
+ https://www.apache.org/licenses/LICENSE-2.0.txt
+ repo
+ A business-friendly OSS license
+
+
+
+
\ No newline at end of file
diff --git a/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfiguration.java b/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfiguration.java
new file mode 100644
index 00000000..332a8fae
--- /dev/null
+++ b/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfiguration.java
@@ -0,0 +1,83 @@
+package dev.langchain4j.spring.boot.autoconfigure.models.mistralai;
+
+import dev.langchain4j.model.mistralai.MistralAiChatModel;
+import dev.langchain4j.model.mistralai.MistralAiEmbeddingModel;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.autoconfigure.AutoConfiguration;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnClass;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
+import org.springframework.boot.autoconfigure.web.client.RestClientAutoConfiguration;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.context.annotation.Bean;
+
+/**
+ * Auto-configuration for Mistral AI chat and embedding models; active only when the Mistral AI classes are on the classpath and the integration is enabled.
+ */
+@AutoConfiguration(after = {RestClientAutoConfiguration.class})
+@ConditionalOnClass({ MistralAiChatModel.class })
+@ConditionalOnProperty(prefix = MistralAiProperties.CONFIG_PREFIX, name = "enabled", havingValue = "true", matchIfMissing = true)
+@EnableConfigurationProperties({ MistralAiProperties.class, MistralAiChatProperties.class, MistralAiEmbeddingProperties.class })
+public class MistralAiAutoConfiguration {
+
+ private static final Logger logger = LoggerFactory.getLogger(MistralAiAutoConfiguration.class);
+
+ @Bean
+ @ConditionalOnMissingBean
+ @ConditionalOnProperty(prefix = MistralAiChatProperties.CONFIG_PREFIX, name = "enabled", havingValue = "true", matchIfMissing = true)
+ MistralAiChatModel mistralAiChatModel(MistralAiProperties properties, MistralAiChatProperties chatProperties) {
+ var chatModel = MistralAiChatModel.builder()
+ // Client config — NOTE(review): only the read timeout is propagated to .timeout(); Client.connectTimeout is never applied — confirm whether the builder can take it
+ .apiKey(properties.getClient().getApiKey())
+ .baseUrl(properties.getClient().getBaseUrl().toString())
+ .timeout(properties.getClient().getReadTimeout())
+ .maxRetries(properties.getClient().getMaxRetries())
+ .logRequests(properties.getClient().isLogRequests())
+ .logResponses(properties.getClient().isLogResponses())
+ // Model Options
+ .modelName(chatProperties.getModel())
+ .temperature(chatProperties.getTemperature())
+ .topP(chatProperties.getTopP())
+ .maxTokens(chatProperties.getMaxTokens())
+ .safePrompt(chatProperties.isSafePrompt())
+ .randomSeed(chatProperties.getRandomSeed())
+ .build();
+
+ warnAboutSensitiveInformationExposure(properties.getClient(), MistralAiChatModel.class.getTypeName());
+
+ return chatModel;
+ }
+
+ @Bean
+ @ConditionalOnMissingBean
+ @ConditionalOnProperty(prefix = MistralAiEmbeddingProperties.CONFIG_PREFIX, name = "enabled", havingValue = "true", matchIfMissing = true)
+ MistralAiEmbeddingModel mistralAiEmbeddingModel(MistralAiProperties properties, MistralAiEmbeddingProperties embeddingProperties) {
+ var embeddingModel = MistralAiEmbeddingModel.builder()
+ // Client config — NOTE(review): connect timeout is ignored here as well; only the read timeout reaches the builder
+ .apiKey(properties.getClient().getApiKey())
+ .baseUrl(properties.getClient().getBaseUrl().toString())
+ .timeout(properties.getClient().getReadTimeout())
+ .maxRetries(properties.getClient().getMaxRetries())
+ .logRequests(properties.getClient().isLogRequests())
+ .logResponses(properties.getClient().isLogResponses())
+ // Model Options
+ .modelName(embeddingProperties.getModel())
+ .build();
+
+ warnAboutSensitiveInformationExposure(properties.getClient(), MistralAiEmbeddingModel.class.getTypeName());
+
+ return embeddingModel;
+ }
+
+ // Emits a one-time warning per model when request/response logging is on, since bodies may contain prompts or user data.
+ private static void warnAboutSensitiveInformationExposure(MistralAiProperties.Client client, String modelClassName) {
+ if (client.isLogRequests()) {
+ logger.warn("You have enabled logging for the entire model request in {}, with the risk of exposing sensitive or private information. Please, be careful!", modelClassName);
+ }
+
+ if (client.isLogResponses()) {
+ logger.warn("You have enabled logging for the entire model response in {}, with the risk of exposing sensitive or private information. Please, be careful!", modelClassName);
+ }
+ }
+
+}
diff --git a/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiChatProperties.java b/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiChatProperties.java
new file mode 100644
index 00000000..b12a186e
--- /dev/null
+++ b/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiChatProperties.java
@@ -0,0 +1,100 @@
+package dev.langchain4j.spring.boot.autoconfigure.models.mistralai;
+
+import dev.langchain4j.model.mistralai.MistralAiChatModelName;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+/**
+ * Configuration properties for Mistral AI chat models.
+ */
+@ConfigurationProperties(prefix = MistralAiChatProperties.CONFIG_PREFIX)
+public class MistralAiChatProperties {
+
+ public static final String CONFIG_PREFIX = "langchain4j.mistralai.chat";
+
+ /**
+ * Whether to enable the Mistral AI chat models.
+ */
+ private boolean enabled = true;
+
+ /**
+ * ID of the model to use.
+ */
+ private String model = MistralAiChatModelName.OPEN_MISTRAL_7B.toString();
+ /**
+ * What sampling temperature to use, between 0.0 and 1.0. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or "top_p" but not both.
+ */
+ private Double temperature = 0.7;
+ /**
+ * Nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or "temperature" but not both.
+ */
+ private Double topP = 1.0;
+ /**
+ * The maximum number of tokens to generate in the completion. The token count of your prompt plus "max_tokens" cannot exceed the model's context length.
+ */
+ private Integer maxTokens;
+ /**
+ * Whether to inject a safety prompt before all conversations.
+ */
+ private boolean safePrompt = false;
+ /**
+ * The seed to use for random sampling. If set, different calls will generate deterministic results.
+ */
+ private Integer randomSeed;
+
+ public boolean isEnabled() {
+ return enabled;
+ }
+
+ public void setEnabled(boolean enabled) {
+ this.enabled = enabled;
+ }
+
+ public String getModel() {
+ return model;
+ }
+
+ public void setModel(String model) {
+ this.model = model;
+ }
+
+ public Double getTemperature() {
+ return temperature;
+ }
+
+ public void setTemperature(Double temperature) {
+ this.temperature = temperature;
+ }
+
+ public Double getTopP() {
+ return topP;
+ }
+
+ public void setTopP(Double topP) {
+ this.topP = topP;
+ }
+
+ public Integer getMaxTokens() {
+ return maxTokens;
+ }
+
+ public void setMaxTokens(Integer maxTokens) {
+ this.maxTokens = maxTokens;
+ }
+
+ public boolean isSafePrompt() {
+ return safePrompt;
+ }
+
+ public void setSafePrompt(boolean safePrompt) {
+ this.safePrompt = safePrompt;
+ }
+
+ public Integer getRandomSeed() {
+ return randomSeed;
+ }
+
+ public void setRandomSeed(Integer randomSeed) {
+ this.randomSeed = randomSeed;
+ }
+
+}
diff --git a/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiEmbeddingProperties.java b/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiEmbeddingProperties.java
new file mode 100644
index 00000000..384cc62d
--- /dev/null
+++ b/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiEmbeddingProperties.java
@@ -0,0 +1,40 @@
+package dev.langchain4j.spring.boot.autoconfigure.models.mistralai;
+
+import dev.langchain4j.model.mistralai.MistralAiEmbeddingModelName;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+/**
+ * Configuration properties for Mistral AI embedding models.
+ */
+@ConfigurationProperties(prefix = MistralAiEmbeddingProperties.CONFIG_PREFIX)
+public class MistralAiEmbeddingProperties {
+
+ public static final String CONFIG_PREFIX = "langchain4j.mistralai.embedding";
+
+ /**
+ * Whether to enable the Mistral AI embedding models.
+ */
+ private boolean enabled = true;
+
+ /**
+ * ID of the model to use.
+ */
+ private String model = MistralAiEmbeddingModelName.MISTRAL_EMBED.toString();
+
+ public boolean isEnabled() {
+ return enabled;
+ }
+
+ public void setEnabled(boolean enabled) {
+ this.enabled = enabled;
+ }
+
+ public String getModel() {
+ return model;
+ }
+
+ public void setModel(String model) {
+ this.model = model;
+ }
+
+}
diff --git a/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiProperties.java b/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiProperties.java
new file mode 100644
index 00000000..883712de
--- /dev/null
+++ b/langchain4j-spring-boot-autoconfigure/src/main/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiProperties.java
@@ -0,0 +1,137 @@
+package dev.langchain4j.spring.boot.autoconfigure.models.mistralai;
+
+import org.springframework.boot.context.properties.ConfigurationProperties;
+
+import java.net.URI;
+import java.time.Duration;
+
+/**
+ * Common configuration properties for the Mistral AI integration.
+ */
+@ConfigurationProperties(MistralAiProperties.CONFIG_PREFIX)
+public class MistralAiProperties {
+
+ public static final String CONFIG_PREFIX = "langchain4j.mistralai";
+
+ /**
+ * Whether to enable the Mistral AI integration.
+ */
+ private boolean enabled = true;
+
+ /**
+ * Settings for the HTTP client.
+ */
+ private Client client = new Client();
+
+ public boolean isEnabled() {
+ return enabled;
+ }
+
+ public void setEnabled(boolean enabled) {
+ this.enabled = enabled;
+ }
+
+ public Client getClient() {
+ return client;
+ }
+
+ public void setClient(Client client) {
+ this.client = client;
+ }
+
+ public static class Client {
+
+ /**
+ * Base URL of the Mistral AI API.
+ */
+ private URI baseUrl = URI.create("https://api.mistral.ai/v1");
+
+ /**
+ * Maximum time to wait for a connection.
+ */
+ private Duration connectTimeout = Duration.ofSeconds(10);
+
+ /**
+ * Maximum time to wait for a response.
+ */
+ private Duration readTimeout = Duration.ofSeconds(60);
+
+ /**
+ * Mistral AI API key.
+ */
+ private String apiKey;
+
+ /**
+ * Maximum number of retries.
+ */
+ private int maxRetries = 3;
+
+ /**
+ * Whether to log requests.
+ */
+ private boolean logRequests = false;
+
+ /**
+ * Whether to log responses.
+ */
+ private boolean logResponses = false;
+
+ public URI getBaseUrl() {
+ return baseUrl;
+ }
+
+ public void setBaseUrl(URI baseUrl) {
+ this.baseUrl = baseUrl;
+ }
+
+ public Duration getConnectTimeout() {
+ return connectTimeout;
+ }
+
+ public void setConnectTimeout(Duration connectTimeout) {
+ this.connectTimeout = connectTimeout;
+ }
+
+ public Duration getReadTimeout() {
+ return readTimeout;
+ }
+
+ public void setReadTimeout(Duration readTimeout) {
+ this.readTimeout = readTimeout;
+ }
+
+ public String getApiKey() {
+ return apiKey;
+ }
+
+ public void setApiKey(String apiKey) {
+ this.apiKey = apiKey;
+ }
+
+ public int getMaxRetries() {
+ return maxRetries;
+ }
+
+ public void setMaxRetries(int maxRetries) {
+ this.maxRetries = maxRetries;
+ }
+
+ public boolean isLogRequests() {
+ return logRequests;
+ }
+
+ public void setLogRequests(boolean logRequests) {
+ this.logRequests = logRequests;
+ }
+
+ public boolean isLogResponses() {
+ return logResponses;
+ }
+
+ public void setLogResponses(boolean logResponses) {
+ this.logResponses = logResponses;
+ }
+
+ }
+
+}
diff --git a/langchain4j-spring-boot-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/langchain4j-spring-boot-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
new file mode 100644
index 00000000..344cd293
--- /dev/null
+++ b/langchain4j-spring-boot-autoconfigure/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports
@@ -0,0 +1 @@
+dev.langchain4j.spring.boot.autoconfigure.models.mistralai.MistralAiAutoConfiguration
diff --git a/langchain4j-spring-boot-autoconfigure/src/test/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfigurationIT.java b/langchain4j-spring-boot-autoconfigure/src/test/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfigurationIT.java
new file mode 100644
index 00000000..7b446584
--- /dev/null
+++ b/langchain4j-spring-boot-autoconfigure/src/test/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfigurationIT.java
@@ -0,0 +1,43 @@
+package dev.langchain4j.spring.boot.autoconfigure.models.mistralai;
+
+import dev.langchain4j.data.embedding.Embedding;
+import dev.langchain4j.model.mistralai.MistralAiChatModel;
+import dev.langchain4j.model.mistralai.MistralAiEmbeddingModel;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+import org.springframework.boot.autoconfigure.AutoConfigurations;
+import org.springframework.boot.autoconfigure.web.client.RestClientAutoConfiguration;
+import org.springframework.boot.test.context.runner.ApplicationContextRunner;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * Integration tests for {@link MistralAiAutoConfiguration}.
+ */
+@EnabledIfEnvironmentVariable(named = "MISTRAL_AI_API_KEY", matches = ".*")
+class MistralAiAutoConfigurationIT {
+
+ private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
+ .withPropertyValues("langchain4j.mistralai.client.apiKey=" + System.getenv("MISTRAL_AI_API_KEY"))
+ .withPropertyValues("langchain4j.mistralai.client.logRequests=true")
+ .withConfiguration(AutoConfigurations.of(RestClientAutoConfiguration.class, MistralAiAutoConfiguration.class));
+
+ @Test
+ void chat() {
+ contextRunner.run(context -> {
+ MistralAiChatModel model = context.getBean(MistralAiChatModel.class);
+ String response = model.generate("What is the capital of Italy?");
+ assertThat(response).containsIgnoringCase("Rome");
+ });
+ }
+
+ @Test
+ void embedding() {
+ contextRunner.run(context -> {
+ MistralAiEmbeddingModel model = context.getBean(MistralAiEmbeddingModel.class);
+ Embedding embedding = model.embed("hi").content();
+ assertThat(embedding.dimension()).isEqualTo(1024);
+ });
+ }
+
+}
diff --git a/langchain4j-spring-boot-autoconfigure/src/test/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfigurationTests.java b/langchain4j-spring-boot-autoconfigure/src/test/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfigurationTests.java
new file mode 100644
index 00000000..dae911c7
--- /dev/null
+++ b/langchain4j-spring-boot-autoconfigure/src/test/java/dev/langchain4j/spring/boot/autoconfigure/models/mistralai/MistralAiAutoConfigurationTests.java
@@ -0,0 +1,57 @@
+package dev.langchain4j.spring.boot.autoconfigure.models.mistralai;
+
+import dev.langchain4j.model.mistralai.MistralAiChatModel;
+import dev.langchain4j.model.mistralai.MistralAiEmbeddingModel;
+import org.junit.jupiter.api.Test;
+import org.springframework.boot.autoconfigure.AutoConfigurations;
+import org.springframework.boot.autoconfigure.web.client.RestClientAutoConfiguration;
+import org.springframework.boot.test.context.runner.ApplicationContextRunner;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * Unit tests for {@link MistralAiAutoConfiguration}.
+ */
+class MistralAiAutoConfigurationTests {
+
+ private final ApplicationContextRunner contextRunner = new ApplicationContextRunner()
+ .withPropertyValues("langchain4j.mistralai.client.api-key=demo")
+ .withConfiguration(AutoConfigurations.of(RestClientAutoConfiguration.class, MistralAiAutoConfiguration.class));
+
+ @Test
+ void chat() {
+ contextRunner.run(context -> {
+ assertThat(context).hasSingleBean(MistralAiChatModel.class);
+ });
+ }
+
+ @Test
+ void chatDisabled() {
+ contextRunner.withPropertyValues("langchain4j.mistralai.chat.enabled=false").run(context -> {
+ assertThat(context).doesNotHaveBean(MistralAiChatModel.class);
+ });
+ }
+
+ @Test
+ void embedding() {
+ contextRunner.run(context -> {
+ assertThat(context).hasSingleBean(MistralAiEmbeddingModel.class);
+ });
+ }
+
+ @Test
+ void embeddingDisabled() {
+ contextRunner.withPropertyValues("langchain4j.mistralai.embedding.enabled=false").run(context -> {
+ assertThat(context).doesNotHaveBean(MistralAiEmbeddingModel.class);
+ });
+ }
+
+ @Test
+ void disabled() {
+ contextRunner.withPropertyValues("langchain4j.mistralai.enabled=false").run(context -> {
+ assertThat(context).doesNotHaveBean(MistralAiChatModel.class);
+ assertThat(context).doesNotHaveBean(MistralAiEmbeddingModel.class);
+ });
+ }
+
+}
\ No newline at end of file
diff --git a/langchain4j-spring-boot-starters/langchain4j-mistral-ai-spring-boot-starter/pom.xml b/langchain4j-spring-boot-starters/langchain4j-mistral-ai-spring-boot-starter/pom.xml
new file mode 100644
index 00000000..d7f78608
--- /dev/null
+++ b/langchain4j-spring-boot-starters/langchain4j-mistral-ai-spring-boot-starter/pom.xml
@@ -0,0 +1,52 @@
+
+
+ 4.0.0
+
+
+ dev.langchain4j
+ langchain4j-spring
+ 0.32.0
+ ../../pom.xml
+
+
+ langchain4j-mistral-ai-spring-boot-starter
+ LangChain4j Mistral AI Spring Boot Starter
+
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter
+ compile
+
+
+
+ dev.langchain4j
+ langchain4j-spring-boot-autoconfigure
+ ${project.parent.version}
+ compile
+
+
+
+ dev.langchain4j
+ langchain4j-spring-mistral-ai
+ ${project.parent.version}
+ compile
+
+
+
+
+
+
+ Apache-2.0
+ https://www.apache.org/licenses/LICENSE-2.0.txt
+ repo
+ A business-friendly OSS license
+
+
+
+
\ No newline at end of file
diff --git a/langchain4j-spring-core/pom.xml b/langchain4j-spring-core/pom.xml
new file mode 100644
index 00000000..5c7ee4e1
--- /dev/null
+++ b/langchain4j-spring-core/pom.xml
@@ -0,0 +1,52 @@
+
+
+ 4.0.0
+
+
+ dev.langchain4j
+ langchain4j-spring
+ 0.32.0
+ ../pom.xml
+
+
+ langchain4j-spring-core
+ LangChain4j Spring Core
+
+
+
+
+
+
+ org.springframework
+ spring-web
+ compile
+
+
+
+ org.slf4j
+ slf4j-api
+ compile
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
+
+
+
+ Apache-2.0
+ https://www.apache.org/licenses/LICENSE-2.0.txt
+ repo
+ A business-friendly OSS license
+
+
+
+
\ No newline at end of file
diff --git a/langchain4j-spring-core/src/main/java/dev/langchain4j/spring/core/http/HttpLoggingInterceptor.java b/langchain4j-spring-core/src/main/java/dev/langchain4j/spring/core/http/HttpLoggingInterceptor.java
new file mode 100644
index 00000000..780a2c0c
--- /dev/null
+++ b/langchain4j-spring-core/src/main/java/dev/langchain4j/spring/core/http/HttpLoggingInterceptor.java
@@ -0,0 +1,74 @@
+package dev.langchain4j.spring.core.http;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.HttpRequest;
+import org.springframework.http.client.ClientHttpRequestExecution;
+import org.springframework.http.client.ClientHttpRequestInterceptor;
+import org.springframework.http.client.ClientHttpResponse;
+import org.springframework.util.StreamUtils;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.util.stream.Collectors;
+
+/**
+ * HTTP interceptor for logging requests and responses. NOTE(review): logging the response body consumes the stream, so this interceptor is only safe behind a buffering request factory (e.g. BufferingClientHttpRequestFactory) — confirm every client registers it that way.
+ */
+public class HttpLoggingInterceptor implements ClientHttpRequestInterceptor {
+
+ private static final Logger logger = LoggerFactory.getLogger(HttpLoggingInterceptor.class);
+
+ private final boolean logRequests;
+
+ private final boolean logResponses;
+
+ public HttpLoggingInterceptor(boolean logRequests, boolean logResponses) {
+ this.logRequests = logRequests;
+ this.logResponses = logResponses;
+ }
+
+ @Override
+ public ClientHttpResponse intercept(HttpRequest request, byte[] requestBody, ClientHttpRequestExecution execution) throws IOException {
+ if (logRequests) {
+ logRequest(request, requestBody);
+ }
+
+ if (logResponses) {
+ return logResponse(request, requestBody, execution);
+ } else {
+ return execution.execute(request, requestBody);
+ }
+ }
+
+ private void logRequest(HttpRequest request, byte[] requestBody) {
+ logger.info("Request.\n Method: {}.\n URI: {}.\n Headers: {}.\n Body: {}", request.getMethod(),
+ request.getURI(),
+ request.getHeaders()
+ .toSingleValueMap()
+ .entrySet()
+ .stream()
+ .filter(e -> !e.getKey().equals(HttpHeaders.AUTHORIZATION))
+ .map(e -> e.getKey() + ":" + e.getValue())
+ .collect(Collectors.joining(", ")),
+ new String(requestBody, StandardCharsets.UTF_8));
+ }
+
+ private ClientHttpResponse logResponse(HttpRequest request, byte[] requestBody, ClientHttpRequestExecution execution) throws IOException {
+ ClientHttpResponse response = execution.execute(request, requestBody);
+ String responseBody = StreamUtils.copyToString(response.getBody(), StandardCharsets.UTF_8);
+
+ logger.info("Response.\n Status Code: {}.\n Headers: {}.\n Body: {}", response.getStatusText(),
+ response.getHeaders()
+ .toSingleValueMap()
+ .entrySet()
+ .stream()
+ .map(e -> e.getKey() + ":" + e.getValue())
+ .collect(Collectors.joining(", ")),
+ responseBody);
+
+ return response;
+ }
+
+}
diff --git a/langchain4j-spring-core/src/main/java/dev/langchain4j/spring/core/http/package-info.java b/langchain4j-spring-core/src/main/java/dev/langchain4j/spring/core/http/package-info.java
new file mode 100644
index 00000000..ad012274
--- /dev/null
+++ b/langchain4j-spring-core/src/main/java/dev/langchain4j/spring/core/http/package-info.java
@@ -0,0 +1,6 @@
+@NonNullApi
+@NonNullFields
+package dev.langchain4j.spring.core.http;
+
+import org.springframework.lang.NonNullApi;
+import org.springframework.lang.NonNullFields;
diff --git a/models/langchain4j-spring-mistral-ai/pom.xml b/models/langchain4j-spring-mistral-ai/pom.xml
new file mode 100644
index 00000000..56458ad6
--- /dev/null
+++ b/models/langchain4j-spring-mistral-ai/pom.xml
@@ -0,0 +1,102 @@
+
+
+ 4.0.0
+
+
+ dev.langchain4j
+ langchain4j-spring
+ 0.32.0
+ ../../pom.xml
+
+
+ langchain4j-spring-mistral-ai
+ LangChain4j Spring Mistral AI
+
+
+
+
+
+
+ dev.langchain4j
+ langchain4j-spring-core
+ ${project.parent.version}
+ compile
+
+
+
+ dev.langchain4j
+ langchain4j-mistral-ai
+ compile
+
+
+ org.slf4j
+ slf4j-api
+
+
+ com.squareup.retrofit2
+ retrofit
+
+
+ com.squareup.retrofit2
+ converter-jackson
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+
+
+ com.squareup.okhttp3
+ okhttp
+
+
+ com.squareup.okhttp3
+ okhttp-sse
+
+
+ com.google.code.gson
+ gson
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-json
+ compile
+
+
+
+
+
+ org.springframework.boot
+ spring-boot-starter-test
+ test
+
+
+
+ dev.langchain4j
+ langchain4j-embeddings-all-minilm-l6-v2-q
+ ${project.version}
+ test
+
+
+ dev.langchain4j
+ langchain4j-core
+
+
+
+
+
+
+
+
+ Apache-2.0
+ https://www.apache.org/licenses/LICENSE-2.0.txt
+ repo
+ A business-friendly OSS license
+
+
+
+
\ No newline at end of file
diff --git a/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/MistralAiClientConfig.java b/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/MistralAiClientConfig.java
new file mode 100644
index 00000000..2dedcaba
--- /dev/null
+++ b/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/MistralAiClientConfig.java
@@ -0,0 +1,76 @@
+package dev.langchain4j.spring.mistralai.client;
+
+import org.springframework.util.Assert;
+
+import java.net.URI;
+import java.time.Duration;
+
+/**
+ * Options for configuring the Mistral AI client.
+ */
+public record MistralAiClientConfig(
+ URI baseUrl,
+ Duration connectTimeout,
+ Duration readTimeout,
+ String apiKey,
+ boolean logRequests,
+ boolean logResponses
+) {
+
+ public MistralAiClientConfig {
+ Assert.notNull(baseUrl, "baseUrl must not be null");
+ Assert.notNull(connectTimeout, "connectTimeout must not be null");
+ Assert.notNull(readTimeout, "readTimeout must not be null");
+ Assert.hasText(apiKey, "apiKey must not be null or empty");
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ public static class Builder {
+ private URI baseUrl = URI.create("https://api.mistral.ai/v1");
+ private Duration connectTimeout = Duration.ofSeconds(10);
+ private Duration readTimeout = Duration.ofSeconds(60);
+ private String apiKey;
+ private boolean logRequests = false;
+ private boolean logResponses = false;
+
+ private Builder() {}
+
+ public Builder baseUrl(URI baseUrl) {
+ this.baseUrl = baseUrl;
+ return this;
+ }
+
+ public Builder connectTimeout(Duration connectTimeout) {
+ this.connectTimeout = connectTimeout;
+ return this;
+ }
+
+ public Builder readTimeout(Duration readTimeout) {
+ this.readTimeout = readTimeout;
+ return this;
+ }
+
+ public Builder apiKey(String apiKey) {
+ this.apiKey = apiKey;
+ return this;
+ }
+
+ public Builder logRequests(boolean logRequests) {
+ this.logRequests = logRequests;
+ return this;
+ }
+
+ public Builder logResponses(boolean logResponses) {
+ this.logResponses = logResponses;
+ return this;
+ }
+
+ public MistralAiClientConfig build() {
+ return new MistralAiClientConfig(baseUrl, connectTimeout, readTimeout, apiKey, logRequests, logResponses);
+ }
+ }
+
+}
diff --git a/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/SpringMistralAiClient.java b/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/SpringMistralAiClient.java
new file mode 100644
index 00000000..5befbcda
--- /dev/null
+++ b/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/SpringMistralAiClient.java
@@ -0,0 +1,152 @@
+package dev.langchain4j.spring.mistralai.client;
+
+import dev.langchain4j.data.message.AiMessage;
+import dev.langchain4j.model.StreamingResponseHandler;
+import dev.langchain4j.model.mistralai.internal.api.*;
+import dev.langchain4j.model.mistralai.internal.client.MistralAiClient;
+import dev.langchain4j.model.mistralai.internal.client.MistralAiClientBuilderFactory;
+import dev.langchain4j.spring.core.http.HttpLoggingInterceptor;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.boot.web.client.ClientHttpRequestFactories;
+import org.springframework.boot.web.client.ClientHttpRequestFactorySettings;
+import org.springframework.http.HttpHeaders;
+import org.springframework.http.MediaType;
+import org.springframework.http.client.BufferingClientHttpRequestFactory;
+import org.springframework.lang.Nullable;
+import org.springframework.util.Assert;
+import org.springframework.web.client.RestClient;
+
+import java.net.URI;
+import java.util.function.Consumer;
+
+/**
+ * Client for the Mistral AI API.
+ *
+ * @see <a href="https://docs.mistral.ai/api/">Mistral AI API</a>
+ */
+public class SpringMistralAiClient extends MistralAiClient {
+
+ private final Logger logger = LoggerFactory.getLogger(SpringMistralAiClient.class);
+
+ private final RestClient restClient;
+
+ public SpringMistralAiClient(Builder builder) {
+ this(MistralAiClientConfig.builder()
+ .apiKey(builder.apiKey)
+ .baseUrl(URI.create(builder.baseUrl))
+ .connectTimeout(builder.timeout)
+ .readTimeout(builder.timeout)
+ .logRequests(builder.logRequests)
+ .logResponses(builder.logResponses)
+ .build(), RestClient.builder());
+ }
+
+ public SpringMistralAiClient(MistralAiClientConfig clientConfig, RestClient.Builder restClientBuilder) {
+ this.restClient = buildRestClient(clientConfig, restClientBuilder);
+ }
+
+ private RestClient buildRestClient(MistralAiClientConfig clientConfig, RestClient.Builder restClientBuilder) {
+ var clientHttpRequestFactory = new BufferingClientHttpRequestFactory(ClientHttpRequestFactories.get(
+ ClientHttpRequestFactorySettings.DEFAULTS
+ .withConnectTimeout(clientConfig.connectTimeout())
+ .withReadTimeout(clientConfig.readTimeout())));
+
+ Consumer<HttpHeaders> defaultHeaders = headers -> {
+ headers.setContentType(MediaType.APPLICATION_JSON);
+ headers.setBearerAuth(clientConfig.apiKey());
+ };
+
+ return restClientBuilder.requestFactory(clientHttpRequestFactory)
+ .baseUrl(clientConfig.baseUrl().toString())
+ .defaultHeaders(defaultHeaders)
+ .requestInterceptors(interceptors -> {
+ if (clientConfig.logRequests() || clientConfig.logResponses()) {
+ interceptors.add(new HttpLoggingInterceptor(clientConfig.logRequests(), clientConfig.logResponses()));
+ }
+ })
+ .build();
+ }
+
+ /**
+ * Creates a model response for the given chat conversation.
+ */
+ @Nullable
+ @Override
+ public MistralAiChatCompletionResponse chatCompletion(MistralAiChatCompletionRequest chatCompletionRequest) {
+ Assert.notNull(chatCompletionRequest, "chatCompletionRequest cannot be null");
+ Assert.isTrue(!chatCompletionRequest.getStream(), "stream mode must be disabled");
+
+ logger.debug("Sending chat completion request: {}", chatCompletionRequest);
+
+ return this.restClient.post()
+ .uri("/chat/completions")
+ .body(chatCompletionRequest)
+ .retrieve()
+ .body(MistralAiChatCompletionResponse.class);
+ }
+
+ /**
+ * Creates a streaming model response for the given chat conversation.
+ */
+ @Override
+ public void streamingChatCompletion(MistralAiChatCompletionRequest chatCompletionRequest, StreamingResponseHandler<AiMessage> handler) {
+ throw new IllegalStateException("operation not implemented");
+ }
+
+ /**
+ * Creates an embedding vector representing the input text.
+ */
+ @Nullable
+ @Override
+ public MistralAiEmbeddingResponse embedding(MistralAiEmbeddingRequest embeddingRequest) {
+ Assert.notNull(embeddingRequest, "embeddingRequest cannot be null");
+
+ logger.debug("Sending embedding request: {}", embeddingRequest);
+
+ return this.restClient.post()
+ .uri("/embeddings")
+ .body(embeddingRequest)
+ .retrieve()
+ .body(MistralAiEmbeddingResponse.class);
+ }
+
+ /**
+ * List available models.
+ */
+ @Nullable
+ @Override
+ public MistralAiModelResponse listModels() {
+ logger.debug("Sending list models request");
+
+ return this.restClient.get()
+ .uri("/models")
+ .retrieve()
+ .body(MistralAiModelResponse.class);
+ }
+
+ /**
+ * A factory for creating a {@link SpringMistralAiClient.Builder} instance.
+ */
+ public static class SpringMistralAiClientBuilderFactory implements MistralAiClientBuilderFactory {
+
+ @Override
+ public Builder get() {
+ return new Builder();
+ }
+
+ }
+
+ /**
+ * A builder for creating a {@link SpringMistralAiClient} instance.
+ */
+ public static class Builder extends MistralAiClient.Builder {
+
+ @Override
+ public SpringMistralAiClient build() {
+ return new SpringMistralAiClient(this);
+ }
+
+ }
+
+}
diff --git a/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/package-info.java b/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/package-info.java
new file mode 100644
index 00000000..fd6489f0
--- /dev/null
+++ b/models/langchain4j-spring-mistral-ai/src/main/java/dev/langchain4j/spring/mistralai/client/package-info.java
@@ -0,0 +1,6 @@
+@NonNullApi
+@NonNullFields
+package dev.langchain4j.spring.mistralai.client;
+
+import org.springframework.lang.NonNullApi;
+import org.springframework.lang.NonNullFields;
diff --git a/models/langchain4j-spring-mistral-ai/src/main/resources/META-INF/services/dev.langchain4j.model.mistralai.internal.client.MistralAiClientBuilderFactory b/models/langchain4j-spring-mistral-ai/src/main/resources/META-INF/services/dev.langchain4j.model.mistralai.internal.client.MistralAiClientBuilderFactory
new file mode 100644
index 00000000..43246038
--- /dev/null
+++ b/models/langchain4j-spring-mistral-ai/src/main/resources/META-INF/services/dev.langchain4j.model.mistralai.internal.client.MistralAiClientBuilderFactory
@@ -0,0 +1 @@
+dev.langchain4j.spring.mistralai.client.SpringMistralAiClient$SpringMistralAiClientBuilderFactory
\ No newline at end of file
diff --git a/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiChatModelIT.java b/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiChatModelIT.java
new file mode 100644
index 00000000..daecc9cd
--- /dev/null
+++ b/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiChatModelIT.java
@@ -0,0 +1,260 @@
+package dev.langchain4j.spring.mistralai;
+
+import dev.langchain4j.agent.tool.ToolExecutionRequest;
+import dev.langchain4j.agent.tool.ToolSpecification;
+import dev.langchain4j.data.message.*;
+import dev.langchain4j.model.mistralai.MistralAiChatModel;
+import dev.langchain4j.model.mistralai.MistralAiChatModelName;
+import dev.langchain4j.model.mistralai.internal.api.MistralAiResponseFormatType;
+import dev.langchain4j.model.output.FinishReason;
+import dev.langchain4j.model.output.Response;
+import dev.langchain4j.model.output.TokenUsage;
+import org.json.JSONException;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+import org.skyscreamer.jsonassert.JSONAssert;
+import org.skyscreamer.jsonassert.JSONCompareMode;
+
+import java.util.List;
+
+import static dev.langchain4j.agent.tool.JsonSchemaProperty.INTEGER;
+import static dev.langchain4j.data.message.UserMessage.userMessage;
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * Integration tests for {@link MistralAiChatModel}.
+ * Adapted from MistralAiChatModelIT in the LangChain4j project.
+ */
+@EnabledIfEnvironmentVariable(named = "MISTRAL_AI_API_KEY", matches = ".*")
+class MistralAiChatModelIT {
+
+ private static final String apiKey = System.getenv("MISTRAL_AI_API_KEY");
+
+ private final ToolSpecification calculator = ToolSpecification.builder()
+ .name("calculator")
+ .description("returns a sum of two numbers")
+ .addParameter("first", INTEGER)
+ .addParameter("second", INTEGER)
+ .build();
+
+ @Test
+ void generateText() {
+ MistralAiChatModel chatModel = MistralAiChatModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiChatModelName.OPEN_MISTRAL_7B)
+ .logRequests(true)
+ .build();
+
+ UserMessage userMessage = UserMessage.from("What is the capital of Italy?");
+
+ Response<AiMessage> response = chatModel.generate(userMessage);
+
+ AiMessage aiMessage = response.content();
+ assertThat(aiMessage.text()).contains("Rome");
+ assertThat(aiMessage.toolExecutionRequests()).isNull();
+
+ TokenUsage tokenUsage = response.tokenUsage();
+ assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
+ assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
+ assertThat(tokenUsage.totalTokenCount())
+ .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());
+
+ assertThat(response.finishReason()).isEqualTo(FinishReason.STOP);
+ }
+
+ @Test
+ void generateTextTooLong() {
+ MistralAiChatModel chatModel = MistralAiChatModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiChatModelName.OPEN_MISTRAL_7B)
+ .maxTokens(1)
+ .logRequests(true)
+ .build();
+
+ UserMessage userMessage = UserMessage.from("What is the capital of Denmark?");
+
+ Response<AiMessage> response = chatModel.generate(userMessage);
+
+ AiMessage aiMessage = response.content();
+ assertThat(aiMessage.text()).isNotBlank();
+
+ TokenUsage tokenUsage = response.tokenUsage();
+ assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
+ assertThat(tokenUsage.outputTokenCount()).isEqualTo(1);
+ assertThat(tokenUsage.totalTokenCount())
+ .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());
+
+ assertThat(response.finishReason()).isEqualTo(FinishReason.LENGTH);
+ }
+
+ @Test
+ void generateTextWithFewShots() {
+ MistralAiChatModel chatModel = MistralAiChatModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiChatModelName.OPEN_MISTRAL_7B)
+ .logRequests(true)
+ .build();
+
+ List<ChatMessage> messages = List.of(
+ UserMessage.from("1 + 1 ="), AiMessage.from(">>> 2"),
+ UserMessage.from("2 + 2 ="), AiMessage.from(">>> 4"),
+ UserMessage.from("4 + 4 ="));
+
+ Response<AiMessage> response = chatModel.generate(messages);
+
+ assertThat(response.content().text()).startsWith(">>> 8");
+ }
+
+ @Test
+ void generateTextWithSystemMessage() {
+ MistralAiChatModel chatModel = MistralAiChatModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiChatModelName.OPEN_MISTRAL_7B)
+ .logRequests(true)
+ .build();
+
+ SystemMessage systemMessage = SystemMessage.from("Start every answer with Ahoy");
+ UserMessage userMessage = UserMessage.from("Hello, captain!");
+
+ Response<AiMessage> response = chatModel.generate(systemMessage, userMessage);
+
+ assertThat(response.content().text()).containsIgnoringCase("Ahoy");
+ }
+
+ @Test
+ void generateTextAsJson() throws JSONException {
+ MistralAiChatModel chatModel = MistralAiChatModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiChatModelName.OPEN_MISTRAL_7B)
+ .temperature(0.1)
+ .responseFormat(MistralAiResponseFormatType.JSON_OBJECT)
+ .logRequests(true)
+ .build();
+
+ String response = chatModel
+ .generate("Return a JSON object with two fields: location is Jungle and name is Jumanji.");
+
+ JSONAssert.assertEquals("""
+ {
+ "name": "Jumanji",
+ "location": "Jungle"
+ }
+ """, response, JSONCompareMode.STRICT);
+ }
+
+ @Test
+ void executeToolExplicitlyAndThenGenerateAnswer() throws JSONException {
+ MistralAiChatModel chatModel = MistralAiChatModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiChatModelName.OPEN_MIXTRAL_8X22B)
+ .temperature(0.1)
+ .logRequests(true)
+ .build();
+
+ // Execute tool
+
+ UserMessage userMessage = userMessage("2+2=?");
+ List<ToolSpecification> toolSpecifications = List.of(calculator);
+
+ Response<AiMessage> response = chatModel.generate(List.of(userMessage), toolSpecifications);
+
+ AiMessage aiMessage = response.content();
+ assertThat(aiMessage.text()).isNull();
+ assertThat(aiMessage.toolExecutionRequests()).hasSize(1);
+
+ ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
+ assertThat(toolExecutionRequest.id()).isNotBlank();
+ assertThat(toolExecutionRequest.name()).isEqualTo("calculator");
+ JSONAssert.assertEquals("""
+ {
+ "first": 2,
+ "second": 2
+ }
+ """, toolExecutionRequest.arguments(), JSONCompareMode.STRICT);
+
+ TokenUsage tokenUsage = response.tokenUsage();
+ assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
+ assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
+ assertThat(tokenUsage.totalTokenCount())
+ .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());
+
+ assertThat(response.finishReason()).isEqualTo(FinishReason.TOOL_EXECUTION);
+
+ // Then answer
+
+ ToolExecutionResultMessage toolExecutionResultMessage = ToolExecutionResultMessage.from(toolExecutionRequest, "4");
+ List<ChatMessage> messages = List.of(userMessage, aiMessage, toolExecutionResultMessage);
+
+ Response<AiMessage> secondResponse = chatModel.generate(messages);
+
+ AiMessage secondAiMessage = secondResponse.content();
+ assertThat(secondAiMessage.text()).contains("4");
+ assertThat(secondAiMessage.toolExecutionRequests()).isNull();
+
+ TokenUsage secondTokenUsage = secondResponse.tokenUsage();
+ assertThat(secondTokenUsage.inputTokenCount()).isGreaterThan(0);
+ assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0);
+ assertThat(secondTokenUsage.totalTokenCount())
+ .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount());
+
+ assertThat(secondResponse.finishReason()).isEqualTo(FinishReason.STOP);
+ }
+
+ @Test
+ void executeToolImplicitlyAndThenGenerateAnswer() throws JSONException {
+ MistralAiChatModel chatModel = MistralAiChatModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiChatModelName.OPEN_MIXTRAL_8X22B)
+ .temperature(0.1)
+ .logRequests(true)
+ .build();
+
+ // Execute tool
+
+ UserMessage userMessage = userMessage("2+2=?");
+
+ Response<AiMessage> response = chatModel.generate(List.of(userMessage), calculator);
+
+ AiMessage aiMessage = response.content();
+ assertThat(aiMessage.text()).isNull();
+ assertThat(aiMessage.toolExecutionRequests()).hasSize(1);
+
+ ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
+ assertThat(toolExecutionRequest.id()).isNotBlank();
+ assertThat(toolExecutionRequest.name()).isEqualTo("calculator");
+ JSONAssert.assertEquals("""
+ {
+ "first": 2,
+ "second": 2
+ }
+ """, toolExecutionRequest.arguments(), JSONCompareMode.STRICT);
+
+ TokenUsage tokenUsage = response.tokenUsage();
+ assertThat(tokenUsage.inputTokenCount()).isGreaterThan(0);
+ assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0);
+ assertThat(tokenUsage.totalTokenCount())
+ .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount());
+
+ assertThat(response.finishReason()).isEqualTo(FinishReason.TOOL_EXECUTION);
+
+ // Then answer
+
+ ToolExecutionResultMessage toolExecutionResultMessage = ToolExecutionResultMessage.from(toolExecutionRequest, "4");
+ List<ChatMessage> messages = List.of(userMessage, aiMessage, toolExecutionResultMessage);
+
+ Response<AiMessage> secondResponse = chatModel.generate(messages);
+
+ AiMessage secondAiMessage = secondResponse.content();
+ assertThat(secondAiMessage.text()).contains("4");
+ assertThat(secondAiMessage.toolExecutionRequests()).isNull();
+
+ TokenUsage secondTokenUsage = secondResponse.tokenUsage();
+ assertThat(secondTokenUsage.inputTokenCount()).isGreaterThan(0);
+ assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0);
+ assertThat(secondTokenUsage.totalTokenCount())
+ .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount());
+
+ assertThat(secondResponse.finishReason()).isEqualTo(FinishReason.STOP);
+ }
+
+}
diff --git a/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiEmbeddingModelIT.java b/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiEmbeddingModelIT.java
new file mode 100644
index 00000000..be31ea42
--- /dev/null
+++ b/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiEmbeddingModelIT.java
@@ -0,0 +1,72 @@
+package dev.langchain4j.spring.mistralai;
+
+import dev.langchain4j.data.embedding.Embedding;
+import dev.langchain4j.data.segment.TextSegment;
+import dev.langchain4j.model.mistralai.MistralAiEmbeddingModel;
+import dev.langchain4j.model.mistralai.MistralAiEmbeddingModelName;
+import dev.langchain4j.model.output.Response;
+import dev.langchain4j.model.output.TokenUsage;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+
+import java.util.List;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * Integration tests for {@link MistralAiEmbeddingModel}.
+ * Adapted from MistralAiEmbeddingModelIT in the LangChain4j project.
+ */
+@EnabledIfEnvironmentVariable(named = "MISTRAL_AI_API_KEY", matches = ".*")
+class MistralAiEmbeddingModelIT {
+
+ private static final String apiKey = System.getenv("MISTRAL_AI_API_KEY");
+
+ @Test
+ void generateSingleEmbedding() {
+ MistralAiEmbeddingModel embeddingModel = MistralAiEmbeddingModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiEmbeddingModelName.MISTRAL_EMBED)
+ .logRequests(true)
+ .build();
+
+ String text = "Welcome to the jungle";
+
+ Response<Embedding> response = embeddingModel.embed(text);
+
+ assertThat(response.content().vector()).hasSize(1024);
+
+ TokenUsage tokenUsage = response.tokenUsage();
+ assertThat(tokenUsage.inputTokenCount()).isEqualTo(7);
+ assertThat(tokenUsage.outputTokenCount()).isEqualTo(0);
+ assertThat(tokenUsage.totalTokenCount()).isEqualTo(7);
+
+ assertThat(response.finishReason()).isNull();
+ }
+
+ @Test
+ void generateMultipleEmbeddings() {
+ MistralAiEmbeddingModel embeddingModel = MistralAiEmbeddingModel.builder()
+ .apiKey(apiKey)
+ .modelName(MistralAiEmbeddingModelName.MISTRAL_EMBED)
+ .logRequests(true)
+ .build();
+
+ TextSegment textSegment1 = TextSegment.from("Welcome to the jungle");
+ TextSegment textSegment2 = TextSegment.from("Welcome to Jumanji");
+
+ Response<List<Embedding>> response = embeddingModel.embedAll(List.of(textSegment1, textSegment2));
+
+ assertThat(response.content()).hasSize(2);
+ assertThat(response.content().get(0).dimension()).isEqualTo(1024);
+ assertThat(response.content().get(1).dimension()).isEqualTo(1024);
+
+ TokenUsage tokenUsage = response.tokenUsage();
+ assertThat(tokenUsage.inputTokenCount()).isEqualTo(14);
+ assertThat(tokenUsage.outputTokenCount()).isEqualTo(0);
+ assertThat(tokenUsage.totalTokenCount()).isEqualTo(14);
+
+ assertThat(response.finishReason()).isNull();
+ }
+
+}
diff --git a/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiModelsIT.java b/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiModelsIT.java
new file mode 100644
index 00000000..a9fd7fe9
--- /dev/null
+++ b/models/langchain4j-spring-mistral-ai/src/test/java/dev/langchain4j/spring/mistralai/MistralAiModelsIT.java
@@ -0,0 +1,39 @@
+package dev.langchain4j.spring.mistralai;
+
+import dev.langchain4j.model.mistralai.MistralAiChatModelName;
+import dev.langchain4j.model.mistralai.MistralAiModels;
+import dev.langchain4j.model.mistralai.internal.api.MistralAiModelCard;
+import dev.langchain4j.model.output.Response;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable;
+
+import java.util.List;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/**
+ * Integration tests for {@link MistralAiModels}.
+ * Adapted from MistralAiModelsIT in the LangChain4j project.
+ */
+@EnabledIfEnvironmentVariable(named = "MISTRAL_AI_API_KEY", matches = ".*")
+class MistralAiModelsIT {
+
+ private static final String apiKey = System.getenv("MISTRAL_AI_API_KEY");
+
+ @Test
+ void getAllModels() {
+ MistralAiModels models = MistralAiModels.builder()
+ .apiKey(apiKey)
+ .logRequests(true)
+ .build();
+
+ Response<List<MistralAiModelCard>> response = models.availableModels();
+
+ List<MistralAiModelCard> modelCards = response.content();
+ assertThat(modelCards.size()).isGreaterThan(0);
+ assertThat(modelCards).extracting("id").contains(MistralAiChatModelName.OPEN_MISTRAL_7B.toString());
+ assertThat(modelCards).extracting("object").contains("model");
+ assertThat(modelCards).extracting("permission").isNotNull();
+ }
+
+}
diff --git a/pom.xml b/pom.xml
index 03080fca..deed3e6b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -14,9 +14,16 @@
https://github.com/langchain4j/langchain4j-spring
+ langchain4j-spring-core
+
+ models/langchain4j-spring-mistral-ai
+
+ langchain4j-spring-boot-autoconfigure
langchain4j-spring-boot-starter
langchain4j-spring-boot-tests
+ langchain4j-spring-boot-starters/langchain4j-mistral-ai-spring-boot-starter
+
langchain4j-anthropic-spring-boot-starter
langchain4j-ollama-spring-boot-starter
langchain4j-open-ai-spring-boot-starter
@@ -34,14 +41,12 @@
17
UTF-8
2.6.2
- 3.2.6
- 1.19.8
+ 3.2.7
2.6.2
-
dev.langchain4j
langchain4j-bom
@@ -52,26 +57,10 @@
org.springframework.boot
- spring-boot-starter
- ${spring.boot.version}
-
-
-
- org.springframework.boot
- spring-boot-autoconfigure-processor
- ${spring.boot.version}
-
-
-
- org.springframework.boot
- spring-boot-configuration-processor
- ${spring.boot.version}
-
-
-
- org.springframework.boot
- spring-boot-starter-test
- ${spring.boot.version}
+ spring-boot-dependencies
+ ${spring-boot.version}
+ pom
+ import
@@ -80,14 +69,6 @@
1.18.32
-
- org.testcontainers
- testcontainers-bom
- ${testcontainers.version}
- import
- pom
-
-
org.tinylog
tinylog-impl