Add an example of how to use Anthropic prompt caching (#133)
Claudio-code authored Dec 2, 2024
1 parent c2984fd commit d07ffb3
Showing 1 changed file with 31 additions and 4 deletions.
35 changes: 31 additions & 4 deletions anthropic-examples/src/main/java/AnthropicChatModelTest.java
@@ -1,8 +1,7 @@
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ImageContent;
import dev.langchain4j.data.message.TextContent;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.message.*;
import dev.langchain4j.model.anthropic.AnthropicChatModel;
import dev.langchain4j.model.anthropic.AnthropicChatModelName;
import dev.langchain4j.model.anthropic.AnthropicTokenUsage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.output.Response;
import org.junit.jupiter.api.Test;
@@ -45,4 +44,32 @@ void AnthropicChatModel_with_vision_Example() {

assertThat(response.content().text()).containsIgnoringCase("RAG");
}

@Test
void AnthropicChatModel_with_cache_system_message_Example() {
ChatLanguageModel modelWithCache = AnthropicChatModel.builder()
.apiKey(System.getenv("ANTHROPIC_API_KEY"))
.beta("prompt-caching-2024-07-31")
.modelName(AnthropicChatModelName.CLAUDE_3_HAIKU_20240307)
.cacheSystemMessages(true)
.logRequests(true)
.logResponses(true)
.build();

// Prompt caching is currently in beta.
// Anthropic allows up to 4 cache breakpoints (system messages/tools) per request.

// first call: create the cache entry
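// The system message is repeated so the prompt exceeds Anthropic's minimum cacheable length.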
SystemMessage systemMessage = SystemMessage.from("What types of messages are supported in LangChain?".repeat(187));
UserMessage userMessage = UserMessage.userMessage("What is the result of solving 5x^2 + 2x + 2 = 0?");
Response<AiMessage> response = modelWithCache.generate(systemMessage, userMessage);

AnthropicTokenUsage createCacheTokenUsage = (AnthropicTokenUsage) response.tokenUsage();
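// cacheCreationInputTokens > 0 means the prompt was written to the cache on this request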
assertThat(createCacheTokenUsage.cacheCreationInputTokens()).isGreaterThan(0);

// second call: read from the cache created above
Response<AiMessage> responseToReadCache = modelWithCache.generate(systemMessage, userMessage);
AnthropicTokenUsage readCacheTokenUsage = (AnthropicTokenUsage) responseToReadCache.tokenUsage();
assertThat(readCacheTokenUsage.cacheReadInputTokens()).isGreaterThan(0);
}
}
