Skip to content

Commit

Permalink
Code fixes for examples
Browse files Browse the repository at this point in the history
  • Loading branch information
Winston-503 committed Oct 10, 2024
1 parent ab3c198 commit a92d67a
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 4 deletions.
8 changes: 5 additions & 3 deletions docs/source/reference/llm/llm_message.rst
Original file line number Diff line number Diff line change
Expand Up @@ -27,14 +27,16 @@ Here's how you can use Anthropic prompt caching with council.
os.environ["ANTHROPIC_API_KEY"] = "sk-YOUR-KEY-HERE"
os.environ["ANTHROPIC_LLM_MODEL"] = "claude-3-haiku-20240307"

# Ensure that the number of tokens in a cacheable message exceeds
# the minimum cacheable token count, which is 2048 for Haiku;
# otherwise, the message will not be cached.
HUGE_STATIC_SYSTEM_PROMPT = ""

# Create a system message with ephemeral caching
system_message_with_cache = LLMMessage.system_message(
HUGE_STATIC_SYSTEM_PROMPT,
data=[LLMCacheControlData.ephemeral()]
)
# Ensure that the number of tokens in a cacheable message exceeds
# the minimum cacheable token count, which is 2048 for Haiku;
# otherwise, the message will not be cached.

# Initialize the messages list with cacheable system message
messages = [
Expand Down
4 changes: 3 additions & 1 deletion docs/source/reference/llm/llm_response_parser.rst
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,8 @@ Here's how you can simplify :class:`council.llm.LLMFunction` example for a sampl
from council.llm.llm_function import LLMFunction
from council.llm.llm_response_parser import CodeBlocksResponseParser

SYSTEM_PROMPT = "same system prompt as in LLMFunction example"


# CodeBlocksResponseParser will provide from_response() automatically for you
class SQLResultFromCodeBlocks(CodeBlocksResponseParser):
Expand All @@ -40,7 +42,7 @@ Here's how you can simplify :class:`council.llm.LLMFunction` example for a sampl

# All the remaining code stays the same
llm_function: LLMFunction[SQLResultFromCodeBlocks] = LLMFunction(
llm, SQLResultFromCodeBlocks.from_response, SYSTEM_PROMPT # system prompt is the same
llm, SQLResultFromCodeBlocks.from_response, SYSTEM_PROMPT
)

response = llm_function.execute(
Expand Down

0 comments on commit a92d67a

Please sign in to comment.