Showing 7 changed files with 169 additions and 73 deletions.
@@ -0,0 +1,12 @@
import abc
from abc import ABC
from typing import Sequence, List

from council.llm import LLMMessage


class AnthropicAPIClientWrapper(ABC):

    @abc.abstractmethod
    def post_chat_request(self, messages: Sequence[LLMMessage]) -> List[str]:
        pass
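The wrapper above only fixes the interface: a concrete client turns a sequence of LLMMessage objects into provider calls and returns the generated texts. A minimal hypothetical sketch of a subclass (illustration only, not part of this commit; the echo behaviour stands in for a real API call):

# Hypothetical illustration only -- not part of this commit.
from typing import Sequence, List

from council.llm import LLMMessage
from council.llm.anthropic import AnthropicAPIClientWrapper


class EchoClientWrapper(AnthropicAPIClientWrapper):
    """Toy implementation that simply echoes the last message back."""

    def post_chat_request(self, messages: Sequence[LLMMessage]) -> List[str]:
        # A real implementation would call the Anthropic API here.
        return [messages[-1].content]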
@@ -0,0 +1,58 @@
from typing import Sequence, List

from anthropic import Anthropic
from anthropic._types import NOT_GIVEN

from council.llm import AnthropicLLMConfiguration, LLMMessage, LLMMessageRole
from council.llm.anthropic import AnthropicAPIClientWrapper

_HUMAN_TURN = Anthropic.HUMAN_PROMPT
_ASSISTANT_TURN = Anthropic.AI_PROMPT


class AnthropicCompletionLLM(AnthropicAPIClientWrapper):
    """
    Implementation for an Anthropic LLM with LEGACY completion.
    Notes:
        More details: https://docs.anthropic.com/claude/docs
        and https://docs.anthropic.com/claude/reference/complete_post
    """

    def __init__(self, config: AnthropicLLMConfiguration, client: Anthropic) -> None:
        self._config = config
        self._client = client

    def post_chat_request(self, messages: Sequence[LLMMessage]) -> List[str]:
        prompt = self._to_anthropic_messages(messages)
        result = self._client.completions.create(
            prompt=prompt,
            model=self._config.model.unwrap(),
            max_tokens_to_sample=self._config.max_tokens.unwrap(),
            timeout=self._config.timeout.value,
            temperature=self._config.temperature.unwrap_or(NOT_GIVEN),
            top_k=self._config.top_k.unwrap_or(NOT_GIVEN),
            top_p=self._config.top_p.unwrap_or(NOT_GIVEN),
        )
        return [result.completion]

    @staticmethod
    def _to_anthropic_messages(messages: Sequence[LLMMessage]) -> str:
        messages_count = len(messages)
        if messages_count == 0:
            raise Exception("No message to process.")

        result = []
        if messages[0].is_of_role(LLMMessageRole.System) and messages_count > 1:
            result.append(f"{_HUMAN_TURN} {messages[0].content}\n{messages[1].content}")
            remaining = messages[2:]
        else:
            result.append(f"{_HUMAN_TURN} {messages[0].content}")
            remaining = messages[1:]

        for item in remaining:
            prefix = _HUMAN_TURN if item.is_of_role(LLMMessageRole.User) else _ASSISTANT_TURN
            result.append(f"{prefix} {item.content}")
        result.append(_ASSISTANT_TURN)

        return "".join(result)
@@ -0,0 +1,61 @@
from __future__ import annotations

from typing import Sequence, List, Iterable, Literal

from anthropic import Anthropic
from anthropic._types import NOT_GIVEN
from anthropic.types import MessageParam

from council.llm import (
    LLMMessage,
    LLMMessageRole,
    AnthropicLLMConfiguration,
)
from council.llm.anthropic import AnthropicAPIClientWrapper


class AnthropicMessagesLLM(AnthropicAPIClientWrapper):
    """
    Implementation for an Anthropic LLM.
    Notes:
        More details: https://docs.anthropic.com/claude/docs
        and https://docs.anthropic.com/claude/reference/messages_post
    """

    def __init__(self, config: AnthropicLLMConfiguration, client: Anthropic) -> None:
        self._config = config
        self._client = client

    def post_chat_request(self, messages: Sequence[LLMMessage]) -> List[str]:
        messages_formatted = self._to_anthropic_messages(messages)
        completion = self._client.messages.create(
            messages=messages_formatted,
            model=self._config.model.unwrap(),
            max_tokens=self._config.max_tokens.unwrap(),
            timeout=self._config.timeout.value,
            temperature=self._config.temperature.unwrap_or(NOT_GIVEN),
            top_k=self._config.top_k.unwrap_or(NOT_GIVEN),
            top_p=self._config.top_p.unwrap_or(NOT_GIVEN),
        )
        return [content.text for content in completion.content]

    @staticmethod
    def _to_anthropic_messages(messages: Sequence[LLMMessage]) -> Iterable[MessageParam]:
        result: List[MessageParam] = []
        temp_content = ""
        role: Literal["user", "assistant"] = "user"

        for message in messages:
            if message.is_of_role(LLMMessageRole.System):
                temp_content += message.content
            else:
                temp_content += message.content
                result.append(MessageParam(role=role, content=temp_content))
                temp_content = ""
                role = "assistant" if role == "user" else "user"

        if temp_content:
            result.append(MessageParam(role=role, content=temp_content))

        return result
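The Messages endpoint instead takes a list of role/content entries whose roles alternate between user and assistant, so the conversion above accumulates system message content and folds it into the next non-system message, flipping the expected role each time an entry is emitted. A standalone sketch of that grouping, with plain dicts standing in for anthropic.types.MessageParam:

# Standalone sketch of the grouping above; plain dicts stand in for
# anthropic.types.MessageParam.
def to_message_params(turns):
    # turns: list of (role, content) pairs, role in {"system", "user", "assistant"}
    result = []
    pending = ""
    role = "user"
    for r, content in turns:
        if r == "system":
            pending += content  # fold system text into the next message
        else:
            pending += content
            result.append({"role": role, "content": pending})
            pending = ""
            role = "assistant" if role == "user" else "user"
    if pending:  # trailing system-only content is still emitted
        result.append({"role": role, "content": pending})
    return result


print(to_message_params([("system", "Be terse. "), ("user", "Hi"), ("assistant", "Hello"), ("user", "Bye")]))
# -> [{'role': 'user', 'content': 'Be terse. Hi'},
#     {'role': 'assistant', 'content': 'Hello'},
#     {'role': 'user', 'content': 'Bye'}]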
@@ -7,7 +7,7 @@ progressbar==2.5
 tiktoken==0.5.1

 # LLMs
-anthropic>=0.5.0
+anthropic>=0.20.0

 # Skills