Skip to content

Commit

Permalink
Update package
Browse files — browse the repository at this point in the history
  • Loading branch information
vmanot committed Mar 22, 2024
1 parent 60f1c7b commit 43e8ba2
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 5 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ extension Anthropic: LLMRequestHandling {
\.complete,
with: Anthropic.API.RequestBodies.Complete(
prompt: prompt.prefix.promptLiteral._stripToText(),
model: .claude_v2,
model: .claude_3_opus_20240229,
maxTokensToSample: parameters.tokenLimit.fixedValue ?? 256,
stopSequences: parameters.stops,
stream: false,
Expand Down
8 changes: 4 additions & 4 deletions Sources/Anthropic/Intramodular/Anthropic.API.swift
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ extension Anthropic {
public typealias API = Anthropic.API

case apiKeyMissing
case incorrectAPIKeyProvided
case invalidAPIKey
case rateLimitExceeded
case badRequest(API.Request.Error)
case runtime(AnyError)
Expand Down Expand Up @@ -89,10 +89,10 @@ extension Anthropic.API {
).error {
print(error.message)

if error.message.contains("You didn't provide an API key") {
if error.message.contains("x-api-key header is required") {
throw Error.apiKeyMissing
} else if error.message.contains("Incorrect API key provided") {
throw Error.incorrectAPIKeyProvided
} else if error.message.contains("invalid x-api-key") {
throw Error.invalidAPIKey
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,26 @@ extension LLMRequestHandling {
return try await complete(prompt: prompt)
}

/// Completes a chat conversation supplied as a raw message list.
///
/// Convenience overload: wraps `messages` in an `AbstractLLM.ChatPrompt`
/// and forwards to `complete(prompt:)`.
///
/// - Parameter messages: The conversation history to complete.
/// - Returns: The resulting chat completion.
/// - Throws: Whatever `complete(prompt:)` throws.
public func complete(
    _ messages: [AbstractLLM.ChatMessage]
) async throws -> AbstractLLM.ChatCompletion {
    try await complete(prompt: AbstractLLM.ChatPrompt(messages: messages))
}

/// Streams a chat completion for a conversation supplied as a raw message list.
///
/// Convenience overload: wraps `messages` in an `AbstractLLM.ChatPrompt`
/// and forwards to `completion(for:)`.
///
/// - Parameter messages: The conversation history to complete.
/// - Returns: A stream of completion events.
/// - Throws: Whatever `completion(for:)` throws.
public func completion(
    for messages: [AbstractLLM.ChatMessage]
) async throws -> AbstractLLM.ChatCompletionStream {
    try await completion(for: AbstractLLM.ChatPrompt(messages: messages))
}

public func complete(
_ message: AbstractLLM.ChatMessage,
model: some _MLModelIdentifierConvertible
Expand Down

0 comments on commit 43e8ba2

Please sign in to comment.