Skip to content

Commit

Permalink
Integrate deepseek (#105)
Browse files Browse the repository at this point in the history
  • Loading branch information
gromdimon authored Jan 27, 2025
1 parent 03fe3c0 commit 1c1ed49
Show file tree
Hide file tree
Showing 5 changed files with 59 additions and 2 deletions.
4 changes: 4 additions & 0 deletions .env.dev
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ OPENAI_API_KEY=
XAI_MODEL=grok-2-latest
XAI_API_KEY=

DEEPSEEK_API_KEY=
DEEPSEEK_MODEL=deepseek-chat
DEEPSEEK_API_BASE_URL=https://api.deepseek.com

# === Third-party services settings ===

# Perplexity
Expand Down
5 changes: 5 additions & 0 deletions src/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,11 @@ class Settings(BaseSettings):
XAI_API_KEY: str = ""
XAI_MODEL: str = "grok-2-latest"

#: DeepSeek
DEEPSEEK_API_KEY: str = ""
DEEPSEEK_MODEL: str = "deepseek-reasoner"
DEEPSEEK_API_BASE_URL: str = "https://api.deepseek.com"

#: Llama
LLAMA_PROVIDER: LlamaProviderType = LlamaProviderType.OLLAMA
LLAMA_MODEL_NAME: str = "llama3-8b-8192" # Model name is usually unique for each provider
Expand Down
3 changes: 2 additions & 1 deletion src/core/defs.py
Original file line number Diff line number Diff line change
Expand Up @@ -67,12 +67,13 @@ class LLMProviderType(str, Enum):
ANTHROPIC = "anthropic"
XAI = "xai"
LLAMA = "llama"
DEEPSEEK = "deepseek"


class LlamaProviderType(str, Enum):
    """Supported backends for serving Llama-family models.

    The string value is the identifier used in configuration
    (e.g. the ``LLAMA_PROVIDER`` setting).
    """

    OLLAMA = "ollama"
    FIREWORKS = "fireworks"
    # Value uses an underscore ("llama_api"), matching the env-var friendly
    # convention of the other settings (renamed from the older "llama-api").
    LLAMA_API = "llama_api"
    OPENROUTER = "openrouter"
5 changes: 4 additions & 1 deletion src/llm/llm.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from src.core.defs import LLMProviderType
from src.core.exceptions import LLMError
from src.llm.providers.anthropic import call_anthropic
from src.llm.providers.deepseek import call_deepseek
from src.llm.providers.llama import call_llama
from src.llm.providers.oai import call_openai
from src.llm.providers.xai import call_xai
Expand All @@ -20,7 +21,7 @@ class LLM:
def __init__(self):
"""
Initialize the LLM class based on the selected provider from settings.
Supported providers: 'openai', 'anthropic', 'xai'
Supported providers: 'openai', 'anthropic', 'xai', 'llama', 'deepseek'
"""
self.provider = settings.LLM_PROVIDER
logger.debug(f"Using LLM provider: {self.provider}")
Expand Down Expand Up @@ -52,6 +53,8 @@ async def generate_response(self, messages: List[Dict[str, Any]], **kwargs) -> s
return await call_xai(messages, **kwargs)
elif self.provider == LLMProviderType.LLAMA:
return await call_llama(messages, **kwargs)
elif self.provider == LLMProviderType.DEEPSEEK:
return await call_deepseek(messages, **kwargs)
else:
raise LLMError(f"Unknown LLM provider: {self.provider}")

Expand Down
44 changes: 44 additions & 0 deletions src/llm/providers/deepseek.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
from typing import Dict, List

from loguru import logger
from openai import AsyncOpenAI, OpenAI

from src.core.config import settings
from src.core.exceptions import LLMError


async def call_deepseek(messages: List[Dict[str, str]], **kwargs) -> str:
    """Call the DeepSeek chat-completions API and return the reply text.

    DeepSeek exposes an OpenAI-compatible endpoint, so the OpenAI SDK is
    reused with a custom ``base_url``. The async client is used so this
    coroutine does not block the running event loop.

    Args:
        messages: A list of dicts with 'role' and 'content' keys.
        **kwargs: Optional overrides, e.g. ``model`` (defaults to
            ``settings.DEEPSEEK_MODEL``) and ``temperature`` (defaults to 0.7).

    Returns:
        str: The stripped response content from DeepSeek.

    Raises:
        LLMError: If the API call fails or the response contains no content.
    """
    model = kwargs.get("model", settings.DEEPSEEK_MODEL)
    temperature = kwargs.get("temperature", 0.7)

    # AsyncOpenAI, not OpenAI: the sync client would block the event loop
    # for the entire duration of the HTTP request inside this coroutine.
    client = AsyncOpenAI(
        api_key=settings.DEEPSEEK_API_KEY, base_url=settings.DEEPSEEK_API_BASE_URL
    )

    logger.debug(
        f"Calling DeepSeek with model={model}, temperature={temperature}, messages={messages}"
    )

    try:
        response = await client.chat.completions.create(
            model=model,
            messages=messages,  # type: ignore
            temperature=temperature,
        )
        # Guard against an empty choices list as well as empty/None content.
        if not response.choices or not response.choices[0].message.content:
            raise LLMError("No content in DeepSeek response")

        content = response.choices[0].message.content.strip()
        logger.debug(f"DeepSeek response: {content}")
        return content
    except LLMError:
        # Already a domain error with a specific message — don't re-wrap it
        # into the generic "Error during DeepSeek API call" below.
        raise
    except Exception as e:
        logger.error(f"DeepSeek call failed: {e}")
        raise LLMError("Error during DeepSeek API call") from e

0 comments on commit 1c1ed49

Please sign in to comment.