diff --git a/readmeai/cli/options.py b/readmeai/cli/options.py index 7115cefb..26c00321 100644 --- a/readmeai/cli/options.py +++ b/readmeai/cli/options.py @@ -52,6 +52,7 @@ class ModelOptions(str, Enum): OLLAMA = "OLLAMA" OPENAI = "OPENAI" GEMINI = "GEMINI" + TELNYX = "TELNYX" def prompt_for_image( diff --git a/readmeai/config/settings.py b/readmeai/config/settings.py index 29a6aa22..3c0fdce7 100644 --- a/readmeai/config/settings.py +++ b/readmeai/config/settings.py @@ -108,6 +108,8 @@ class ModelSettings(BaseModel): temperature: Optional[float] tokens: Optional[int] top_p: Optional[float] + telnyx_base_url: Optional[HttpUrl] + telnyx_base_model: Optional[str] class Settings(BaseModel): diff --git a/readmeai/config/settings/config.toml b/readmeai/config/settings/config.toml index c3511b96..e04b2339 100644 --- a/readmeai/config/settings/config.toml +++ b/readmeai/config/settings/config.toml @@ -28,6 +28,8 @@ model = "gpt-3.5-turbo" temperature = 0.9 tokens = 650 top_p = 0.9 +telnyx_base_url = "https://api.telnyx.com/v2/ai/chat/completions" +telnyx_base_model = "meta-llama/Meta-Llama-3-70B-Instruct" # Markdown Template Settings [md] diff --git a/readmeai/core/utils.py b/readmeai/core/utils.py index b0d5f6b3..36654bf4 100644 --- a/readmeai/core/utils.py +++ b/readmeai/core/utils.py @@ -19,6 +19,7 @@ class SecretKey(str, Enum): OLLAMA_HOST = "OLLAMA_HOST" OPENAI_API_KEY = "OPENAI_API_KEY" GOOGLE_API_KEY = "GOOGLE_API_KEY" + TELNYX_API_KEY = "TELNYX_API_KEY" def _set_offline(message: str) -> tuple: @@ -33,12 +34,14 @@ def get_environment(llm_api: str = "", llm_model: str = "") -> tuple: llms.OPENAI.name: "gpt-3.5-turbo", llms.OLLAMA.name: "mistral", llms.GEMINI.name: "gemini-pro", + llms.TELNYX.name: "meta-llama/Meta-Llama-3-70B-Instruct", } env_keys = { llms.OPENAI.name: SecretKey.OPENAI_API_KEY.value, llms.OLLAMA.name: SecretKey.OLLAMA_HOST.value, llms.GEMINI.name: SecretKey.GOOGLE_API_KEY.value, + llms.TELNYX.name: SecretKey.TELNYX_API_KEY.value, } if llm_api and 
llm_api not in env_keys: @@ -67,6 +70,15 @@ def get_environment(llm_api: str = "", llm_model: str = "") -> tuple: "GOOGLE_API_KEY not found in environment. Switching to offline mode." ) + # If TELNYX_API_KEY does not exist in env when --api telnyx is set + if ( + llm_api == llms.TELNYX.name + and SecretKey.TELNYX_API_KEY.value not in os.environ + ): + return _set_offline( + "TELNYX_API_KEY not found in environment. Switching to offline mode." + ) + # If no specific API is provided or the provided API is valid for api_name, env_key in env_keys.items(): if llm_api == api_name or (not llm_api and env_key in os.environ): diff --git a/readmeai/models/factory.py b/readmeai/models/factory.py index 889d8425..9fb529d1 100644 --- a/readmeai/models/factory.py +++ b/readmeai/models/factory.py @@ -19,6 +19,7 @@ class ModelFactory: llms.OLLAMA.value: OpenAIHandler, llms.OPENAI.value: OpenAIHandler, llms.GEMINI.value: GeminiHandler, + llms.TELNYX.value: OpenAIHandler, } @staticmethod diff --git a/readmeai/models/openai.py b/readmeai/models/openai.py index eed26007..4dea83b0 100644 --- a/readmeai/models/openai.py +++ b/readmeai/models/openai.py @@ -48,6 +48,13 @@ def _model_settings(self): self.client = openai.OpenAI( base_url=_localhost, api_key=llms.OLLAMA.name ) + elif self.config.llm.api == llms.TELNYX.name: + self.endpoint = self.config.llm.telnyx_base_url + self.model = self.config.llm.telnyx_base_model + self.client = openai.OpenAI( + base_url=self.endpoint, + api_key=os.environ.get("TELNYX_API_KEY"), + ) self.headers = {"Authorization": f"Bearer {self.client.api_key}"} async def _build_payload(self, prompt: str, tokens: int) -> dict: diff --git a/tests/core/test_utils.py b/tests/core/test_utils.py index ca86970c..034d2459 100644 --- a/tests/core/test_utils.py +++ b/tests/core/test_utils.py @@ -51,6 +51,18 @@ def test_offline_mode_when_no_env_vars_set(mock_configs): assert test_model == ModelOptions.OFFLINE.name +@patch.dict("os.environ", {"TELNYX_API_KEY": "KEYXXX"}, 
clear=True) +def test_get_environment_telnyx(mock_configs): + """Test that the environment is setup correctly for Telnyx.""" + mock_configs.config.llm.api = ModelOptions.TELNYX.name + mock_configs.config.llm.model = "meta-llama/Meta-Llama-3-70B" + test_api, test_model = get_environment( + mock_configs.config.llm.api, mock_configs.config.llm.model + ) + assert test_api == ModelOptions.TELNYX.name + assert test_model == "meta-llama/Meta-Llama-3-70B" + + @patch.dict("os.environ", {}, clear=True) def test_set_offline_mode(mock_configs): """Test that the environment is setup correctly for offline mode."""