diff --git a/README.md b/README.md index 7dc103adb..bfbc53fcd 100644 --- a/README.md +++ b/README.md @@ -98,6 +98,7 @@ export OPENAI_API_KEY="your-api-key-here" > > - openai (default) > - openrouter +> - azure > - gemini > - ollama > - mistral @@ -394,6 +395,11 @@ Below is a comprehensive example of `config.json` with multiple custom providers "baseURL": "https://api.openai.com/v1", "envKey": "OPENAI_API_KEY" }, + "azure": { + "name": "AzureOpenAI", + "baseURL": "https://YOUR_PROJECT_NAME.openai.azure.com/openai", + "envKey": "AZURE_OPENAI_API_KEY" + }, + "openrouter": { "name": "OpenRouter", "baseURL": "https://openrouter.ai/api/v1", @@ -455,6 +461,10 @@ For each AI provider, you need to set the corresponding API key in your environm # OpenAI export OPENAI_API_KEY="your-api-key-here" +# Azure OpenAI +export AZURE_OPENAI_API_KEY="your-azure-api-key-here" +export AZURE_OPENAI_API_VERSION="2025-03-01-preview" # Optional + # OpenRouter export OPENROUTER_API_KEY="your-openrouter-key-here" diff --git a/codex-cli/src/components/chat/terminal-chat.tsx b/codex-cli/src/components/chat/terminal-chat.tsx index 7f59c0b3c..998a190cf 100644 --- a/codex-cli/src/components/chat/terminal-chat.tsx +++ b/codex-cli/src/components/chat/terminal-chat.tsx @@ -13,7 +13,7 @@ import { useTerminalSize } from "../../hooks/use-terminal-size.js"; import { AgentLoop } from "../../utils/agent/agent-loop.js"; import { ReviewDecision } from "../../utils/agent/review.js"; import { generateCompactSummary } from "../../utils/compact-summary.js"; -import { getBaseUrl, getApiKey, saveConfig } from "../../utils/config.js"; +import { saveConfig } from "../../utils/config.js"; import { extractAppliedPatches as _extractAppliedPatches } from "../../utils/extract-applied-patches.js"; import { getGitDiff } from "../../utils/get-diff.js"; import { createInputItem } from "../../utils/input-utils.js"; @@ -23,6 +23,7 @@ import { calculateContextPercentRemaining, uniqueById, } from "../../utils/model-utils.js"; 
+import { createOpenAIClient } from "../../utils/openai-client.js"; import { CLI_VERSION } from "../../utils/session.js"; import { shortCwd } from "../../utils/short-path.js"; import { saveRollout } from "../../utils/storage/save-rollout.js"; @@ -34,7 +35,6 @@ import ModelOverlay from "../model-overlay.js"; import chalk from "chalk"; import { Box, Text } from "ink"; import { spawn } from "node:child_process"; -import OpenAI from "openai"; import React, { useEffect, useMemo, useRef, useState } from "react"; import { inspect } from "util"; @@ -78,10 +78,7 @@ async function generateCommandExplanation( ): Promise<string> { try { // Create a temporary OpenAI client - const oai = new OpenAI({ - apiKey: getApiKey(config.provider), - baseURL: getBaseUrl(config.provider), - }); + const oai = createOpenAIClient(config); // Format the command for display const commandForDisplay = formatCommandForDisplay(command); diff --git a/codex-cli/src/components/singlepass-cli-app.tsx b/codex-cli/src/components/singlepass-cli-app.tsx index b57b40e43..f365f5eb7 100644 --- a/codex-cli/src/components/singlepass-cli-app.tsx +++ b/codex-cli/src/components/singlepass-cli-app.tsx @@ -5,13 +5,7 @@ import type { FileOperation } from "../utils/singlepass/file_ops"; import Spinner from "./vendor/ink-spinner"; // Third‑party / vendor components import TextInput from "./vendor/ink-text-input"; -import { - OPENAI_TIMEOUT_MS, - OPENAI_ORGANIZATION, - OPENAI_PROJECT, - getBaseUrl, - getApiKey, -} from "../utils/config"; +import { createOpenAIClient } from "../utils/openai-client"; import { generateDiffSummary, generateEditSummary, @@ -26,7 +20,6 @@ import { EditedFilesSchema } from "../utils/singlepass/file_ops"; import * as fsSync from "fs"; import * as fsPromises from "fs/promises"; import { Box, Text, useApp, useInput } from "ink"; -import OpenAI from "openai"; import { zodResponseFormat } from "openai/helpers/zod"; import path from "path"; import React, { useEffect, useState, useRef } from "react"; @@ 
-399,20 +392,7 @@ export function SinglePassApp({ files, }); - const headers: Record<string, string> = {}; - if (OPENAI_ORGANIZATION) { - headers["OpenAI-Organization"] = OPENAI_ORGANIZATION; - } - if (OPENAI_PROJECT) { - headers["OpenAI-Project"] = OPENAI_PROJECT; - } - - const openai = new OpenAI({ - apiKey: getApiKey(config.provider), - baseURL: getBaseUrl(config.provider), - timeout: OPENAI_TIMEOUT_MS, - defaultHeaders: headers, - }); + const openai = createOpenAIClient(config); const chatResp = await openai.beta.chat.completions.parse({ model: config.model, ...(config.flexMode ? { service_tier: "flex" } : {}), diff --git a/codex-cli/src/utils/agent/agent-loop.ts b/codex-cli/src/utils/agent/agent-loop.ts index 85d1d3e7b..51b8a738c 100644 --- a/codex-cli/src/utils/agent/agent-loop.ts +++ b/codex-cli/src/utils/agent/agent-loop.ts @@ -17,6 +17,7 @@ import { OPENAI_PROJECT, getApiKey, getBaseUrl, + AZURE_OPENAI_API_VERSION, } from "../config.js"; import { log } from "../logger/log.js"; import { parseToolCallArguments } from "../parsers.js"; @@ -31,7 +32,7 @@ import { import { handleExecCommand } from "./handle-exec-command.js"; import { HttpsProxyAgent } from "https-proxy-agent"; import { randomUUID } from "node:crypto"; -import OpenAI, { APIConnectionTimeoutError } from "openai"; +import OpenAI, { APIConnectionTimeoutError, AzureOpenAI } from "openai"; // Wait time before retrying after rate limit errors (ms). const RATE_LIMIT_RETRY_WAIT_MS = parseInt( @@ -322,6 +323,25 @@ export class AgentLoop { ...(timeoutMs !== undefined ? { timeout: timeoutMs } : {}), }); + if (this.provider.toLowerCase() === "azure") { + this.oai = new AzureOpenAI({ + apiKey, + baseURL, + apiVersion: AZURE_OPENAI_API_VERSION, + defaultHeaders: { + originator: ORIGIN, + version: CLI_VERSION, + session_id: this.sessionId, + ...(OPENAI_ORGANIZATION + ? { "OpenAI-Organization": OPENAI_ORGANIZATION } + : {}), + ...(OPENAI_PROJECT ? { "OpenAI-Project": OPENAI_PROJECT } : {}), + }, + httpAgent: PROXY_URL ? 
new HttpsProxyAgent(PROXY_URL) : undefined, + ...(timeoutMs !== undefined ? { timeout: timeoutMs } : {}), + }); + } + setSessionId(this.sessionId); setCurrentModel(this.model); diff --git a/codex-cli/src/utils/compact-summary.ts b/codex-cli/src/utils/compact-summary.ts index 82a337e38..383991e23 100644 --- a/codex-cli/src/utils/compact-summary.ts +++ b/codex-cli/src/utils/compact-summary.ts @@ -1,12 +1,14 @@ import type { AppConfig } from "./config.js"; import type { ResponseItem } from "openai/resources/responses/responses.mjs"; -import { getBaseUrl, getApiKey } from "./config.js"; -import OpenAI from "openai"; +import { createOpenAIClient } from "./openai-client.js"; + /** * Generate a condensed summary of the conversation items. * @param items The list of conversation items to summarize * @param model The model to use for generating the summary + * @param flexMode Whether to use the flex-mode service tier + * @param config The configuration object * @returns A concise structured summary string */ /** @@ -23,10 +25,7 @@ export async function generateCompactSummary( flexMode = false, config: AppConfig, ): Promise<string> { - const oai = new OpenAI({ - apiKey: getApiKey(config.provider), - baseURL: getBaseUrl(config.provider), - }); + const oai = createOpenAIClient(config); const conversationText = items .filter( diff --git a/codex-cli/src/utils/config.ts b/codex-cli/src/utils/config.ts index 29e5b312b..4c51deb31 100644 --- a/codex-cli/src/utils/config.ts +++ b/codex-cli/src/utils/config.ts @@ -68,6 +68,9 @@ export const OPENAI_TIMEOUT_MS = export const OPENAI_BASE_URL = process.env["OPENAI_BASE_URL"] || ""; export let OPENAI_API_KEY = process.env["OPENAI_API_KEY"] || ""; +export const AZURE_OPENAI_API_VERSION = + process.env["AZURE_OPENAI_API_VERSION"] || "2025-03-01-preview"; + export const DEFAULT_REASONING_EFFORT = "high"; export const OPENAI_ORGANIZATION = process.env["OPENAI_ORGANIZATION"] || ""; export const OPENAI_PROJECT = process.env["OPENAI_PROJECT"] || ""; diff 
--git a/codex-cli/src/utils/model-utils.ts b/codex-cli/src/utils/model-utils.ts index 0d370f273..01a21c0a7 100644 --- a/codex-cli/src/utils/model-utils.ts +++ b/codex-cli/src/utils/model-utils.ts @@ -1,14 +1,9 @@ import type { ResponseItem } from "openai/resources/responses/responses.mjs"; import { approximateTokensUsed } from "./approximate-tokens-used.js"; -import { - OPENAI_ORGANIZATION, - OPENAI_PROJECT, - getBaseUrl, - getApiKey, -} from "./config"; +import { getApiKey } from "./config.js"; import { type SupportedModelId, openAiModelInfo } from "./model-info.js"; -import OpenAI from "openai"; +import { createOpenAIClient } from "./openai-client.js"; const MODEL_LIST_TIMEOUT_MS = 2_000; // 2 seconds export const RECOMMENDED_MODELS: Array<string> = ["o4-mini", "o3"]; @@ -27,19 +22,7 @@ async function fetchModels(provider: string): Promise<Array<string>> { } try { - const headers: Record<string, string> = {}; - if (OPENAI_ORGANIZATION) { - headers["OpenAI-Organization"] = OPENAI_ORGANIZATION; - } - if (OPENAI_PROJECT) { - headers["OpenAI-Project"] = OPENAI_PROJECT; - } - - const openai = new OpenAI({ - apiKey: getApiKey(provider), - baseURL: getBaseUrl(provider), - defaultHeaders: headers, - }); + const openai = createOpenAIClient({ provider }); const list = await openai.models.list(); const models: Array<string> = []; for await (const model of list as AsyncIterable<{ id?: string }>) { diff --git a/codex-cli/src/utils/openai-client.ts b/codex-cli/src/utils/openai-client.ts new file mode 100644 index 000000000..fb8117fed --- /dev/null +++ b/codex-cli/src/utils/openai-client.ts @@ -0,0 +1,51 @@ +import type { AppConfig } from "./config.js"; + +import { + getBaseUrl, + getApiKey, + AZURE_OPENAI_API_VERSION, + OPENAI_TIMEOUT_MS, + OPENAI_ORGANIZATION, + OPENAI_PROJECT, +} from "./config.js"; +import OpenAI, { AzureOpenAI } from "openai"; + +type OpenAIClientConfig = { + provider: string; +}; + +/** + * Creates an OpenAI client instance based on the provided configuration. 
+ * Handles both standard OpenAI and Azure OpenAI configurations. + * + * @param config The configuration containing provider information + * @returns An instance of either OpenAI or AzureOpenAI client + */ +export function createOpenAIClient( + config: OpenAIClientConfig | AppConfig, +): OpenAI | AzureOpenAI { + const headers: Record<string, string> = {}; + if (OPENAI_ORGANIZATION) { + headers["OpenAI-Organization"] = OPENAI_ORGANIZATION; + } + if (OPENAI_PROJECT) { + headers["OpenAI-Project"] = OPENAI_PROJECT; + } + + if (config.provider?.toLowerCase() === "azure") { + return new AzureOpenAI({ + apiKey: getApiKey(config.provider), + baseURL: getBaseUrl(config.provider), + apiVersion: AZURE_OPENAI_API_VERSION, + timeout: OPENAI_TIMEOUT_MS, + defaultHeaders: headers, + }); + } + + return new OpenAI({ + apiKey: getApiKey(config.provider), + baseURL: getBaseUrl(config.provider), + timeout: OPENAI_TIMEOUT_MS, + defaultHeaders: headers, + }); +} diff --git a/codex-cli/src/utils/providers.ts b/codex-cli/src/utils/providers.ts index adf628bba..698d0d70a 100644 --- a/codex-cli/src/utils/providers.ts +++ b/codex-cli/src/utils/providers.ts @@ -12,6 +12,11 @@ export const providers: Record< baseURL: "https://openrouter.ai/api/v1", envKey: "OPENROUTER_API_KEY", }, + azure: { + name: "AzureOpenAI", + baseURL: "https://YOUR_PROJECT_NAME.openai.azure.com/openai", + envKey: "AZURE_OPENAI_API_KEY", + }, gemini: { name: "Gemini", baseURL: "https://generativelanguage.googleapis.com/v1beta/openai",