diff --git a/README.md b/README.md
index 660e348..8c4ae3a 100644
--- a/README.md
+++ b/README.md
@@ -43,19 +43,20 @@ jobs:
           openai_api_key: ${{ secrets.OPENAI_API_KEY }}
 ```
 
-| Input                   | Description                                                     | Required | Default                    |
-| ----------------------- | --------------------------------------------------------------- | -------- | -------------------------- |
-| `github_token`          | The GitHub token to use for the Action                          | Yes      |                            |
-| `openai_api_key`        | The [OpenAI API key] to use, keep it hidden                     | Yes      |                            |
-| `pull_request_id`       | The ID of the pull request to use                               | No       | Extracted from metadata    |
-| `openai_model`          | The [OpenAI model] to use                                       | No       | `gpt-4o-mini`              |
-| `max_tokens`            | The maximum number of **prompt tokens** to use                  | No       | `1000`                     |
-| `temperature`           | Higher values will make the model more creative (0-2)           | No       | `0.6`                      |
-| `sample_prompt`         | The prompt to use for giving context to the model               | No       | See `SAMPLE_PROMPT`        |
-| `sample_response`       | A sample response for giving context to the model               | No       | See `GOOD_SAMPLE_RESPONSE` |
-| `completion_prompt`     | The prompt to use for the model to generate the PR description  | No       | See `COMPLETION_PROMPT`    |
-| `overwrite_description` | Whether to overwrite the PR description if it already exists    | No       | `false`                    |
-
+| Input                      | Description                                                     | Required | Default                    |
+| -------------------------- | --------------------------------------------------------------- | -------- | -------------------------- |
+| `github_token`             | The GitHub token to use for the Action                          | Yes      |                            |
+| `openai_api_key`           | The [OpenAI API key] to use, keep it hidden                     | Yes      |                            |
+| `pull_request_id`          | The ID of the pull request to use                               | No       | Extracted from metadata    |
+| `openai_model`             | The [OpenAI model] to use                                       | No       | `gpt-4o-mini`              |
+| `max_tokens`               | The maximum number of **prompt tokens** to use                  | No       | `1000`                     |
+| `temperature`              | Higher values will make the model more creative (0-2)           | No       | `0.6`                      |
+| `sample_prompt`            | The prompt to use for giving context to the model               | No       | See `SAMPLE_PROMPT`        |
+| `sample_response`          | A sample response for giving context to the model               | No       | See `GOOD_SAMPLE_RESPONSE` |
+| `completion_prompt`        | The prompt to use for the model to generate the PR description  | No       | See `COMPLETION_PROMPT`    |
+| `overwrite_description`    | Whether to overwrite the PR description if it already exists    | No       | `false`                    |
+| `azure_endpoint`           | The OpenAI API Azure endpoint if you use one                    | No       |                            |
+| `azure_openai_api_version` | The OpenAI API version to use if you use Azure                  | No       |                            |
 
 [OpenAI API key]: https://help.openai.com/en/articles/4936850-where-do-i-find-my-secret-api-key
 [OpenAI model]: https://platform.openai.com/docs/models
diff --git a/action.yml b/action.yml
index afbaaff..f992270 100644
--- a/action.yml
+++ b/action.yml
@@ -43,6 +43,14 @@ inputs:
     description: 'Overwrite the PR description if it already exists. This also means that the action will run on every PR update'
     required: false
     default: 'false'
+  azure_endpoint:
+    description: 'Azure endpoint for OpenAI, if you are using Azure. Setting this will make the action use Azure OpenAI.'
+    required: false
+    default: ''
+  azure_openai_api_version:
+    description: 'OpenAI API version, if you are using Azure.'
+    required: false
+    default: ''
 
 runs:
   using: 'docker'
diff --git a/autofill_description.py b/autofill_description.py
index 15d48d5..2088811 100755
--- a/autofill_description.py
+++ b/autofill_description.py
@@ -176,7 +176,18 @@ def main():
     if len(completion_prompt) > max_allowed_characters:
         completion_prompt = completion_prompt[:max_allowed_characters]
 
-    openai_client = openai.OpenAI(api_key=openai_api_key)
+    azure_endpoint = os.environ.get("INPUT_AZURE_ENDPOINT", "")
+    azure_openai_api_version = os.environ.get("INPUT_AZURE_OPENAI_API_VERSION", "")
+
+    openai_client = (
+        openai.AzureOpenAI(
+            api_key=openai_api_key,
+            azure_endpoint=azure_endpoint,
+            api_version=azure_openai_api_version,
+        )
+        if azure_endpoint
+        else openai.OpenAI(api_key=openai_api_key)
+    )
     openai_response = openai_client.chat.completions.create(
         model=open_ai_model,
         messages=[
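
For illustration, a minimal sketch of a workflow wiring up the two new inputs from this patch. The `uses:` reference, secret names, endpoint, API version, and deployment name below are placeholders, not values defined by this diff; substitute the ones for your repository and Azure OpenAI resource.

```yaml
# Usage sketch (assumed placeholder values throughout).
name: Autofill PR description

on:
  pull_request:
    types: [opened]

jobs:
  autofill:
    runs-on: ubuntu-latest
    permissions:
      pull-requests: write
    steps:
      - uses: <owner>/<this-action>@<version>                # placeholder action reference
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          openai_api_key: ${{ secrets.AZURE_OPENAI_API_KEY }}     # Azure OpenAI key, kept in a secret
          azure_endpoint: https://<your-resource>.openai.azure.com/   # non-empty value switches the action to the Azure client
          azure_openai_api_version: '2024-02-01'                  # any API version your Azure resource supports
          openai_model: <your-deployment-name>                    # with the Azure client, this is typically the deployment name
```

Because the Python change falls back to `openai.OpenAI` when `azure_endpoint` is empty, omitting the two Azure inputs keeps the existing non-Azure behavior.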