Pr resolve issue patchflow (#59)
* finish resolve issue flow

generate pr at the end

* Update ResolveIssue.py

* Update prompt and defaults

* Update prompt.json

* Update test.yml

* Update defaults.yml

update model

* add flag to enable fixing the issue

* add gemini changes

---------

Co-authored-by: TIANYOU CHEN <[email protected]>
codelion and CTY-git authored May 3, 2024
1 parent eee64d3 commit fffc82e
Showing 4 changed files with 86 additions and 3 deletions.
3 changes: 2 additions & 1 deletion .github/workflows/test.yml
@@ -90,5 +90,6 @@ jobs:
      - name: Propose relevant file to issues
        run: |
          poetry run patchwork ResolveIssue --log debug \
            --openai_api_key=${{ secrets.OPENAI_KEY }} \
            --github_api_key=${{ secrets.SCM_GITHUB_KEY }} \
            --issue_url=https://github.com/patched-codes/patchwork/issues/20
            --issue_url=https://github.com/patched-codes/patchwork/issues/20
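For local experimentation the same patchflow can also be driven from Python rather than through the workflow step above. A minimal sketch, assuming the module path mirrors the file layout shown below and that the inputs dict takes the same keys as the CLI flags; all values are placeholders:

# Sketch: run the ResolveIssue patchflow programmatically with the new fix_issue flag.
# Every value below is a placeholder; substitute real credentials and URLs.
from patchwork.patchflows.ResolveIssue.ResolveIssue import ResolveIssue

inputs = {
    "openai_api_key": "sk-...",      # placeholder key
    "github_api_key": "ghp_...",     # placeholder key
    "issue_url": "https://github.com/patched-codes/patchwork/issues/20",
    "fix_issue": True,               # opt in to the fix-and-PR path added in this commit
}

outputs = ResolveIssue(inputs).run()
print(outputs)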
63 changes: 61 additions & 2 deletions patchwork/patchflows/ResolveIssue/ResolveIssue.py
@@ -1,25 +1,41 @@
from pathlib import Path

import yaml
import json
import tempfile

from patchwork.step import Step
from patchwork.steps import (
    CreateIssueComment,
    GenerateCodeRepositoryEmbeddings,
    QueryEmbeddings,
    ReadIssues,
    PreparePrompt,
    CallOpenAI,
    ExtractModelResponse,
    ModifyCode,
    CommitChanges,
    CreatePR,
)

_DEFAULT_INPUT_FILE = Path(__file__).parent / "defaults.yml"

_DEFAULT_PROMPT_JSON = Path(__file__).parent / "prompt.json"

class ResolveIssue(Step):
    def __init__(self, inputs: dict):
        final_inputs = yaml.safe_load(_DEFAULT_INPUT_FILE.read_text())

        if final_inputs is None:
            final_inputs = {}
        final_inputs.update(inputs)

        if "prompt_template_file" not in final_inputs.keys():
            final_inputs["prompt_template_file"] = _DEFAULT_PROMPT_JSON

        final_inputs["pr_title"] = f"PatchWork {self.__class__.__name__}"
        final_inputs["branch_prefix"] = f"{self.__class__.__name__.lower()}-"

        self.fix_issue = bool(final_inputs.get("fix_issue", False))
        self.inputs = final_inputs

    def run(self) -> dict:
@@ -47,5 +63,48 @@ def run(self) -> dict:

        outputs = CreateIssueComment(self.inputs).run()
        self.inputs.update(outputs)

        if self.fix_issue:
            extracted_code_contexts = []
            # Call LLM to make necessary updates to files to resolve the issue
            for result in self.inputs["embedding_results"]:
                with open(result["path"], "r") as file:
                    file_content = file.read()
                lines = file_content.splitlines(keepends=True)
                extracted_code_contexts.append(
                    {
                        "uri": result["path"],
                        "startLine": 0,
                        "endLine": len(lines),
                        "affectedCode": file_content,
                        "messageText": "\n".join(self.inputs["texts"]),
                    })

            self.inputs["prompt_values"] = extracted_code_contexts

            # Save extracted data to JSON
            output_file = Path(tempfile.mktemp(".json"))
            with open(output_file, "w", encoding="utf-8") as f:
                json.dump(extracted_code_contexts, f, indent=2)

            self.inputs["code_file"] = output_file
            self.inputs["prompt_id"] = "resolve_issue"
            self.inputs["response_partitions"] = {"patch": []}
            outputs = PreparePrompt(self.inputs).run()
            self.inputs.update(outputs)
            outputs = CallOpenAI(self.inputs).run()
            self.inputs.update(outputs)
            outputs = ExtractModelResponse(self.inputs).run()
            self.inputs.update(outputs)

            # Modify code files with the suggested changes
            outputs = ModifyCode(self.inputs).run()
            self.inputs.update(outputs)

            # Commit changes and create PR
            outputs = CommitChanges(self.inputs).run()
            self.inputs.update(outputs)
            outputs = CreatePR(self.inputs).run()
            self.inputs.update(outputs)

        return self.inputs
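Each embedding_results entry is expanded by the loop in run() into a prompt context that covers the whole file. A standalone sketch of that mapping; build_context is an illustrative helper (not part of the patchflow) and the sample path and issue text are invented:

# Illustrative helper mirroring the context-extraction loop in run() above.
from pathlib import Path

def build_context(result: dict, texts: list[str]) -> dict:
    file_content = Path(result["path"]).read_text()
    lines = file_content.splitlines(keepends=True)
    return {
        "uri": result["path"],
        "startLine": 0,
        "endLine": len(lines),
        "affectedCode": file_content,
        "messageText": "\n".join(texts),
    }

# Example: one matched file plus the issue body yields one prompt_values entry.
context = build_context({"path": "patchwork/step.py"}, ["Step.run raises when inputs are empty"])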
11 changes: 11 additions & 0 deletions patchwork/patchflows/ResolveIssue/defaults.yml
@@ -3,10 +3,21 @@
# gitlab_api_key: required-for-gitlab-scm
# issue_url: required

fix_issue: false
# GenerateEmbeddings Inputs
# For OpenAI API use the following to select the model
# openai_embedding_model: text-embedding-3-small
# For HuggingFace API use the following to select the model
# huggingface_embedding_model: codellama/CodeLlama-70b-Instruct-hf
# For either API, use the following to provide the API key
# openai_api_key: required-for-openai
# google_api_key: required-for-google
# client_base_url: https://api.openai.com/v1
# model: gpt-3.5-turbo

# CommitChanges Inputs
disable_branch: false

# CreatePR Inputs
disable_pr: false
force_pr_creation: true
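These defaults are read and merged with user-supplied inputs in ResolveIssue.__init__, with user values taking precedence. A minimal sketch of that merge; the path and override values are illustrative:

# Sketch of the defaults merge performed in ResolveIssue.__init__ above.
from pathlib import Path
import yaml

defaults_file = Path("patchwork/patchflows/ResolveIssue/defaults.yml")
final_inputs = yaml.safe_load(defaults_file.read_text()) or {}

# User-supplied inputs override the defaults, e.g. enabling the new fix_issue flag.
final_inputs.update({"fix_issue": True, "issue_url": "https://github.com/patched-codes/patchwork/issues/20"})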
12 changes: 12 additions & 0 deletions patchwork/patchflows/ResolveIssue/prompt.json
@@ -0,0 +1,12 @@
[
  {
    "id": "resolve_issue",
    "prompts": [
      {
        "role": "system",
        "content": "You are a senior software engineer who is best in the world at resolving bugs. Users will give you a code snippet and you will generate a fix based on the provided bug message. Minimize the amount of changes needed for the fix. If no changes are necessary return the original code as is. Only respond with the new code, do not add any comments or change the indentation. Make sure you respond with the full code and not only the parts that are changed.\n\nResolve the bug described below by making necessary updates to the code.\n\n{{messageText}}."
      },
      {"role": "user", "content": "{{affectedCode}}"}
    ]
  }
]
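At run time the {{messageText}} and {{affectedCode}} placeholders are filled from the prompt_values entries built in ResolveIssue.run(). A simplified stand-in for that substitution; render is an illustrative helper, not the real PreparePrompt step, and the sample values are invented:

# Illustrative placeholder substitution for the prompt template above.
import json
import re
from pathlib import Path

def render(template: str, values: dict) -> str:
    # Replace {{name}} with values["name"], leaving unknown placeholders untouched.
    return re.sub(r"\{\{(\w+)\}\}", lambda m: str(values.get(m.group(1), m.group(0))), template)

prompts = json.loads(Path("patchwork/patchflows/ResolveIssue/prompt.json").read_text())
values = {"messageText": "Fix off-by-one in pagination", "affectedCode": "def page(n):\n    return n - 1"}
messages = [{"role": p["role"], "content": render(p["content"], values)} for p in prompts[0]["prompts"]]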
