Skip to content

Commit

Permalink
new writing
Browse files Browse the repository at this point in the history
  • Loading branch information
CTY-git committed May 6, 2024
1 parent fffc82e commit 81eae96
Show file tree
Hide file tree
Showing 4 changed files with 282 additions and 253 deletions.
62 changes: 44 additions & 18 deletions patchwork/app.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,22 @@
import importlib
import importlib.util
import json
import sys
import traceback
from pathlib import Path
from types import ModuleType

import click
import yaml

from patchwork.logger import init_cli_logger, logger
from patchwork.steps.PreparePrompt import PreparePrompt

# Serializers for the --format CLI option: maps each supported format name
# to the callable used to dump the final inputs dict to the --output file.
_DATA_FORMAT_MAPPING = {
"yaml": yaml.dump,
"json": json.dumps,
}

def _get_config_path(config: str, patchflow: str) -> tuple[Path | None, Path | None]:
config_path = Path(config)
prompt_path = None
Expand Down Expand Up @@ -39,7 +47,6 @@ def _get_config_path(config: str, patchflow: str) -> tuple[Path | None, Path | N
ignore_unknown_options=True,
)
)

@click.version_option(message="%(version)s", package_name="patchwork-cli")
@click.help_option("-h", "--help")
@click.option(
Expand Down Expand Up @@ -67,24 +74,27 @@ def _get_config_path(config: str, patchflow: str) -> tuple[Path | None, Path | N
@click.option("--output", type=click.Path(exists=False, resolve_path=True, writable=True), help="Output data file")
@click.option("data_format", "--format", type=click.Choice(["yaml", "json"]), default="json", help="Output data format")
def cli(log: str, patchflow: str, opts: list[str], config: str | None, output: str | None, data_format: str):
try:
module = importlib.import_module(".patchflows", "patchwork")
except ModuleNotFoundError:
logger.debug(f"Patchflow {patchflow} not found")
exit(1)

try:
patchflow_class = getattr(module, patchflow)
except AttributeError:
logger.debug(f"Patchflow {patchflow} not found as a class in {Path(__file__).parent / 'patchflows'}")
if patchflow.lower() == "chat":
from patchwork.patchwork_interpreter import run_chat

run_chat()
exit(0)

if "::" not in patchflow:
patchflow = "patchwork.patchflows::" + patchflow

module_path, _, patchflow_name = patchflow.partition("::")
module = find_module(module_path, patchflow)

try:
patchflow_class = getattr(module, patchflow_name)
except AttributeError:
logger.debug(f"Patchflow {patchflow} not found as a class in {module_path}")
exit(1)

inputs = {}
if config is not None:
config_path, prompt_path = _get_config_path(config, patchflow)
config_path, prompt_path = _get_config_path(config, patchflow_name)
if config_path is None and prompt_path is None:
exit(1)

Expand All @@ -111,16 +121,32 @@ def cli(log: str, patchflow: str, opts: list[str], config: str | None, output: s
logger.error(f"Error running patchflow {patchflow}: {e}")
exit(1)

data_format_mapping = {
"yaml": yaml.dump,
"json": json.dumps,
}

if output is not None:
serialize = data_format_mapping.get(data_format, json.dumps)
serialize = _DATA_FORMAT_MAPPING.get(data_format, json.dumps)
with open(output, "w") as file:
file.write(serialize(inputs))


def find_module(module_path: str, patchflow: str) -> ModuleType:
    """Resolve *module_path* to a loaded module.

    Tries two strategies in order:

    1. Treat *module_path* as a filesystem path and load it as a source file.
    2. Fall back to importing *module_path* as a regular dotted module name.

    Args:
        module_path: Filesystem path or dotted module name to load.
        patchflow: Patchflow identifier, used only for debug log messages.

    Returns:
        The loaded module.

    Raises:
        SystemExit: via ``exit(1)`` when neither strategy can load the module.
    """
    try:
        spec = importlib.util.spec_from_file_location("custom_module", module_path)
        # spec is None when the path has no loader (e.g. not a recognizable
        # source file); spec.loader can also be None. The original code fed
        # a None spec into module_from_spec and relied on the resulting
        # exception being swallowed — guard explicitly instead.
        if spec is not None and spec.loader is not None:
            module = importlib.util.module_from_spec(spec)
            spec.loader.exec_module(module)
            return module
        logger.debug(f"Patchflow {patchflow} not found as a file/directory in {module_path}")
    except Exception:
        # exec_module runs arbitrary user code, so any exception type is
        # possible here; log and fall through to the module-name strategy.
        logger.debug(f"Patchflow {patchflow} not found as a file/directory in {module_path}")

    try:
        return importlib.import_module(module_path)
    except ModuleNotFoundError:
        logger.debug(f"Patchflow {patchflow} not found as a module in {module_path}")
        exit(1)


# Allow running this module directly (python -m / python app.py) in addition
# to the installed console-script entry point.
if __name__ == "__main__":
    cli()
114 changes: 59 additions & 55 deletions patchwork/patchflows/ResolveIssue/ResolveIssue.py
Original file line number Diff line number Diff line change
@@ -1,40 +1,41 @@
import json
import tempfile
from pathlib import Path

import yaml
import json
import tempfile

from patchwork.step import Step
from patchwork.steps import (
CallLLM,
CommitChanges,
CreateIssueComment,
CreatePR,
ExtractModelResponse,
GenerateCodeRepositoryEmbeddings,
ModifyCode,
PreparePrompt,
QueryEmbeddings,
ReadIssues,
PreparePrompt,
CallOpenAI,
ExtractModelResponse,
ModifyCode,
CommitChanges,
CreatePR,
)

_DEFAULT_INPUT_FILE = Path(__file__).parent / "defaults.yml"
_DEFAULT_PROMPT_JSON = Path(__file__).parent / "prompt.json"


class ResolveIssue(Step):
def __init__(self, inputs: dict):
final_inputs = yaml.safe_load(_DEFAULT_INPUT_FILE.read_text())

if final_inputs is None:
final_inputs = {}
final_inputs.update(inputs)

if "prompt_template_file" not in final_inputs.keys():
final_inputs["prompt_template_file"] = _DEFAULT_PROMPT_JSON

final_inputs["pr_title"] = f"PatchWork {self.__class__.__name__}"
final_inputs["branch_prefix"] = f"{self.__class__.__name__.lower()}-"

self.fix_issue = bool(final_inputs.get("fix_issue", False))
self.inputs = final_inputs

Expand Down Expand Up @@ -63,48 +64,51 @@ def run(self) -> dict:

outputs = CreateIssueComment(self.inputs).run()
self.inputs.update(outputs)

if self.fix_issue:
extracted_code_contexts = []
# Call LLM to make necessary updates to files to resolve the issue
for result in self.inputs["embedding_results"]:
with open(result["path"], "r") as file:
file_content = file.read()
lines = file_content.splitlines(keepends=True)
extracted_code_contexts.append(
{
"uri": result["path"],
"startLine": 0,
"endLine": len(lines),
"affectedCode": file_content,
"messageText": "\n".join(self.inputs["texts"]),
})

self.inputs["prompt_values"] = extracted_code_contexts

# Save extracted data to JSON
output_file = Path(tempfile.mktemp(".json"))
with open(output_file, "w", encoding="utf-8") as f:
json.dump(extracted_code_contexts, f, indent=2)

self.inputs["code_file"] = output_file
self.inputs["prompt_id"] = "resolve_issue"
self.inputs["response_partitions"] = {"patch": []}
outputs = PreparePrompt(self.inputs).run()
self.inputs.update(outputs)
outputs = CallOpenAI(self.inputs).run()
self.inputs.update(outputs)
outputs = ExtractModelResponse(self.inputs).run()
self.inputs.update(outputs)

# Modify code files with the suggested changes
outputs = ModifyCode(self.inputs).run()
self.inputs.update(outputs)

# Commit changes and create PR
outputs = CommitChanges(self.inputs).run()
self.inputs.update(outputs)
outputs = CreatePR(self.inputs).run()
self.inputs.update(outputs)

if not self.fix_issue:
return self.inputs

extracted_code_contexts = []
# Call LLM to make necessary updates to files to resolve the issue
for result in self.inputs["embedding_results"]:
with open(result["path"], "r") as file:
file_content = file.read()
lines = file_content.splitlines(keepends=True)
extracted_code_contexts.append(
{
"uri": result["path"],
"startLine": 0,
"endLine": len(lines),
"affectedCode": file_content,
"messageText": "\n".join(self.inputs["texts"]),
}
)

self.inputs["prompt_values"] = extracted_code_contexts

# Save extracted data to JSON
output_file = Path(tempfile.mktemp(".json"))
with open(output_file, "w", encoding="utf-8") as f:
json.dump(extracted_code_contexts, f, indent=2)

self.inputs["code_file"] = output_file
self.inputs["prompt_id"] = "resolve_issue"
self.inputs["response_partitions"] = {"patch": []}
outputs = PreparePrompt(self.inputs).run()
self.inputs.update(outputs)
outputs = CallLLM(self.inputs).run()
self.inputs.update(outputs)
outputs = ExtractModelResponse(self.inputs).run()
self.inputs.update(outputs)

# Modify code files with the suggested changes
outputs = ModifyCode(self.inputs).run()
self.inputs.update(outputs)

# Commit changes and create PR
outputs = CommitChanges(self.inputs).run()
self.inputs.update(outputs)
outputs = CreatePR(self.inputs).run()
self.inputs.update(outputs)

return self.inputs
4 changes: 2 additions & 2 deletions patchwork/steps/CallLLM/CallLLM.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,8 @@ def call(self, prompts):
json=dict(
generationConfig=self.model_args,
contents=[dict(parts=texts)],
safetySettings=self._SAFETY_SETTINGS
)
safetySettings=self._SAFETY_SETTINGS,
),
)
response.raise_for_status()
response_dict = response.json()
Expand Down
Loading

0 comments on commit 81eae96

Please sign in to comment.