Skip to content

Commit

Permalink
re-adds instructor
Browse files Browse the repository at this point in the history
  • Loading branch information
Neverbolt committed Dec 3, 2024
1 parent 692e05c commit bef8e09
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 34 deletions.
20 changes: 7 additions & 13 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -4,12 +4,10 @@ build-backend = "setuptools.build_meta"

[project]
name = "hackingBuddyGPT"
authors = [
{ name = "Andreas Happe", email = "[email protected]" }
]
authors = [{ name = "Andreas Happe", email = "[email protected]" }]
maintainers = [
{ name = "Andreas Happe", email = "[email protected]" },
{ name = "Juergen Cito", email = "[email protected]" }
{ name = "Andreas Happe", email = "[email protected]" },
{ name = "Juergen Cito", email = "[email protected]" },
]
description = "Helping Ethical Hackers use LLMs in 50 lines of code"
readme = "README.md"
Expand All @@ -21,14 +19,15 @@ classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Development Status :: 4 - Beta",
"Development Status :: 4 - Beta",
]
dependencies = [
'fabric == 3.2.2',
'Mako == 1.3.2',
'requests == 2.32.0',
'rich == 13.7.1',
'tiktoken == 0.8.0',
'instructor == 1.3.5',
'PyYAML == 6.0.1',
'python-dotenv == 1.0.1',
'pypsexec == 0.3.0',
Expand Down Expand Up @@ -59,14 +58,9 @@ where = ["src"]

[tool.pytest.ini_options]
pythonpath = "src"
addopts = [
"--import-mode=importlib",
]
addopts = ["--import-mode=importlib"]
[project.optional-dependencies]
testing = [
'pytest',
'pytest-mock'
]
testing = ['pytest', 'pytest-mock']

[project.scripts]
wintermute = "hackingBuddyGPT.cli.wintermute:main"
Expand Down
2 changes: 1 addition & 1 deletion src/hackingBuddyGPT/capabilities/capability.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def __call__(self, *args, **kwargs):
def to_model(self) -> BaseModel:
"""
Converts the parameters of the `__call__` function of the capability to a pydantic model, that can be used to
interface with an LLM using eg instructor or the openAI function calling API.
interface with an LLM using eg the openAI function calling API.
The model will have the same name as the capability class and will have the same fields as the `__call__`,
the `__call__` method can then be accessed by calling the `execute` method of the model.
"""
Expand Down
20 changes: 0 additions & 20 deletions src/hackingBuddyGPT/capabilities/submit_http_method.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,25 +24,6 @@ class SubmitHTTPMethod(Capability):
def describe(self) -> str:
    """Return the natural-language description of this capability that is shown to the LLM.

    The description embeds ``self.http_methods_format`` (the expected flag format) and
    instructs the model to prioritize submitting any newly found flag.
    """
    # NOTE(review): grammar of the prompt string fixed ("a HTTP methods" -> "an HTTP method",
    # "following the that format" -> "following that format"); content otherwise unchanged.
    return f"Submits an HTTP method, which is the goal of all activities. The HTTP method format is {self.http_methods_format}. If you find a flag following that format, that you have not yet submitted, then the most important first step is to submit it, ignoring all other possibilities of further action"

def to_model(self) -> BaseModel:
    """
    Convert the parameters of this capability's `__call__` method into a pydantic model
    that can be used to interface with an LLM, e.g. via instructor or the OpenAI
    function-calling API.

    The generated model has the same name as the capability class and one field per
    `__call__` parameter; the capability itself can then be invoked through the
    `execute` method attached to the model type.
    """
    # Introspect the bound __call__ to discover parameter names and annotations
    # (inspect.signature on a bound method already excludes `self`).
    sig = inspect.signature(self.__call__)
    # One pydantic field per parameter: the (annotation, ...) tuple marks each field as required.
    fields = {param: (param_info.annotation, ...) for param, param_info in sig.parameters.items()}
    # NOTE(review): create_model comes from pydantic; self.describe() becomes the model's docstring,
    # which is what the LLM sees as the tool description.
    model_type = create_model(self.__class__.__name__, __doc__=self.describe(), **fields)

    def execute(model):
        # Unpack the validated model fields back into a call on the capability instance.
        m = model.dict()  # pydantic v1 API; v2 uses model_dump() — confirm pinned pydantic version
        return self(**m)

    # Attach execute as an unbound function so callers invoke it on model *instances*.
    model_type.execute = execute

    return model_type

def __call__(self, method: Literal["GET", "HEAD", "POST", "PUT", "DELETE", "OPTION", "PATCH"],
path: str,
query: Optional[str] = None,
Expand Down Expand Up @@ -75,4 +56,3 @@ def __call__(self, method: Literal["GET", "HEAD", "POST", "PUT", "DELETE", "OPTI
return "All methods submitted, congratulations"
# turn the response into "plain text format" for responding to the prompt
return f"HTTP/1.1 {resp.status_code} {resp.reason}\r\n{headers}\r\n\r\n{resp.text}"""

5 changes: 5 additions & 0 deletions src/hackingBuddyGPT/utils/openai/openai_lib.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import instructor
import datetime
from typing import Dict, Union, Iterable, Optional

Expand Down Expand Up @@ -35,6 +36,10 @@ def init(self):
def client(self) -> openai.OpenAI:
    """Return the underlying ``openai.OpenAI`` client instance held by this wrapper."""
    return self._client

@property
def instructor(self) -> instructor.Instructor:
    """Return an ``instructor``-wrapped view of the OpenAI client for structured
    (pydantic-model) responses.

    NOTE(review): a new Instructor wrapper is constructed on every property access
    rather than cached — presumably cheap, but confirm if accessed in a hot path.
    """
    return instructor.from_openai(self.client)

def get_response(self, prompt, *, capabilities: Optional[Dict[str, Capability]] = None, **kwargs) -> LLMResult:
""" # TODO: re-enable compatibility layer
if isinstance(prompt, str) or hasattr(prompt, "render"):
Expand Down

0 comments on commit bef8e09

Please sign in to comment.