Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Devel #61

Merged
merged 36 commits into from
Jan 15, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
36 commits
Select commit Hold shift + click to select a range
d92d27c
Update gpt_handler.py
jamesoncollins Jan 7, 2025
95da922
dont break
jamesoncollins Jan 8, 2025
926624b
try again
jamesoncollins Jan 8, 2025
f6acc68
again
jamesoncollins Jan 8, 2025
11a999b
debug
jamesoncollins Jan 9, 2025
7695777
debug
jamesoncollins Jan 9, 2025
0fc009e
debug
jamesoncollins Jan 9, 2025
9acfa7c
debug
jamesoncollins Jan 9, 2025
a5a4954
debug
jamesoncollins Jan 9, 2025
9ae008e
debug
jamesoncollins Jan 9, 2025
f0e8b41
debug
jamesoncollins Jan 9, 2025
6234f71
debug
jamesoncollins Jan 9, 2025
acf6601
debug
jamesoncollins Jan 9, 2025
a833dea
debug
jamesoncollins Jan 9, 2025
d058820
debug
jamesoncollins Jan 9, 2025
df6ab05
debug
jamesoncollins Jan 9, 2025
fb6d457
debug
jamesoncollins Jan 9, 2025
bf0b2ad
debug
jamesoncollins Jan 9, 2025
2eba4be
debug
jamesoncollins Jan 9, 2025
3a0976e
debug
jamesoncollins Jan 9, 2025
f4ab2b4
debug
jamesoncollins Jan 9, 2025
014dd65
debug
jamesoncollins Jan 9, 2025
eb8d187
debug
jamesoncollins Jan 9, 2025
2e31622
debug
jamesoncollins Jan 9, 2025
6b9ddc7
debug
jamesoncollins Jan 9, 2025
01c6b60
debug
jamesoncollins Jan 9, 2025
8172964
debug
jamesoncollins Jan 9, 2025
24a9c56
add system prompts
jamesoncollins Jan 12, 2025
cb2c998
help messages
jamesoncollins Jan 12, 2025
d92fbfb
try to make help text required
jamesoncollins Jan 12, 2025
530c518
try that again
jamesoncollins Jan 12, 2025
15676dd
fix typo
jamesoncollins Jan 15, 2025
0947819
return revised prompt, make dalle3 default
jamesoncollins Jan 15, 2025
57bbb5a
must be list
jamesoncollins Jan 15, 2025
d6fd120
get committer
jamesoncollins Jan 15, 2025
1f591d7
fix git function
jamesoncollins Jan 15, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions handlers/base_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,14 @@ def process_message(self, msg, attachments):
"attachments": processed_attachments,
}

@staticmethod
def get_name() -> str:
    """Return the handler's short display name (used in the #help listing).

    Raises:
        NotImplementedError: Always; concrete handlers must override this.
    """
    raise NotImplementedError("Subclasses must implement this method.")

@staticmethod
def get_help_text() -> str:
    """Return the handler's usage/help text (shown by the #help command).

    Raises:
        NotImplementedError: Always; concrete handlers must override this.
    """
    raise NotImplementedError("Subclasses must implement this method.")

def assign_context(self, context):
self.context = context

Expand Down
154 changes: 120 additions & 34 deletions handlers/gpt_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,10 @@
import os
import json
from openai import OpenAI
import warnings
import warnings
import base64
import io
from openai.types.chat.chat_completion_message import ChatCompletionMessage


key = os.environ.get("OPENAI_API_KEY", "")
Expand All @@ -29,25 +32,34 @@ def get_substring_mapping(self) -> dict:
# Provide mapping and default value for 'model'
return {0: ("model", "gpt-4o-mini")}

def get_attachments(self) -> list:
if self.hashtag_data["model"] == "image":
self.hashtag_data["model"] = "dall-e-2"
if self.hashtag_data["model"] in image_generation_models:
return submit_gpt_image_gen(self.cleaned_input, None, self.hashtag_data["model"])
return []

def get_message(self) -> str:
def process_message(self, msg, attachments):

if self.hashtag_data.get("model") == "help":
return self.get_help_text()
return {"message": self.get_help_text(), "attachments": []}

if self.hashtag_data["model"] == "image":
self.hashtag_data["model"] = "dall-e-2"
self.hashtag_data["model"] = "dall-e-3"

if self.hashtag_data["model"] in image_generation_models:
return f"GPT Image Prompt {self.input_str}"
return submit_gpt_image_gen(self.cleaned_input, None, self.hashtag_data["model"])

return submit_gpt(self.cleaned_input, None, self.hashtag_data["model"])

def get_help_text(self) -> str:
# try to get quote info. Currently this is wrapped in a try because I don't
# know how it looks for a data message.
json_quoted_convo = None
try:
quote_msg = self.context.message.raw_message["envelope"]["syncMessage"]["sentMessage"]["quote"]
quote_author = quote_msg["author"]
quote_text = quote_msg["text"]
quote_attachments = quote_msg["attachments"]
convo_b64 = find_first_text_file_base64(quote_attachments)
json_quoted_convo = base64_text_file_to_json(convo_b64)
except:
pass

return submit_gpt(self.cleaned_input, json_quoted_convo, None, self.hashtag_data["model"])

@staticmethod
def get_help_text() -> str:
retval = "The first substring specifies the model being used, e.g., #gpt.gpt-4o-mini.\n"
retval += "Available models are: \n"

Expand Down Expand Up @@ -88,7 +100,7 @@ def save_conversation_history(session_key, history):
with open(history_file, "w") as file:
json.dump(trimmed_history, file, indent=4)

def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"):
def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o-mini"):
"""
Submits user input to the GPT model, maintaining conversation history.

Expand All @@ -101,18 +113,24 @@ def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"):
Returns:
str: The assistant's response along with model details.
"""
# Initialize conversation history
if session_key:
conversation_history = load_conversation_history(session_key)
else:
conversation_history = []

if not json_session:
json_session = []


# control the gpt system prompts.
# this is the spot you might use for having different personalities
if len(json_session) == 0:
json_session.append({"role": "system", "content":
"You are a helpful chatbot for signal groups."
}
)

# Append user's message to the conversation history
conversation_history.append({"role": "user", "content": user_input})
json_session.append({"role": "user", "content": user_input})

# Format the conversation history for the new API
formatted_messages = [
{"role": msg["role"], "content": msg["content"]} for msg in conversation_history
{"role": msg["role"], "content": msg["content"]} for msg in json_session
]

# Call the OpenAI API with the conversation history
Expand All @@ -121,15 +139,13 @@ def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"):
except Exception as e:
# Code to handle the exception
print(f"An error occurred: {e}")
return f"An error occurred: {e}"

return {"message": f"An error occurred: {e}", "attachments": []}
# Extract the assistant's response
assistant_message = response.choices[0].message
conversation_history.append(assistant_message)
json_session.append( {"role": "assistant", "content": assistant_message.content} )

# Save updated conversation history if session_key is provided
if session_key:
save_conversation_history(session_key, conversation_history)
print(json_session)

# Prepare model details
model_details = {
Expand All @@ -144,9 +160,10 @@ def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"):
f"Session Key: {model_details['session_key']}\n"
f"Token Usage: {model_details['usage']}"
)


# Return the assistant's reply with model details
return assistant_message.content + details_string
return {"message": assistant_message.content + details_string, "attachments": [json_to_base64_text_file(json_session)]}

def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"):

Expand All @@ -160,7 +177,76 @@ def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"):
#size="256x256",
response_format="b64_json",
)
print(response.data[0].revised_prompt)
#print(response.data[0].url)
return [response.data[0].b64_json]

return { "message": response.data[0].revised_prompt, "attachments": [response.data[0].b64_json] }





def json_to_base64_text_file(json_data):
    """
    Serialize *json_data* to JSON and return it as a Base64-encoded string.

    :param json_data: Any JSON-serializable Python object (dict, list, ...).
    :return: str, the Base64 encoding of the UTF-8 JSON text.
    :raises ValueError: If serialization or encoding fails.
    """
    # NOTE(review): the original body contained unreachable MIME-wrapping
    # code after an early return; it has been removed. The function's
    # observable behavior (return a plain Base64 str) is unchanged.
    try:
        # JSON text -> UTF-8 bytes -> Base64 bytes -> ASCII str
        json_text = json.dumps(json_data)
        return base64.b64encode(json_text.encode("utf-8")).decode("utf-8")
    except Exception as e:
        # Preserve the original contract: surface any failure as ValueError.
        raise ValueError(f"An error occurred: {e}")

def base64_text_file_to_json(b64_file_content):
    """
    Decode a Base64-encoded text file and parse its contents as JSON.

    :param b64_file_content: The Base64-encoded content of the text file
        (bytes or str).
    :return: The decoded JSON data as a Python dictionary or list.
    :raises binascii.Error: If the input is not valid Base64.
    :raises json.JSONDecodeError: If the decoded text is not valid JSON.
    """
    # Base64 -> UTF-8 JSON text -> Python object.
    # (The leftover debug print of the full payload was removed.)
    decoded_bytes = base64.b64decode(b64_file_content)
    json_string = decoded_bytes.decode("utf-8")
    return json.loads(json_string)

def find_first_text_file_base64(base64_files):
    """
    Identify the first Base64-encoded file in the list whose decoded content
    is valid UTF-8 text.

    :param base64_files: A list of Base64-encoded file contents (bytes or str).
    :return: The original Base64 entry for the first text file found, or
        None if no text file is found.
    """
    for b64_file in base64_files:
        try:
            # Decoding as UTF-8 *is* the "is this a text file?" check; the
            # decoded value itself is not needed, so it is not bound.
            base64.b64decode(b64_file).decode("utf-8")
        except (base64.binascii.Error, UnicodeDecodeError):
            # Not valid Base64, or the bytes are not text -- skip it.
            continue
        # Successful decode: return the original Base64 entry unchanged.
        return b64_file

    return None

3 changes: 2 additions & 1 deletion handlers/mmw_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,8 @@ def get_message(self) -> str:

raise Exception("shouldnt get here")

def get_help_text(self) -> str:
@staticmethod
def get_help_text() -> str:
    # Usage summary for the mmw handler, surfaced by the global #help command.
    return "mmw help:\nAdd #mmw to your message to save it on the log.\n Type #mmw alone to retrieve the log"

@staticmethod
Expand Down
43 changes: 19 additions & 24 deletions run.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,27 +13,6 @@

LOGMSG = "----TURBOBOT----\n"

import git
import os

def get_git_info():
"""
Retrieves the current branch name, commit ID, and timestamp of the latest commit
from the Git repository.

Returns:
str: A formatted string with the branch name, commit ID, and timestamp on separate lines.
Returns "Not a Git repository" if not in a Git repository.
"""
try:
repo = git.Repo(os.path.dirname(os.path.abspath(__file__)), search_parent_directories=True)
branch_name = repo.active_branch.name
commit_id = repo.head.commit.hexsha
commit_time = datetime.fromtimestamp(repo.head.commit.committed_date).strftime('%Y-%m-%d %H:%M:%S')

return f"Branch: {branch_name}\nCommit ID: {commit_id}\nTimestamp: {commit_time}"
except git.InvalidGitRepositoryError:
return "Not a Git repository"

def find_group_by_internal_id(data, target_id):
for entry in data:
Expand Down Expand Up @@ -167,7 +146,7 @@ async def handle(self, c: Context):
print("unknown message type")
return

print(f"source {source}, recipient {c.message.recipient()}, dest {destination}, group {group}, message type {c.message.type.name}")
print(f"source {source}, recipient: {c.message.recipient()}, dest: {destination}, group: {group}, message type: {c.message.type.name}")

if msg is None:
print("Message was None")
Expand All @@ -194,6 +173,20 @@ async def handle(self, c: Context):
print("is reboot")
await c.reply( LOGMSG + "turbobot rebooting...")
sys.exit(1)
elif msg == "#help":
handler_classes = BaseHandler.get_all_handlers()
retmsg = ""
for handler_class in handler_classes:
handler_name = "Unknown"
try:
handler_name = handler_class.get_name()
handler_help_string = handler_class.get_help_text()
retmsg += f"{handler_name}:\n"
retmsg += f"{handler_help_string}\n\n"
except Exception as e:
retmsg += f"{handler_name} help text is not enabled \n\n"
print(f"Handler {handler_name} exception: {e}")
await c.reply( LOGMSG + retmsg )
else:
handler_classes = BaseHandler.get_all_handlers()
for handler_class in handler_classes:
Expand All @@ -208,13 +201,15 @@ async def handle(self, c: Context):
retdict = handler.process_message(msg, b64_attachments)
returnMsg = retdict["message"]
returnAttachments = retdict["attachments"]
print(f"retmessage {returnMsg}")
print(f"attachment len {len(returnAttachments)}")
except Exception as e:
returnMsg += f"Handler {handler_name} exception: {e}"
returnAttachments = []
try:
await c.reply( LOGMSG + returnMsg, base64_attachments=returnAttachments )
except Exception as e:
c.reply( LOGMSG + returnMsg + "failed to send signal message" )
await c.reply( LOGMSG + returnMsg + "failed to send signal message" )
except Exception as e:
print(f"Handler {handler_name} exception: {e}")
return
Expand All @@ -225,7 +220,7 @@ async def handle(self, c: Context):
"phone_number": os.environ["BOT_NUMBER"]
})

print('bot starting...')
print(f'bot starting, api {os.environ["SIGNAL_API_URL"]}, bot number: {os.environ["BOT_NUMBER"]} ...')

# Parse environment variables
contact_number = parse_env_var("CONTACT_NUMBERS")
Expand Down
8 changes: 8 additions & 0 deletions tests/test_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,14 @@ async def test_ping_pong(self, receive_mock, send_mock):
self.assertEqual(send_mock.call_count, 1)
self.assertEqual(send_mock.call_args_list[0].args[1], LOGMSG + "Pong")

@patch("signalbot.SignalAPI.send", new_callable=SendMessagesMock)
@patch("signalbot.SignalAPI.receive", new_callable=ReceiveMessagesMock)
async def test_hash(self, receive_mock, send_mock):
    # A bare "#" message should produce exactly one reply that carries
    # the bot's log prefix.
    receive_mock.define(["#"])
    await self.run_bot()
    self.assertEqual(send_mock.call_count, 1)
    # assertIn yields a clearer failure message than comparing a bool to True.
    self.assertIn(LOGMSG, send_mock.call_args_list[0].args[1])

@patch("signalbot.SignalAPI.send", new_callable=SendMessagesMock)
@patch("signalbot.SignalAPI.receive", new_callable=ReceiveMessagesMock)
async def test_ticker(self, receive_mock, send_mock):
Expand Down
28 changes: 26 additions & 2 deletions utils/misc_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,8 @@
import hashlib
import json
from cryptography.fernet import Fernet

import git
from datetime import datetime

# Append a dictionary to the JSON file
def append_to_json_file(file_path, new_data, encryption_key=None):
Expand Down Expand Up @@ -120,4 +121,27 @@ def parse_env_var(env_var, delimiter=";"):
elif delimiter in value:
return value.split(delimiter) # Return as a list
else:
return [value] # Single value as a list
return [value] # Single value as a list

def get_git_info():
    """
    Retrieves the current branch name, commit ID, timestamp, and committer name
    of the latest commit from the Git repository.

    Returns:
        str: A formatted string with the branch name, commit ID, timestamp, and
        committer name on separate lines. Returns "Not a Git repository" if not
        in a Git repository.
    """
    try:
        repo = git.Repo(os.path.dirname(os.path.abspath(__file__)), search_parent_directories=True)
        try:
            branch_name = repo.active_branch.name
        except TypeError:
            # GitPython raises TypeError for a detached HEAD (e.g. a CI
            # checkout of a specific commit); report that instead of crashing.
            branch_name = "(detached HEAD)"
        head_commit = repo.head.commit
        commit_id = head_commit.hexsha
        commit_time = datetime.fromtimestamp(head_commit.committed_date).strftime('%Y-%m-%d %H:%M:%S')
        committer_name = head_commit.committer.name

        return (f"Branch: {branch_name}\n"
                f"Commit ID: {commit_id}\n"
                f"Timestamp: {commit_time}\n"
                f"Committer: {committer_name}")
    except git.InvalidGitRepositoryError:
        return "Not a Git repository"
Loading