From d92d27c02ee2177376c688a8f1b8012cf75553a4 Mon Sep 17 00:00:00 2001 From: jamesoncollins <35897639+jamesoncollins@users.noreply.github.com> Date: Tue, 7 Jan 2025 16:09:05 -0500 Subject: [PATCH 01/36] Update gpt_handler.py --- handlers/gpt_handler.py | 31 +++++++++++++++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 7e4d875..e6444c2 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -3,7 +3,9 @@ import os import json from openai import OpenAI -import warnings +import warnings +import base64 +import io key = os.environ.get("OPENAI_API_KEY", "") @@ -162,5 +164,30 @@ def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"): ) print(response.data[0].revised_prompt) #print(response.data[0].url) - return [response.data[0].b64_json] + return [response.data[0].b64_json] + + + + +def string_to_base64_text_file(input_string): + """ + Creates a Base64-encoded text file from a string without saving it to disk. + + :param input_string: The string to encode. + :return: A bytes object representing the Base64-encoded text file. + """ + # Encode the string to Base64 + b64_encoded = base64.b64encode(input_string.encode('utf-8')) + + # Create a virtual file + virtual_file = io.BytesIO() + virtual_file.write(b64_encoded) + virtual_file.seek(0) # Rewind the file to the beginning + + # Get the contents of the virtual file as bytes + return virtual_file.read() + +# Example usage +encoded_file_content = string_to_base64_text_file("Hello, World!") +print(encoded_file_content) # This is the Base64-encoded representation of the text file \ No newline at end of file From 95da92255e6fdf75fe5035386a8b358c64381792 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 18:05:38 -0500 Subject: [PATCH 02/36] dont break --- handlers/gpt_handler.py | 127 ++++++++++++++++++++++++++++------------ 1 file changed, 91 insertions(+), 36 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index e6444c2..ddd3b11 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -31,23 +31,32 @@ def get_substring_mapping(self) -> dict: # Provide mapping and default value for 'model' return {0: ("model", "gpt-4o-mini")} - def get_attachments(self) -> list: - if self.hashtag_data["model"] == "image": - self.hashtag_data["model"] = "dall-e-2" - if self.hashtag_data["model"] in image_generation_models: - return submit_gpt_image_gen(self.cleaned_input, None, self.hashtag_data["model"]) - return [] - - def get_message(self) -> str: + def process_message(self, msg, attachments): + if self.hashtag_data.get("model") == "help": - return self.get_help_text() + return {"message": self.get_help_text(), "attachments": []} if self.hashtag_data["model"] == "image": - self.hashtag_data["model"] = "dall-e-2" + self.hashtag_data["model"] = "dall-e-2" + if self.hashtag_data["model"] in image_generation_models: - return f"GPT Image Prompt {self.input_str}" + return { "messge": "image", "attachments": submit_gpt_image_gen(self.cleaned_input, None, self.hashtag_data["model"])} - return submit_gpt(self.cleaned_input, None, self.hashtag_data["model"]) + # try to get quote info. 
currently this is a try becuase i dont know + # how it looks for a data message + json_quoted_convo = None + try: + quote_msg = self.context.message.raw_message["envelope"]["syncMessage"]["sentMessage"]["quote"] + quote_author = quote_msg["author"] + quote_text = quote_msg["text"] + quote_attachments = quote_msg["attachments"] + convo_b64 = find_first_text_file_base64(quote_attachments) + json_quoted_convo = base64_text_file_to_json(convo_b64) + except: + pass + + return submit_gpt(self.cleaned_input, json_quoted_convo, None, self.hashtag_data["model"]) + def get_help_text(self) -> str: retval = "The first substring specifies the model being used, e.g., #gpt.gpt-4o-mini.\n" @@ -90,7 +99,7 @@ def save_conversation_history(session_key, history): with open(history_file, "w") as file: json.dump(trimmed_history, file, indent=4) -def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"): +def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o-mini"): """ Submits user input to the GPT model, maintaining conversation history. @@ -106,8 +115,11 @@ def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"): # Initialize conversation history if session_key: conversation_history = load_conversation_history(session_key) + elif json_session: + conversation_history = json_session else: conversation_history = [] + # Append user's message to the conversation history conversation_history.append({"role": "user", "content": user_input}) @@ -123,7 +135,7 @@ def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"): except Exception as e: # Code to handle the exception print(f"An error occurred: {e}") - return f"An error occurred: {e}" + return {"messge": f"An error occurred: {e}", "attachments": []} # Extract the assistant's response assistant_message = response.choices[0].message @@ -133,6 +145,8 @@ def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"): if session_key: save_conversation_history(session_key, conversation_history) + json_session = conversation_history + # Prepare model details model_details = { "model": response.model, @@ -146,9 +160,10 @@ def submit_gpt(user_input, session_key=None, model="gpt-4o-mini"): f"Session Key: {model_details['session_key']}\n" f"Token Usage: {model_details['usage']}" ) + # Return the assistant's reply with model details - return assistant_message.content + details_string + return {"message": assistant_message.content + details_string, "attachments": [json_to_base64_text_file(json_session)]} def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"): @@ -169,25 +184,65 @@ def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"): -def string_to_base64_text_file(input_string): - """ - Creates a Base64-encoded text file from a string without saving it to disk. - - :param input_string: The string to encode. - :return: A bytes object representing the Base64-encoded text file. 
- """ - # Encode the string to Base64 - b64_encoded = base64.b64encode(input_string.encode('utf-8')) - - # Create a virtual file - virtual_file = io.BytesIO() - virtual_file.write(b64_encoded) - virtual_file.seek(0) # Rewind the file to the beginning - - # Get the contents of the virtual file as bytes - return virtual_file.read() - -# Example usage -encoded_file_content = string_to_base64_text_file("Hello, World!") -print(encoded_file_content) # This is the Base64-encoded representation of the text file + +def json_to_base64_text_file(json_data): + """ + Creates a Base64-encoded text file from JSON data without saving it to disk. + + :param json_data: The JSON data to encode (Python dictionary or list). + :return: A bytes object representing the Base64-encoded text file. + """ + # Serialize the JSON data to a string + json_string = json.dumps(json_data) + + # Encode the JSON string to Base64 + b64_encoded = base64.b64encode(json_string.encode('utf-8')) + + # Create a virtual file + virtual_file = io.BytesIO() + virtual_file.write(b64_encoded) + virtual_file.seek(0) # Rewind the file to the beginning + + # Get the contents of the virtual file as bytes + return virtual_file.read() + +def base64_text_file_to_json(b64_file_content): + """ + Decodes a Base64-encoded text file and converts it back to JSON data. + + :param b64_file_content: The Base64-encoded content of the text file (bytes object). + :return: The decoded JSON data as a Python dictionary or list. + """ + # Decode the Base64 content to get the original JSON string + decoded_bytes = base64.b64decode(b64_file_content) + json_string = decoded_bytes.decode('utf-8') + + # Parse the JSON string back into a Python object + json_data = json.loads(json_string) + + return json_data + +def find_first_text_file_base64(base64_files): + """ + Identifies the first Base64-encoded file in the list that is a text file and returns it. + + :param base64_files: A list of Base64-encoded file contents (as bytes or strings). + :return: The Base64 string representing the first text file found, or None if no text file is found. 
+ """ + for b64_file in base64_files: + try: + # Decode the Base64 content + decoded_bytes = base64.b64decode(b64_file) + + # Attempt to decode the bytes as UTF-8 (text) + decoded_text = decoded_bytes.decode('utf-8') + + # If successful, return the original Base64 string + return b64_file + except (base64.binascii.Error, UnicodeDecodeError): + # If decoding fails, it's not a valid Base64 or not a text file + continue + + # Return None if no text file is found + return None \ No newline at end of file From 926624b9b65ae9a3a2b70c6abcb9c41ca89a3bb5 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 18:09:30 -0500 Subject: [PATCH 03/36] try again --- handlers/gpt_handler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index ddd3b11..22d001a 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -145,7 +145,8 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- if session_key: save_conversation_history(session_key, conversation_history) - json_session = conversation_history + # Convert the chat history to a JSON string + json_session = json.dumps(conversation_history, indent=4) # Prepare model details model_details = { From f6acc68e8164a0c494487e089586a79b6dd0f634 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 18:13:14 -0500 Subject: [PATCH 04/36] again --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 22d001a..bec9aa6 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -146,7 +146,7 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- save_conversation_history(session_key, conversation_history) # Convert the chat history to a JSON string - json_session = json.dumps(conversation_history, indent=4) + json_session = json.dumps(conversation_history) # Prepare model details model_details = { From 11a999ba92d004cf5d57146512f762ebbed6ade5 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 19:16:49 -0500 Subject: [PATCH 05/36] debug --- handlers/gpt_handler.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index bec9aa6..d898c03 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -135,8 +135,10 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- except Exception as e: # Code to handle the exception print(f"An error occurred: {e}") - return {"messge": f"An error occurred: {e}", "attachments": []} - + return {"message": f"An error occurred: {e}", "attachments": []} + + print(response) + # Extract the assistant's response assistant_message = response.choices[0].message conversation_history.append(assistant_message) From 769577767c95b8dc13e5dfd9761cc4763c06a550 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:04:10 -0500 Subject: [PATCH 06/36] debug --- handlers/gpt_handler.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index d898c03..d80e98b 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -149,6 +149,8 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- # Convert the chat history to a JSON string json_session = json.dumps(conversation_history) + + print(json_session) # Prepare model details model_details = { From 
0fc009e5a955d43fb8b8b137e1e15fee4c96d2c1 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:13:57 -0500 Subject: [PATCH 07/36] debug --- handlers/gpt_handler.py | 24 ++++-------------------- 1 file changed, 4 insertions(+), 20 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index d80e98b..19ebcca 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -112,21 +112,12 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- Returns: str: The assistant's response along with model details. """ - # Initialize conversation history - if session_key: - conversation_history = load_conversation_history(session_key) - elif json_session: - conversation_history = json_session - else: - conversation_history = [] - - - # Append user's message to the conversation history - conversation_history.append({"role": "user", "content": user_input}) + if not json_session: + json_session = [] # Format the conversation history for the new API formatted_messages = [ - {"role": msg["role"], "content": msg["content"]} for msg in conversation_history + {"role": msg["role"], "content": msg["content"]} for msg in json_session ] # Call the OpenAI API with the conversation history @@ -141,15 +132,8 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- # Extract the assistant's response assistant_message = response.choices[0].message - conversation_history.append(assistant_message) + json_session.append(assistant_message) - # Save updated conversation history if session_key is provided - if session_key: - save_conversation_history(session_key, conversation_history) - - # Convert the chat history to a JSON string - json_session = json.dumps(conversation_history) - print(json_session) # Prepare model details From 9acfa7ceb842e606083b8316a9046565175b17c9 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:15:52 -0500 Subject: [PATCH 08/36] debug --- handlers/gpt_handler.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 19ebcca..babbc79 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -114,6 +114,9 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- """ if not json_session: json_session = [] + + # Append user's message to the conversation history + json_session.append({"role": "user", "content": user_input}) # Format the conversation history for the new API formatted_messages = [ From a5a4954dfc028eaabf7a897d544dbb554815b53a Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:17:59 -0500 Subject: [PATCH 09/36] debug --- handlers/gpt_handler.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index babbc79..d4577c4 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -6,6 +6,7 @@ import warnings import base64 import io +from openai.types.chat.chat_completion_message import ChatCompletionMessage key = os.environ.get("OPENAI_API_KEY", "") @@ -135,7 +136,7 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- # Extract the assistant's response assistant_message = response.choices[0].message - json_session.append(assistant_message) + json_session.append(model_dump_json(assistant_message)) print(json_session) From 9ae008e71036e9c61384bcb4653aaceab002b113 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:19:05 -0500 Subject: 
[PATCH 10/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index d4577c4..a71b911 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -136,7 +136,7 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- # Extract the assistant's response assistant_message = response.choices[0].message - json_session.append(model_dump_json(assistant_message)) + json_session.append(assistant_message.model_dump_json()) print(json_session) From f0e8b41464f0677d6508a45f6a93be25f1224588 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:22:08 -0500 Subject: [PATCH 11/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index a71b911..d91ea14 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -136,7 +136,7 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- # Extract the assistant's response assistant_message = response.choices[0].message - json_session.append(assistant_message.model_dump_json()) + json_session.append( {"role": "assistant", "content": assistant_message.content} ) print(json_session) From 6234f7167d2406baa9ba264201d5aea212f34e88 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:24:02 -0500 Subject: [PATCH 12/36] debug --- handlers/gpt_handler.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index d91ea14..1ee21eb 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -132,8 +132,6 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- print(f"An error occurred: {e}") return {"message": f"An error occurred: {e}", "attachments": []} - print(response) - # Extract the assistant's response assistant_message = response.choices[0].message json_session.append( {"role": "assistant", "content": assistant_message.content} ) @@ -185,8 +183,10 @@ def json_to_base64_text_file(json_data): :param json_data: The JSON data to encode (Python dictionary or list). :return: A bytes object representing the Base64-encoded text file. """ + print("here") # Serialize the JSON data to a string json_string = json.dumps(json_data) + print("here1") # Encode the JSON string to Base64 b64_encoded = base64.b64encode(json_string.encode('utf-8')) From acf660170b5cef9dfe17e04f945d2370c308546f Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:26:01 -0500 Subject: [PATCH 13/36] debug --- handlers/gpt_handler.py | 2 -- run.py | 2 ++ 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 1ee21eb..08dd332 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -183,10 +183,8 @@ def json_to_base64_text_file(json_data): :param json_data: The JSON data to encode (Python dictionary or list). :return: A bytes object representing the Base64-encoded text file. 
""" - print("here") # Serialize the JSON data to a string json_string = json.dumps(json_data) - print("here1") # Encode the JSON string to Base64 b64_encoded = base64.b64encode(json_string.encode('utf-8')) diff --git a/run.py b/run.py index 42c5915..de002f4 100644 --- a/run.py +++ b/run.py @@ -208,6 +208,8 @@ async def handle(self, c: Context): retdict = handler.process_message(msg, b64_attachments) returnMsg = retdict["message"] returnAttachments = retdict["attachments"] + print(f"retmessage {returnMsg}") + print(f"attachment len {returnAttachments.len()}") except Exception as e: returnMsg += f"Handler {handler_name} exception: {e}" returnAttachments = [] From a833deaf51000b7f9f632d18c6ce8947a1f675c6 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:27:33 -0500 Subject: [PATCH 14/36] debug --- run.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/run.py b/run.py index de002f4..7e88e1a 100644 --- a/run.py +++ b/run.py @@ -209,7 +209,7 @@ async def handle(self, c: Context): returnMsg = retdict["message"] returnAttachments = retdict["attachments"] print(f"retmessage {returnMsg}") - print(f"attachment len {returnAttachments.len()}") + print(f"attachment len {len(returnAttachments)}") except Exception as e: returnMsg += f"Handler {handler_name} exception: {e}" returnAttachments = [] From d05882056c90425ee02d858019c2cdb933c2e8c9 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:34:44 -0500 Subject: [PATCH 15/36] debug --- handlers/gpt_handler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 08dd332..e3b208d 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -185,6 +185,7 @@ def json_to_base64_text_file(json_data): """ # Serialize the JSON data to a string json_string = json.dumps(json_data) + print(f"json string is {json_string}") # Encode the JSON string to Base64 b64_encoded = base64.b64encode(json_string.encode('utf-8')) From df6ab058d3854880137b6dfae0c6f7e09f054b77 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:39:57 -0500 Subject: [PATCH 16/36] debug --- handlers/gpt_handler.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index e3b208d..6b22bdd 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -189,6 +189,8 @@ def json_to_base64_text_file(json_data): # Encode the JSON string to Base64 b64_encoded = base64.b64encode(json_string.encode('utf-8')) + + return b64_encoded # Create a virtual file virtual_file = io.BytesIO() @@ -196,7 +198,9 @@ def json_to_base64_text_file(json_data): virtual_file.seek(0) # Rewind the file to the beginning # Get the contents of the virtual file as bytes - return virtual_file.read() + bbytes = virtual_file.read() + + return bbytes def base64_text_file_to_json(b64_file_content): """ From fb6d4572d415355e03fb02d91c85c7626fbfab30 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:41:45 -0500 Subject: [PATCH 17/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 6b22bdd..fb4f0d8 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -189,7 +189,7 @@ def json_to_base64_text_file(json_data): # Encode the JSON string to Base64 b64_encoded = base64.b64encode(json_string.encode('utf-8')) - + print(f"b64_encoded {b64_encoded}") return b64_encoded # Create a virtual file From 
bf0b2adc3b9d0b50a4cabdf12c614df46553d358 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:54:15 -0500 Subject: [PATCH 18/36] debug --- handlers/gpt_handler.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index fb4f0d8..ce5c78a 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -188,8 +188,12 @@ def json_to_base64_text_file(json_data): print(f"json string is {json_string}") # Encode the JSON string to Base64 - b64_encoded = base64.b64encode(json_string.encode('utf-8')) - print(f"b64_encoded {b64_encoded}") + b64_encoded = base64.b64encode(json_string.encode('utf-8').decode('utf-8')) + + # Construct the MIME data + mime_data = f"data:{text/plain};name={log.txt};base64,{base64_encoded}" + + return mime_data return b64_encoded # Create a virtual file From 2eba4be74ff437a513cb0c68dc5582fee8e734a4 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 20:56:33 -0500 Subject: [PATCH 19/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index ce5c78a..123665e 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -193,7 +193,7 @@ def json_to_base64_text_file(json_data): # Construct the MIME data mime_data = f"data:{text/plain};name={log.txt};base64,{base64_encoded}" - return mime_data + return mime_data.encode('utf-8') return b64_encoded # Create a virtual file From 3a0976eca432006f1fc5361a47ed776203f15535 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 21:00:40 -0500 Subject: [PATCH 20/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 123665e..b5b88f6 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -191,7 +191,7 @@ def json_to_base64_text_file(json_data): b64_encoded = base64.b64encode(json_string.encode('utf-8').decode('utf-8')) # Construct the MIME data - mime_data = f"data:{text/plain};name={log.txt};base64,{base64_encoded}" + mime_data = f"data:text/plain;name=log.txt;base64,{base64_encoded}" return mime_data.encode('utf-8') return b64_encoded From f4ab2b422d0e7942f8b680c76846042ad13f1a78 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 21:01:53 -0500 Subject: [PATCH 21/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index b5b88f6..298f411 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -191,7 +191,7 @@ def json_to_base64_text_file(json_data): b64_encoded = base64.b64encode(json_string.encode('utf-8').decode('utf-8')) # Construct the MIME data - mime_data = f"data:text/plain;name=log.txt;base64,{base64_encoded}" + mime_data = f"data:text/plain;name=log.txt;base64,{b64_encoded}" return mime_data.encode('utf-8') return b64_encoded From 014dd652676e8eda355f1afc6c2918ea66d3ab7e Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 21:02:59 -0500 Subject: [PATCH 22/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 298f411..129ce88 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -192,7 +192,7 @@ def json_to_base64_text_file(json_data): # Construct the MIME data mime_data = 
f"data:text/plain;name=log.txt;base64,{b64_encoded}" - + print(f"mime_data {mime_data}") return mime_data.encode('utf-8') return b64_encoded From eb8d1879b52c261092253fd9344167cb5f52d327 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 21:08:11 -0500 Subject: [PATCH 23/36] debug --- handlers/gpt_handler.py | 42 ++++++++++++++++------------------------- 1 file changed, 16 insertions(+), 26 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 129ce88..195330f 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -177,34 +177,24 @@ def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"): def json_to_base64_text_file(json_data): - """ - Creates a Base64-encoded text file from JSON data without saving it to disk. - - :param json_data: The JSON data to encode (Python dictionary or list). - :return: A bytes object representing the Base64-encoded text file. - """ - # Serialize the JSON data to a string - json_string = json.dumps(json_data) - print(f"json string is {json_string}") - # Encode the JSON string to Base64 - b64_encoded = base64.b64encode(json_string.encode('utf-8').decode('utf-8')) - - # Construct the MIME data - mime_data = f"data:text/plain;name=log.txt;base64,{b64_encoded}" - print(f"mime_data {mime_data}") - return mime_data.encode('utf-8') - return b64_encoded - - # Create a virtual file - virtual_file = io.BytesIO() - virtual_file.write(b64_encoded) - virtual_file.seek(0) # Rewind the file to the beginning - - # Get the contents of the virtual file as bytes - bbytes = virtual_file.read() + try: + input_data = json.dump(json_data) + + # Convert the input string to bytes + input_bytes = input_data.encode('utf-8') - return bbytes + # Encode the bytes to Base64 + base64_encoded = base64.b64encode(input_bytes).decode('utf-8') + + # Construct the MIME data + mime_type = "text/plain" + mime_data = f"data:{mime_type};name=log.txtbase64,{base64_encoded}" + + # Return MIME data as bytes + return mime_data.encode('utf-8') + except Exception as e: + raise ValueError(f"An error occurred: {e}") def base64_text_file_to_json(b64_file_content): """ From 2e31622e3d07316bde782389191a37fe9a69ff79 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 21:09:09 -0500 Subject: [PATCH 24/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 195330f..b2f5a74 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -179,7 +179,7 @@ def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"): def json_to_base64_text_file(json_data): try: - input_data = json.dump(json_data) + input_data = json.dumps(json_data) # Convert the input string to bytes input_bytes = input_data.encode('utf-8') From 6b9ddc7ff4e2eb094597c836150a6f710390c027 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 21:10:05 -0500 Subject: [PATCH 25/36] debug --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index b2f5a74..4e4d159 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -189,7 +189,7 @@ def json_to_base64_text_file(json_data): # Construct the MIME data mime_type = "text/plain" - mime_data = f"data:{mime_type};name=log.txtbase64,{base64_encoded}" + mime_data = f"data:{mime_type};name=log.txt;base64,{base64_encoded}" # Return MIME data as bytes return mime_data.encode('utf-8') From 
01c6b60d47db5399d1c102d29bc44ec1a8e662a1 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 21:12:29 -0500 Subject: [PATCH 26/36] debug --- handlers/gpt_handler.py | 1 + 1 file changed, 1 insertion(+) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 4e4d159..46ab637 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -186,6 +186,7 @@ def json_to_base64_text_file(json_data): # Encode the bytes to Base64 base64_encoded = base64.b64encode(input_bytes).decode('utf-8') + return base64_encoded # Construct the MIME data mime_type = "text/plain" From 8172964051e1823783056ce470eec8d2d94475e6 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 8 Jan 2025 21:15:41 -0500 Subject: [PATCH 27/36] debug --- handlers/gpt_handler.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 46ab637..40f5087 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -211,6 +211,8 @@ def base64_text_file_to_json(b64_file_content): # Parse the JSON string back into a Python object json_data = json.loads(json_string) + print(f"loaded json data {json_data}") + return json_data def find_first_text_file_base64(base64_files): From 24a9c564dbe1e9ed8b7b2a3a057d32bf5203a52d Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Sun, 12 Jan 2025 16:51:38 -0500 Subject: [PATCH 28/36] add system prompts --- handlers/gpt_handler.py | 12 ++++++++++++ run.py | 4 ++-- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 40f5087..9daf3fe 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -115,6 +115,15 @@ def submit_gpt(user_input, json_session = None, session_key=None, model="gpt-4o- """ if not json_session: json_session = [] + + + # control the gpt system prompts. + # this is the spot you might use for having different personailities + if len(json_session) == 0: + json_session.append({"role": "system", "content": + "You are a helpful chatbot for signal groups." 
+ } + ) # Append user's message to the conversation history json_session.append({"role": "user", "content": user_input}) @@ -229,6 +238,9 @@ def find_first_text_file_base64(base64_files): # Attempt to decode the bytes as UTF-8 (text) decoded_text = decoded_bytes.decode('utf-8') + + #if contentType == "application/json": + # return decoded_bytes.decode('utf-8') # If successful, return the original Base64 string return b64_file diff --git a/run.py b/run.py index 7e88e1a..8468e54 100644 --- a/run.py +++ b/run.py @@ -167,7 +167,7 @@ async def handle(self, c: Context): print("unknown message type") return - print(f"source {source}, recipient {c.message.recipient()}, dest {destination}, group {group}, message type {c.message.type.name}") + print(f"source {source}, recipient: {c.message.recipient()}, dest: {destination}, group: {group}, message type: {c.message.type.name}") if msg is None: print("Message was None") @@ -227,7 +227,7 @@ async def handle(self, c: Context): "phone_number": os.environ["BOT_NUMBER"] }) - print('bot starting...') + print(f'bot starting, api {os.environ["SIGNAL_API_URL"]}, bot number: {os.environ["BOT_NUMBER"]} ...') # Parse environment variables contact_number = parse_env_var("CONTACT_NUMBERS") From cb2c9981e6dd330a1d946f6fe77e7df292e5983e Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Sun, 12 Jan 2025 16:56:14 -0500 Subject: [PATCH 29/36] help messages --- run.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/run.py b/run.py index 8468e54..712ba6d 100644 --- a/run.py +++ b/run.py @@ -194,6 +194,18 @@ async def handle(self, c: Context): print("is reboot") await c.reply( LOGMSG + "turbobot rebooting...") sys.exit(1) + elif msg == "#help": + handler_classes = BaseHandler.get_all_handlers() + retmsg = "" + for handler_class in handler_classes: + try: + handler_name = handler_class.get_name() + handler_help_string = handler_class.get_help() + msg += f"{handler_name}:\n" + msg += f"{handler_help_string}\n\n" + except Exception as e: + print(f"Handler {handler_name} exception: {e}") + c.reply( LOGMSG + retmsg ) else: handler_classes = BaseHandler.get_all_handlers() for handler_class in handler_classes: From d92fbfbf5fa2431ea39b488b51c346a887dc1e2e Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Sun, 12 Jan 2025 17:05:19 -0500 Subject: [PATCH 30/36] try to make help text required --- handlers/base_handler.py | 8 ++++++++ run.py | 8 +++++--- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/handlers/base_handler.py b/handlers/base_handler.py index 1aea64e..9b7661f 100644 --- a/handlers/base_handler.py +++ b/handlers/base_handler.py @@ -45,6 +45,14 @@ def process_message(self, msg, attachments): "attachments": processed_attachments, } + @staticmethod + def get_name() -> str: + raise NotImplementedError("Subclasses must implement this method.") + + @staticmethod + def get_help_text() -> str: + raise NotImplementedError("Subclasses must implement this method.") + def assign_context(self, context): self.context = context diff --git a/run.py b/run.py index 712ba6d..9197a53 100644 --- a/run.py +++ b/run.py @@ -198,12 +198,14 @@ async def handle(self, c: Context): handler_classes = BaseHandler.get_all_handlers() retmsg = "" for handler_class in handler_classes: + handler_name = "Unknown" try: handler_name = handler_class.get_name() - handler_help_string = handler_class.get_help() - msg += f"{handler_name}:\n" - msg += f"{handler_help_string}\n\n" + handler_help_string = handler_class.get_help_text() + retmsg += f"{handler_name}:\n" + retmsg += 
f"{handler_help_string}\n\n" except Exception as e: + retmsg += f"{handler_name} help text is not enabled \n\n" print(f"Handler {handler_name} exception: {e}") c.reply( LOGMSG + retmsg ) else: From 530c5183db125484447e0affb539f6b45c229ad8 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Sun, 12 Jan 2025 17:11:46 -0500 Subject: [PATCH 31/36] try that again --- handlers/gpt_handler.py | 4 ++-- handlers/mmw_handler.py | 3 ++- run.py | 4 ++-- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 9daf3fe..8c06448 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -58,8 +58,8 @@ def process_message(self, msg, attachments): return submit_gpt(self.cleaned_input, json_quoted_convo, None, self.hashtag_data["model"]) - - def get_help_text(self) -> str: + @staticmethod + def get_help_text() -> str: retval = "The first substring specifies the model being used, e.g., #gpt.gpt-4o-mini.\n" retval += "Available models are: \n" diff --git a/handlers/mmw_handler.py b/handlers/mmw_handler.py index 542c099..f9e99b8 100644 --- a/handlers/mmw_handler.py +++ b/handlers/mmw_handler.py @@ -52,7 +52,8 @@ def get_message(self) -> str: raise Exception("shouldnt get here") - def get_help_text(self) -> str: + @staticmethod + def get_help_text() -> str: return "mmw help:\nAdd #mmw to your message to save it on the log.\n Type #mmw alone to retrieve the log" @staticmethod diff --git a/run.py b/run.py index 9197a53..bc94ef3 100644 --- a/run.py +++ b/run.py @@ -207,7 +207,7 @@ async def handle(self, c: Context): except Exception as e: retmsg += f"{handler_name} help text is not enabled \n\n" print(f"Handler {handler_name} exception: {e}") - c.reply( LOGMSG + retmsg ) + await c.reply( LOGMSG + retmsg ) else: handler_classes = BaseHandler.get_all_handlers() for handler_class in handler_classes: @@ -230,7 +230,7 @@ async def handle(self, c: Context): try: await c.reply( LOGMSG + returnMsg, base64_attachments=returnAttachments ) except Exception as e: - c.reply( LOGMSG + returnMsg + "failed to send signal message" ) + await c.reply( LOGMSG + returnMsg + "failed to send signal message" ) except Exception as e: print(f"Handler {handler_name} exception: {e}") return From 15676dd70b8d17497a4a878ae496930bf123f760 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 15 Jan 2025 17:00:51 -0500 Subject: [PATCH 32/36] fix typo --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index 8c06448..b87f92d 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -41,7 +41,7 @@ def process_message(self, msg, attachments): self.hashtag_data["model"] = "dall-e-2" if self.hashtag_data["model"] in image_generation_models: - return { "messge": "image", "attachments": submit_gpt_image_gen(self.cleaned_input, None, self.hashtag_data["model"])} + return { "message": "image", "attachments": submit_gpt_image_gen(self.cleaned_input, None, self.hashtag_data["model"])} # try to get quote info. 
currently this is a try becuase i dont know # how it looks for a data message From 0947819dfee780cee2baed9ac6084020246ebdfb Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 15 Jan 2025 17:03:57 -0500 Subject: [PATCH 33/36] return revised prompt, make dalle3 default --- handlers/gpt_handler.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index b87f92d..ef08bb3 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -38,10 +38,10 @@ def process_message(self, msg, attachments): return {"message": self.get_help_text(), "attachments": []} if self.hashtag_data["model"] == "image": - self.hashtag_data["model"] = "dall-e-2" + self.hashtag_data["model"] = "dall-e-3" if self.hashtag_data["model"] in image_generation_models: - return { "message": "image", "attachments": submit_gpt_image_gen(self.cleaned_input, None, self.hashtag_data["model"])} + return submit_gpt_image_gen(self.cleaned_input, None, self.hashtag_data["model"]) # try to get quote info. currently this is a try becuase i dont know # how it looks for a data message @@ -177,9 +177,8 @@ def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"): #size="256x256", response_format="b64_json", ) - print(response.data[0].revised_prompt) - #print(response.data[0].url) - return [response.data[0].b64_json] + + return { "message": response.data[0].revised_prompt, "attachments": response.data[0].b64_json } From 57bbb5a30e6669bf37ee613510547a92679d9568 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 15 Jan 2025 17:06:34 -0500 Subject: [PATCH 34/36] must be list --- handlers/gpt_handler.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/handlers/gpt_handler.py b/handlers/gpt_handler.py index ef08bb3..7abb9bf 100644 --- a/handlers/gpt_handler.py +++ b/handlers/gpt_handler.py @@ -178,7 +178,7 @@ def submit_gpt_image_gen(user_input, session_key=None, model="dall-e-2"): response_format="b64_json", ) - return { "message": response.data[0].revised_prompt, "attachments": response.data[0].b64_json } + return { "message": response.data[0].revised_prompt, "attachments": [response.data[0].b64_json] } From d6fd120694b6e3a5a6e9640653dea27a99957024 Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 15 Jan 2025 17:07:26 -0500 Subject: [PATCH 35/36] get committer --- run.py | 18 ------------------ utils/misc_utils.py | 25 ++++++++++++++++++++++++- 2 files changed, 24 insertions(+), 19 deletions(-) diff --git a/run.py b/run.py index bc94ef3..b3378a6 100644 --- a/run.py +++ b/run.py @@ -16,24 +16,6 @@ import git import os -def get_git_info(): - """ - Retrieves the current branch name, commit ID, and timestamp of the latest commit - from the Git repository. - - Returns: - str: A formatted string with the branch name, commit ID, and timestamp on separate lines. - Returns "Not a Git repository" if not in a Git repository. 
- """ - try: - repo = git.Repo(os.path.dirname(os.path.abspath(__file__)), search_parent_directories=True) - branch_name = repo.active_branch.name - commit_id = repo.head.commit.hexsha - commit_time = datetime.fromtimestamp(repo.head.commit.committed_date).strftime('%Y-%m-%d %H:%M:%S') - - return f"Branch: {branch_name}\nCommit ID: {commit_id}\nTimestamp: {commit_time}" - except git.InvalidGitRepositoryError: - return "Not a Git repository" def find_group_by_internal_id(data, target_id): for entry in data: diff --git a/utils/misc_utils.py b/utils/misc_utils.py index e805743..993afd4 100644 --- a/utils/misc_utils.py +++ b/utils/misc_utils.py @@ -120,4 +120,27 @@ def parse_env_var(env_var, delimiter=";"): elif delimiter in value: return value.split(delimiter) # Return as a list else: - return [value] # Single value as a list \ No newline at end of file + return [value] # Single value as a list + +def get_git_info(): + """ + Retrieves the current branch name, commit ID, timestamp, and committer name + of the latest commit from the Git repository. + + Returns: + str: A formatted string with the branch name, commit ID, timestamp, and committer name + on separate lines. Returns "Not a Git repository" if not in a Git repository. + """ + try: + repo = git.Repo(os.path.dirname(os.path.abspath(__file__)), search_parent_directories=True) + branch_name = repo.active_branch.name + commit_id = repo.head.commit.hexsha + commit_time = datetime.fromtimestamp(repo.head.commit.committed_date).strftime('%Y-%m-%d %H:%M:%S') + committer_name = repo.head.commit.committer.name + + return (f"Branch: {branch_name}\n" + f"Commit ID: {commit_id}\n" + f"Timestamp: {commit_time}\n" + f"Committer: {committer_name}") + except git.InvalidGitRepositoryError: + return "Not a Git repository" \ No newline at end of file From 1f591d773badacc1edca59054c4f511a82ca8b9e Mon Sep 17 00:00:00 2001 From: Jameson Collins Date: Wed, 15 Jan 2025 17:14:27 -0500 Subject: [PATCH 36/36] fix git function --- run.py | 3 --- tests/test_run.py | 8 ++++++++ utils/misc_utils.py | 3 ++- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/run.py b/run.py index b3378a6..bba4abd 100644 --- a/run.py +++ b/run.py @@ -13,9 +13,6 @@ LOGMSG = "----TURBOBOT----\n" -import git -import os - def find_group_by_internal_id(data, target_id): for entry in data: diff --git a/tests/test_run.py b/tests/test_run.py index 67a12d7..63f8276 100644 --- a/tests/test_run.py +++ b/tests/test_run.py @@ -20,6 +20,14 @@ async def test_ping_pong(self, receive_mock, send_mock): self.assertEqual(send_mock.call_count, 1) self.assertEqual(send_mock.call_args_list[0].args[1], LOGMSG + "Pong") + @patch("signalbot.SignalAPI.send", new_callable=SendMessagesMock) + @patch("signalbot.SignalAPI.receive", new_callable=ReceiveMessagesMock) + async def test_hash(self, receive_mock, send_mock): + receive_mock.define(["#"]) + await self.run_bot() + self.assertEqual(send_mock.call_count, 1) + self.assertEqual( LOGMSG in send_mock.call_args_list[0].args[1] , True) + @patch("signalbot.SignalAPI.send", new_callable=SendMessagesMock) @patch("signalbot.SignalAPI.receive", new_callable=ReceiveMessagesMock) async def test_ticker(self, receive_mock, send_mock): diff --git a/utils/misc_utils.py b/utils/misc_utils.py index 993afd4..a553c36 100644 --- a/utils/misc_utils.py +++ b/utils/misc_utils.py @@ -5,7 +5,8 @@ import hashlib import json from cryptography.fernet import Fernet - +import git +from datetime import datetime # Append a dictionary to the JSON file def append_to_json_file(file_path, 
new_data, encryption_key=None):
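
The gpt_handler changes in this series settle on a simple contract for carrying GPT conversation state between messages: the history is serialized to JSON, Base64-encoded, attached to the bot's reply as a text file, and later recovered from the attachments of a quoted message before being passed back into submit_gpt. The standalone sketch below illustrates that round trip under those assumptions; the function names are illustrative stand-ins, not the exact helpers in handlers/gpt_handler.py.

    import base64
    import json

    def conversation_to_b64(history):
        # Serialize the conversation history to JSON, then Base64-encode it
        # so it can travel as a plain-text attachment.
        return base64.b64encode(json.dumps(history).encode("utf-8")).decode("utf-8")

    def b64_to_conversation(b64_text):
        # Reverse the encoding: Base64-decode, then parse the JSON back into
        # the list of {"role", "content"} messages.
        return json.loads(base64.b64decode(b64_text).decode("utf-8"))

    history = [
        {"role": "system", "content": "You are a helpful chatbot for signal groups."},
        {"role": "user", "content": "Hello"},
        {"role": "assistant", "content": "Hi there!"},
    ]
    encoded = conversation_to_b64(history)
    assert b64_to_conversation(encoded) == history

Note that the later patches return the bare Base64 string rather than the data:text/plain MIME wrapper tried earlier in the series, which is the form the corresponding decode helper expects when it rebuilds the JSON history.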