diff --git a/.github/workflows/prod-801-ci-cd.yaml b/.github/workflows/prod-801-ci-cd.yaml new file mode 100644 index 00000000..9545044f --- /dev/null +++ b/.github/workflows/prod-801-ci-cd.yaml @@ -0,0 +1,70 @@ +name: Deploy A2rchi Prod for 8.01 +run-name: ${{ github.actor }} deploys A2rchi for 8.01 to prod +on: + push: + branches: + - release-8.01 +jobs: + deploy-prod-system: + runs-on: ubuntu-latest + env: + SSH_AUTH_SOCK: /tmp/ssh_agent.sock + steps: + # boilerplate message and pull repository to CI runner + - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event." + - uses: actions/checkout@v3 + - run: echo "The ${{ github.repository }} repository has been cloned to the runner." + + # setup SSH + - name: Setup SSH + run: | + mkdir -p /home/runner/.ssh/ + echo "${{ secrets.SSH_PRIVATE_KEY_MDRUSSO }}" > /home/runner/.ssh/id_rsa_submit + chmod 600 /home/runner/.ssh/id_rsa_submit + echo "${{ secrets.SSH_SUBMIT_KNOWN_HOSTS }}" > ~/.ssh/known_hosts + cp ${{ github.workspace }}/deploy/ssh_config /home/runner/.ssh/config + ssh-agent -a $SSH_AUTH_SOCK > /dev/null + ssh-add /home/runner/.ssh/id_rsa_submit + + # create secrets files for docker-compose + - name: Create Secrets Files + run: | + mkdir -p ${{ github.workspace }}/deploy/prod-801/secrets/ + touch ${{ github.workspace }}/deploy/prod-801/secrets/flask_uploader_app_secret_key.txt + echo "${{ secrets.PROD_FLASK_UPLOADER_APP_SECRET_KEY }}" >> ${{ github.workspace }}/deploy/prod-801/secrets/flask_uploader_app_secret_key.txt + chmod 400 ${{ github.workspace }}/deploy/prod-801/secrets/flask_uploader_app_secret_key.txt + touch ${{ github.workspace }}/deploy/prod-801/secrets/uploader_salt.txt + echo "${{ secrets.PROD_UPLOADER_SALT }}" >> ${{ github.workspace }}/deploy/prod-801/secrets/uploader_salt.txt + chmod 400 ${{ github.workspace }}/deploy/prod-801/secrets/uploader_salt.txt + touch ${{ github.workspace }}/deploy/prod-801/secrets/openai_api_key.txt + echo "${{ secrets.OPENAI_API_KEY 
}}" >> ${{ github.workspace }}/deploy/prod-801/secrets/openai_api_key.txt + chmod 400 ${{ github.workspace }}/deploy/prod-801/secrets/openai_api_key.txt + touch ${{ github.workspace }}/deploy/prod-801/secrets/hf_token.txt + echo "${{ secrets.HF_TOKEN }}" >> ${{ github.workspace }}/deploy/prod-801/secrets/hf_token.txt + chmod 400 ${{ github.workspace }}/deploy/prod-801/secrets/hf_token.txt + + # stop any existing docker compose that's running + - name: Stop Docker Compose + run: | + ssh submit-t3desk 'bash -s' < ${{ github.workspace }}/deploy/prod-801/prod-801-stop.sh + + # copy repository to machine + - name: Copy Repository + run: | + rsync -e ssh -r ${{ github.workspace}}/* --exclude .git/ --delete submit-t3desk:~/A2rchi-prod-801/ + + # run deploy script + - name: Run Deploy Script + run: | + ssh submit-t3desk 'bash -s' < ${{ github.workspace }}/deploy/prod-801/prod-801-install.sh + + # clean up secret files + - name: Remove Secrets from Runner + run: | + rm ${{ github.workspace }}/deploy/prod-801/secrets/flask_uploader_app_secret_key.txt + rm ${{ github.workspace }}/deploy/prod-801/secrets/uploader_salt.txt + rm ${{ github.workspace }}/deploy/prod-801/secrets/openai_api_key.txt + rm ${{ github.workspace }}/deploy/prod-801/secrets/hf_token.txt + + # print job status + - run: echo "🍏 This job's status is ${{ job.status }}." 
\ No newline at end of file diff --git a/.gitignore b/.gitignore index 1c11446a..bdd9ff66 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,4 @@ venv *.egg-info *sqlite_db .vscode +801-content/ diff --git a/A2rchi/bin/service_chat.py b/A2rchi/bin/service_chat.py index 4a74cd36..2b6f0120 100644 --- a/A2rchi/bin/service_chat.py +++ b/A2rchi/bin/service_chat.py @@ -11,6 +11,7 @@ os.environ['OPENAI_API_KEY'] = read_secret("OPENAI_API_KEY") os.environ['HUGGING_FACE_HUB_TOKEN'] = read_secret("HUGGING_FACE_HUB_TOKEN") config = Config_Loader().config["interfaces"]["chat_app"] +global_config = Config_Loader().config["global"] print(f"Starting Chat Service with (host, port): ({config['HOST']}, {config['PORT']})") def generate_script(config): @@ -22,6 +23,7 @@ def generate_script(config): template = f.read() filled_template = template.replace('XX-HTTP_PORT-XX', str(config["EXTERNAL_PORT"])) + filled_template = filled_template.replace('XX-TRAINED_ON-XX', str(global_config["TRAINED_ON"])) script_file = os.path.join(config["static_folder"], "script.js") with open(script_file, "w") as f: diff --git a/A2rchi/chains/prompts.py b/A2rchi/chains/prompts.py index e2c26c6a..cfc5ef93 100644 --- a/A2rchi/chains/prompts.py +++ b/A2rchi/chains/prompts.py @@ -1,27 +1,31 @@ # flake8: noqa from langchain.prompts.prompt import PromptTemplate +from A2rchi.utils.config_loader import Config_Loader -condense_history_template = """Given the following conversation between you (the AI named A2rchi), a human user who needs help, and an expert, and a follow up question, rephrase the follow up question to be a standalone question, in its original language. 
+config = Config_Loader().config["chains"]["prompts"] -Chat History: -{chat_history} -Follow Up Input: {question} -Standalone question:""" +def read_prompt(prompt_filepath, is_condense_prompt=False, is_main_prompt=False): + with open(prompt_filepath, "r") as f: + raw_prompt = f.read() -prompt_template = """You are a conversational chatbot named A2rchi who helps people navigate a computing resource named subMIT. You will be provided context to help you answer their questions. -Using your linux and computing knowledge, answer the question at the end. Unless otherwise indicated, assume the users are not well versed computing. - Please do not assume that subMIT machines have anything installed on top of native linux except if the context mentions it. -If you don't know, say "I don't know", if you need to ask a follow up question, please do. + prompt = "" + for line in raw_prompt.split("\n"): + if len(line.lstrip())>0 and line.lstrip()[0:1] != "#": + prompt += line + "\n" -Context: {context} Additionally, it is always preferred to use conda, if possible. + if is_condense_prompt and ("{chat_history}" not in prompt or "{question}" not in prompt): + raise ValueError("""Condensing prompt must contain \"{chat_history}\" and \"{question}\" tags. Instead, found prompt to be: + """ + prompt) + if is_main_prompt and ("{context}" not in prompt or "{question}" not in prompt): + raise ValueError("""Main prompt must contain \"{context}\" and \"{question}\" tags. 
Instead, found prompt to be: + """ + prompt) -Question: {question} -Helpful Answer:""" + return prompt QA_PROMPT = PromptTemplate( - template=prompt_template, input_variables=["context", "question"] + template=read_prompt(config["MAIN_PROMPT"], is_main_prompt=True), input_variables=["context", "question"] ) CONDENSE_QUESTION_PROMPT = PromptTemplate( - template=condense_history_template, input_variables=["chat_history", "question"] + template=read_prompt(config["CONDENSING_PROMPT"], is_condense_prompt=True), input_variables=["chat_history", "question"] ) diff --git a/A2rchi/interfaces/chat_app/app.py b/A2rchi/interfaces/chat_app/app.py index e4353ce9..20401bd5 100644 --- a/A2rchi/interfaces/chat_app/app.py +++ b/A2rchi/interfaces/chat_app/app.py @@ -153,8 +153,9 @@ def __init__(self, app, **configs): CORS(self.app) # add endpoints for flask app - self.add_endpoint('/get_chat_response', 'get_chat_response', self.get_chat_response, methods=["POST"]) + self.add_endpoint('/api/get_chat_response', 'get_chat_response', self.get_chat_response, methods=["POST"]) self.add_endpoint('/', '', self.index) + self.add_endpoint('/terms', 'terms', self.terms) def configs(self, **configs): for config, value in configs: @@ -193,3 +194,6 @@ def get_chat_response(self): def index(self): return render_template('index.html') + + def terms(self): + return render_template('terms.html') diff --git a/A2rchi/interfaces/chat_app/static/script.js b/A2rchi/interfaces/chat_app/static/script.js index 2b7ab165..ed0b5302 100644 --- a/A2rchi/interfaces/chat_app/static/script.js +++ b/A2rchi/interfaces/chat_app/static/script.js @@ -18,7 +18,9 @@ const loadDataFromLocalstorage = () => { const defaultText = `
Start a conversation and explore the power of A2rchi.
Your chat history will be displayed here.
Start a conversation and explore the power of A2rchi, specially trained on subMIT.
+ Your chat history will be displayed here.
+ By using this website, you agree to the terms and conditions.
Start a conversation and explore the power of A2rchi.
Your chat history will be displayed here.
Start a conversation and explore the power of A2rchi, specially trained on XX-TRAINED_ON-XX.
+ Your chat history will be displayed here.
+ By using this website, you agree to the terms and conditions.
Welcome to A2rchi. By using this website, you agree to comply with and be bound by the following terms and conditions of use. Please review these terms carefully before using the website. If you do not agree to these terms, you should not use the website. +
+ +1.1 We may collect and store the questions you ask the chat bot for research and improvement purposes. However, we do not collect any additional metadata, and we will never share this information with any third party.
+ +2.1 You agree to use the chat bot provided on the website only for its intended purpose and not for any unauthorized or unlawful activities.
+ +2.2 You agree not to use the chat bot to generate spam or any automated content that disrupts the service or violates the rights of others.
+ +3.1 We reserve the right to terminate your access to the chat bot and the website at our discretion, without notice, if you violate these terms and conditions.
+ +4.1 We may revise these terms and conditions at any time without notice. By continuing to use the website after such changes, you agree to be bound by the revised terms.
+ +5.1 The chat bot provided on the website is for informational purposes only. We do not guarantee the accuracy, completeness, or reliability of the information provided by the chat bot.
+ +6.1 If you have any questions or concerns regarding these terms and conditions, please contact us at a2rchi@mit.edu.
+ +By using the website, you acknowledge that you have read, understood, and agree to be bound by these terms and conditions. Your continued use of the website constitutes your acceptance of these terms.
+ +