From 00a34897ba13bc91cf3d971f34511643e2dc42a6 Mon Sep 17 00:00:00 2001
From: baobabKoodaa
Date: Tue, 18 Jun 2024 15:13:45 +0300
Subject: [PATCH] Migrate to GPT-4o.

---
 README.md         | 4 ++--
 fly.toml          | 2 +-
 public/index.html | 4 ++--
 server.js         | 8 ++++++--
 4 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/README.md b/README.md
index 8a4d974..c8385e9 100644
--- a/README.md
+++ b/README.md
@@ -8,12 +8,12 @@ Try it here: [https://future.attejuvonen.fi](https://future.attejuvonen.fi)
 
 ### Dev
 
-1. Set up environment variables
+1. Set up environment variables (e.g. in /.vscode/launch.json)
   - OPENAI_API_KEY
   - OPENAI_ORGANIZATION
   - LOG_ENDPOINT
 
-2. Run server.js with node v17
+2. Run server.js with node v17 (e.g. with VSCode run -> run without debugging)
 
 3. Open http://localhost:3000
 
diff --git a/fly.toml b/fly.toml
index 6dd8ee2..f751b9b 100644
--- a/fly.toml
+++ b/fly.toml
@@ -6,7 +6,7 @@ kill_timeout = 5
 processes = []
 
 [build]
-  builder = "heroku/buildpacks:20"
+  builder = "heroku/builder:24"
 
 [env]
   PORT = "8080"
diff --git a/public/index.html b/public/index.html
index 3765393..ea2fd40 100644
--- a/public/index.html
+++ b/public/index.html
@@ -872,9 +872,9 @@
 
             return Math.round(Math.random() * 40)
         }
        if (['.', '?', '!'].includes(nextChar)) {
-            return 300 + Math.round(Math.random() * 500)
+            return 150 + Math.round(Math.random() * 250)
         }
-        return Math.round(Math.random() * 120)
+        return Math.round(Math.random() * 60)
     }
     const stopSpinner = () => {
diff --git a/server.js b/server.js
index 77a7d45..f39409d 100644
--- a/server.js
+++ b/server.js
@@ -9,6 +9,10 @@ import PROMPT_QA_EXAMPLES from "./prompt-qa-examples.js";
 
 const PROMPT_INSTRUCTIONS = fs.readFileSync('prompt-instructions.txt', 'utf8');
 
+if (!process.env.OPENAI_ORGANIZATION || !process.env.OPENAI_API_KEY) {
+    throw new Error('Missing env variables for OpenAI authentication!')
+}
+
 const configuration = new Configuration({
     organization: process.env.OPENAI_ORGANIZATION,
     apiKey: process.env.OPENAI_API_KEY
@@ -100,7 +104,7 @@ const constructPromptChatGPT = (PROMPT_INSTRUCTIONS, PROMPT_QA_EXAMPLES, session
 
 const smokeTestAPI = async () => {
     try {
-        const response = await openai.retrieveModel("text-davinci-003");
+        const response = await openai.retrieveModel("gpt-4o-2024-05-13");
     } catch (error) {
         serverStatusGreen = false
         const errorMessage = error.response ? (error.response.status + error.response.data) : error.message
@@ -122,7 +126,7 @@ const getResponse = async (PROMPT_INSTRUCTIONS, PROMPT_QA_EXAMPLES, sessionHisto
     }
     try {
         const response = await openai.createChatCompletion({
-            model: "gpt-3.5-turbo-0301",
+            model: "gpt-4o-2024-05-13",
             messages: messages,
             max_tokens: 256,
             temperature: 0.4
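Note: for reviewers, a minimal standalone sketch of the migrated call path, assuming the openai v3 Node SDK (Configuration / OpenAIApi) that server.js already uses; the file name, system prompt, and user message below are hypothetical placeholders, not the output of constructPromptChatGPT.

// sketch.mjs -- illustrative sketch only, not part of the patch
import { Configuration, OpenAIApi } from "openai";

// Same fail-fast check this patch adds to server.js.
if (!process.env.OPENAI_ORGANIZATION || !process.env.OPENAI_API_KEY) {
    throw new Error('Missing env variables for OpenAI authentication!')
}

const openai = new OpenAIApi(new Configuration({
    organization: process.env.OPENAI_ORGANIZATION,
    apiKey: process.env.OPENAI_API_KEY
}));

// Same request parameters as the patched getResponse(); messages are placeholders.
const response = await openai.createChatCompletion({
    model: "gpt-4o-2024-05-13",
    messages: [
        { role: "system", content: "Example system prompt." },
        { role: "user", content: "Example user question." }
    ],
    max_tokens: 256,
    temperature: 0.4
});

console.log(response.data.choices[0].message.content);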