From 4b202babf8db14600ed39742518a7f290fad0610 Mon Sep 17 00:00:00 2001
From: BoundlessAsura <122777244+boundless-asura@users.noreply.github.com>
Date: Tue, 25 Jul 2023 19:16:56 +0530
Subject: [PATCH] Dev rebase (#858)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Fix/agents delete (#795)
* fixing the send email attachement issue
* delete run label (#796)
* fix
* fixing schedule agent run
* Updated README.md
  Provided Instructions for serper.dev API key creation and configuring it in the Toolkit Page
* Update README.md
  Updated Image
* Update config_template.yaml
  Modified Serp Configuration for SERPER
* Twitter and Google Oauth for Web (#808)
  Co-authored-by: Tarraann
* Updated DB settings (#805)
* document fix (#806)
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* Bugs fixes new (#809)
* csv file fix (#810)
* ui bug bash fixes
* ui bug bash fixes
* Bugs new (#814)
* Fixes (#815)
  Co-authored-by: Tarraann
* ui bug bash fixes
* ui bug bash fixes
* Handling the rate limit exception
* fixed add_to_vector_store_and_create_summary (#827)
* fix 97971622 (#824)
* Llm models fix (#829)
* removed unnecessary toolkits from marketplace
* Dalle fix (#825)
* ui bug bash fixes (#839)
* Fixing twitter creds
* fixing google oauth issues
* fixing google oauth issue
* fixess
* fixess
* fixess
* fixess (#854)
* Resource duplicate fix (#803)
* Supercoder Improve tool addition (#755)
* rebased
* Read file s3 fix (#823)
* Updated
* Read from s3 works
* Read from s3 works
* Minor Updates
* Added Unit Tests
* Updated Tests
* Updated Tests
* Refactored
* Updated Test
* Updated Test
* Updated Test
* Edit agent templates fix (#838)
* ui bug bash fixes
* ui bug bash fixes
* ui bug bash fixes
* rebased
* first commit
* instagram APIs implemented and AI generated caption utility added
* Image upload to S3 and URL generation added
* added upload to s3 instagram bucket
* Refactored code
* removed jpeg files used for testing
* fixed recurring run issues
* fixed recurring issues
* Readme added
* instabot config folder deleted
* refactored code
* Handled the case where stable diffusion generated multiple photos
* docker compose.yaml version reverted to original
* removed the utility to add stable diffusion automatically
* added test cases and modified readme
* Added instagram tool bucket entry in config_template.yaml
* added instagram tool bucket entry in config template

---------

Co-authored-by: I’m <133493246+TransformerOptimus@users.noreply.github.com>
Co-authored-by: TransformerOptimus
Co-authored-by: Phoenix2809 <133874957+Phoenix2809@users.noreply.github.com>
Co-authored-by: Taran <97586318+Tarraann@users.noreply.github.com>
Co-authored-by: Tarraann
Co-authored-by: luciferlinx <129729795+luciferlinx101@users.noreply.github.com>
Co-authored-by: Fluder-Paradyne <121793617+Fluder-Paradyne@users.noreply.github.com>
Co-authored-by: Kalki
Co-authored-by: Kalki <97698934+jedan2506@users.noreply.github.com>
Co-authored-by: GeekyBaller <43145646+neelayan7@users.noreply.github.com>
Co-authored-by: Nishant Borthakur <101320057+nborthy@users.noreply.github.com>
Co-authored-by: NishantBorthakur
Co-authored-by: Autocop-Agent <129729746+Autocop-Agent@users.noreply.github.com>
Co-authored-by: Anisha Gupta <60440541+anisha1607@users.noreply.github.com>
Co-authored-by: Maverick-F35
---
 config_template.yaml | 6 +-
gui/pages/Content/Agents/ActivityFeed.js | 14 ++-- gui/pages/Content/Agents/AgentCreate.js | 73 ++++++++----------- gui/pages/Content/Agents/AgentSchedule.js | 11 +-- .../Content/Agents/AgentTemplatesList.js | 11 +-- gui/pages/Content/Agents/AgentWorkspace.js | 14 ++-- gui/pages/Content/Agents/Agents.js | 2 +- gui/pages/Content/Agents/Details.js | 4 +- gui/pages/Content/Agents/ResourceList.js | 46 ++++++------ gui/pages/Content/Agents/ResourceManager.js | 1 - gui/pages/Content/Marketplace/MarketTools.js | 10 ++- .../Content/Toolkits/ToolkitWorkspace.js | 10 ++- gui/pages/Content/Toolkits/Toolkits.js | 6 +- gui/pages/Dashboard/Content.js | 4 +- gui/pages/Dashboard/Settings/Settings.js | 46 +++++++----- gui/pages/_app.css | 26 ++++++- gui/pages/api/DashboardService.js | 4 + gui/utils/utils.js | 15 ++-- main.py | 5 +- .../cac478732572_delete_agent_feature.py | 4 +- superagi/agent/output_parser.py | 1 - superagi/agent/super_agi.py | 20 +++-- superagi/controllers/agent.py | 58 ++++++++------- superagi/controllers/google_oauth.py | 14 +++- superagi/controllers/organisation.py | 34 +++++++++ superagi/controllers/twitter_oauth.py | 19 ++--- superagi/helper/twitter_tokens.py | 8 +- superagi/jobs/agent_executor.py | 6 +- superagi/llms/base_llm.py | 4 + superagi/llms/google_palm.py | 14 ++++ superagi/llms/llm_model_factory.py | 1 + superagi/llms/openai.py | 18 +++++ superagi/models/db.py | 8 +- superagi/resource_manager/file_manager.py | 5 +- .../llama_document_summary.py | 2 + superagi/resource_manager/resource_manager.py | 9 ++- superagi/resource_manager/resource_summary.py | 5 +- .../{README.MD => README.md} | 6 +- 38 files changed, 332 insertions(+), 212 deletions(-) rename superagi/tools/google_serp_search/{README.MD => README.md} (85%) diff --git a/config_template.yaml b/config_template.yaml index 331553776..fedd3b642 100644 --- a/config_template.yaml +++ b/config_template.yaml @@ -68,8 +68,8 @@ WEAVIATE_USE_EMBEDDED: true GOOGLE_API_KEY: YOUR_GOOGLE_API_KEY SEARCH_ENGINE_ID: YOUR_SEARCH_ENIGNE_ID -# IF YOU DONT HAVE GOOGLE SEARCH KEY, USE THIS -SERP_API_KEY: YOUR_SERP_API_KEY +# IF YOU DONT HAVE GOOGLE SEARCH KEY, YOU CAN USE SERPER.DEV KEYS +SERP_API_KEY: YOUR_SERPER_API_KEY #ENTER YOUR EMAIL CREDENTIALS TO ACCESS EMAIL TOOL EMAIL_ADDRESS: YOUR_EMAIL_ADDRESS @@ -118,4 +118,4 @@ ENGINE_ID: "stable-diffusion-xl-beta-v2-2-2" ## To use Qdrant for vector store #QDRANT_HOST_NAME: YOUR_QDRANT_HOST_NAME -#QDRANT_PORT: YOUR_QDRANT_PORT \ No newline at end of file +#QDRANT_PORT: YOUR_QDRANT_PORT diff --git a/gui/pages/Content/Agents/ActivityFeed.js b/gui/pages/Content/Agents/ActivityFeed.js index 8e592af8b..8c8d83cc5 100644 --- a/gui/pages/Content/Agents/ActivityFeed.js +++ b/gui/pages/Content/Agents/ActivityFeed.js @@ -74,10 +74,9 @@ export default function ActivityFeed({selectedRunId, selectedView, setFetchedDat }, [runStatus]) function fetchFeeds() { - console.log("In") - setIsLoading(true); - console.log(isLoading) - getExecutionFeeds(selectedRunId) + if (selectedRunId !== null) { + setIsLoading(true); + getExecutionFeeds(selectedRunId) .then((response) => { const data = response.data; setFeeds(data.feeds); @@ -90,6 +89,7 @@ export default function ActivityFeed({selectedRunId, selectedView, setFetchedDat console.error('Error fetching execution feeds:', error); setIsLoading(false); // and this line }); + } } useEffect(() => { @@ -166,12 +166,10 @@ export default function ActivityFeed({selectedRunId, selectedView, setFetchedDat } {feeds.length < 1 && !agent?.is_running && !agent?.is_scheduled ? 
(isLoading ? -
+
- :
- The Agent is not scheduled -
): null + :
The Agent is not scheduled
): null }
{feedContainerRef.current && feedContainerRef.current.scrollTop >= 1200 && diff --git a/gui/pages/Content/Agents/AgentCreate.js b/gui/pages/Content/Agents/AgentCreate.js index 2db2c97ac..c01d945c0 100644 --- a/gui/pages/Content/Agents/AgentCreate.js +++ b/gui/pages/Content/Agents/AgentCreate.js @@ -8,6 +8,7 @@ import { editAgentTemplate, fetchAgentTemplateConfigLocal, getOrganisationConfig, + getLlmModels, updateExecution, uploadFile } from "@/pages/api/DashboardService"; @@ -16,22 +17,13 @@ import { openNewTab, removeTab, setLocalStorageValue, - setLocalStorageArray, returnResourceIcon, getUserTimezone, createInternalId, preventDefault + setLocalStorageArray, returnResourceIcon, getUserTimezone, createInternalId,preventDefault,excludedToolkits } from "@/utils/utils"; import {EventBus} from "@/utils/eventBus"; import 'moment-timezone'; import AgentSchedule from "@/pages/Content/Agents/AgentSchedule"; -export default function AgentCreate({ - sendAgentData, - selectedProjectId, - fetchAgents, - toolkits, - organisationId, - template, - internalId - }) { - +export default function AgentCreate({sendAgentData, selectedProjectId, fetchAgents, toolkits, organisationId, template, internalId, env}) { const [advancedOptions, setAdvancedOptions] = useState(false); const [agentName, setAgentName] = useState(""); const [agentTemplateId, setAgentTemplateId] = useState(null); @@ -59,8 +51,8 @@ export default function AgentCreate({ const [goals, setGoals] = useState(['Describe the agent goals here']); const [instructions, setInstructions] = useState(['']); - const models = ['gpt-4', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-4-32k', 'google-palm-bison-001'] - const [model, setModel] = useState(models[1]); + const [modelsArray, setModelsArray] = useState([]); + const [model, setModel] = useState(''); const modelRef = useRef(null); const [modelDropdown, setModelDropdown] = useState(false); @@ -94,18 +86,12 @@ export default function AgentCreate({ const toolkitRef = useRef(null); const [toolkitDropdown, setToolkitDropdown] = useState(false); - const excludedToolkits = ["Thinking Toolkit", "Human Input Toolkit", "Resource Toolkit"]; const [hasAPIkey, setHasAPIkey] = useState(false); const [createDropdown, setCreateDropdown] = useState(false); const [createModal, setCreateModal] = useState(false); const [scheduleData, setScheduleData] = useState(null); - const [col6ScrollTop, setCol6ScrollTop] = useState(0); - - const handleCol3Scroll = (event) => { - setCol6ScrollTop(event.target.scrollTop); - }; useEffect(() => { getOrganisationConfig(organisationId, "model_api_key") @@ -138,6 +124,21 @@ export default function AgentCreate({ }, [toolNames]); useEffect(() => { + getLlmModels() + .then((response) => { + const models = response.data || []; + const selected_model = localStorage.getItem("agent_model_" + String(internalId)) || ''; + setModelsArray(models); + if(models.length > 0 && !selected_model) { + setLocalStorageValue("agent_model_" + String(internalId), models[0], setModel); + } else { + setModel(selected_model); + } + }) + .catch((error) => { + console.error('Error fetching models:', error); + }); + if (template !== null) { setLocalStorageValue("agent_name_" + String(internalId), template.name, setAgentName); setLocalStorageValue("agent_description_" + String(internalId), template.description, setAgentDescription); @@ -266,8 +267,8 @@ export default function AgentCreate({ }; const handleModelSelect = (index) => { - setLocalStorageValue("agent_model_" + String(internalId), models[index], setModel); - if 
(models[index] === "google-palm-bison-001") { + setLocalStorageValue("agent_model_" + String(internalId), modelsArray[index], setModel); + if (modelsArray[index] === "google-palm-bison-001") { setAgentType("Fixed Task Queue") } setModelDropdown(false); @@ -392,6 +393,10 @@ export default function AgentCreate({ toast.error("Add atleast one tool", {autoClose: 1800}); return false; } + if(!modelsArray.includes(model)) { + toast.error("Your key does not have access to the selected model", {autoClose: 1800}); + return false; + } return true; } @@ -716,7 +721,7 @@ export default function AgentCreate({ return (<>
-
+
Create new agent
@@ -787,7 +792,7 @@ export default function AgentCreate({
{modelDropdown &&
- {models.map((model, index) => ( + {modelsArray?.map((model, index) => (
handleModelSelect(index)} style={{padding: '12px 14px', maxWidth: '100%'}}> {model} @@ -826,7 +831,7 @@ export default function AgentCreate({ {toolkitDropdown &&
{toolkitList && toolkitList.filter((toolkit) => toolkit.tools ? toolkit.tools.some((tool) => tool.name.toLowerCase().includes(searchValue.toLowerCase())) : false).map((toolkit, index) => (
- {toolkit.name !== null && !excludedToolkits.includes(toolkit.name) &&
+ {toolkit.name !== null && !excludedToolkits().includes(toolkit.name) &&
addToolkit(toolkit)} className="custom_select_option" style={{ padding: '10px 14px', maxWidth: '100%', @@ -1055,23 +1060,7 @@ export default function AgentCreate({ )}
- {createDropdown && (
{ - setCreateModal(true); - setCreateDropdown(false); - }}>Create & Schedule Run + {createDropdown && (
{setCreateModal(true);setCreateDropdown(false);}}>Create & Schedule Run
)}
@@ -1088,7 +1077,7 @@ export default function AgentCreate({
{createModal && ( - + )}
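The AgentCreate.js hunks above replace the hard-coded model array with a list fetched when the component mounts. A minimal sketch of the contract the dropdown now relies on (loadModelOptions is an illustrative helper, getLlmModels is the DashboardService call added later in this patch, and the response is assumed to be a plain array of model-name strings):

```js
import { getLlmModels } from "@/pages/api/DashboardService";

// Illustrative helper: resolve the model options and the initially selected model.
async function loadModelOptions(internalId) {
  const response = await getLlmModels();      // GET organisations/llm_models
  const models = response.data || [];         // assumed shape: ["gpt-4", "gpt-3.5-turbo", ...]
  const stored = localStorage.getItem("agent_model_" + String(internalId));
  // Keep the stored choice only if the key still has access to it; otherwise
  // fall back to the first available model, mirroring the validation added above.
  const selected = stored && models.includes(stored) ? stored : (models[0] || "");
  return { models, selected };
}
```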
diff --git a/gui/pages/Content/Agents/AgentSchedule.js b/gui/pages/Content/Agents/AgentSchedule.js index b78b2b168..574c0e6c7 100644 --- a/gui/pages/Content/Agents/AgentSchedule.js +++ b/gui/pages/Content/Agents/AgentSchedule.js @@ -9,21 +9,14 @@ import {agentScheduleComponent, createAndScheduleRun, updateSchedule} from "@/pa import {EventBus} from "@/utils/eventBus"; import moment from 'moment'; -export default function AgentSchedule({ - internalId, - closeCreateModal, - type, - agentId, - setCreateModal, - setCreateEditModal - }) { +export default function AgentSchedule({internalId, closeCreateModal, type, agentId, setCreateModal, setCreateEditModal, env}) { const [isRecurring, setIsRecurring] = useState(false); const [timeDropdown, setTimeDropdown] = useState(false); const [expiryDropdown, setExpiryDropdown] = useState(false); const [startTime, setStartTime] = useState(''); - const timeUnitArray = ['Days', 'Hours', 'Minutes']; + const timeUnitArray = (env === 'PROD') ? ['Days', 'Hours'] : ['Days', 'Hours', 'Minutes']; const [timeUnit, setTimeUnit] = useState(timeUnitArray[1]); const [timeValue, setTimeValue] = useState(null); diff --git a/gui/pages/Content/Agents/AgentTemplatesList.js b/gui/pages/Content/Agents/AgentTemplatesList.js index 01cda96c7..dfec926da 100644 --- a/gui/pages/Content/Agents/AgentTemplatesList.js +++ b/gui/pages/Content/Agents/AgentTemplatesList.js @@ -5,14 +5,7 @@ import {fetchAgentTemplateListLocal} from "@/pages/api/DashboardService"; import AgentCreate from "@/pages/Content/Agents/AgentCreate"; import {setLocalStorageValue, openNewTab} from "@/utils/utils"; -export default function AgentTemplatesList({ - sendAgentData, - selectedProjectId, - fetchAgents, - toolkits, - organisationId, - internalId - }) { +export default function AgentTemplatesList({sendAgentData, selectedProjectId, fetchAgents, toolkits, organisationId, internalId, env}) { const [agentTemplates, setAgentTemplates] = useState([]) const [createAgentClicked, setCreateAgentClicked] = useState(false) const [sendTemplate, setSendTemplate] = useState(null) @@ -112,7 +105,7 @@ export default function AgentTemplatesList({
: } + template={sendTemplate} env={env} />}
) }; diff --git a/gui/pages/Content/Agents/AgentWorkspace.js b/gui/pages/Content/Agents/AgentWorkspace.js index bd3372d07..71c80bd00 100644 --- a/gui/pages/Content/Agents/AgentWorkspace.js +++ b/gui/pages/Content/Agents/AgentWorkspace.js @@ -25,7 +25,7 @@ import {EventBus} from "@/utils/eventBus"; import 'moment-timezone'; import AgentSchedule from "@/pages/Content/Agents/AgentSchedule"; -export default function AgentWorkspace({agentId, agentName, selectedView, agents, internalId}) { +export default function AgentWorkspace({env, agentId, agentName, selectedView, agents, internalId}) { const [leftPanel, setLeftPanel] = useState('activity_feed') const [rightPanel, setRightPanel] = useState('') const [history, setHistory] = useState(true) @@ -235,7 +235,7 @@ export default function AgentWorkspace({agentId, agentName, selectedView, agents } function fetchAgentScheduleComponent() { - if (agent.is_scheduled) { + if (agent?.is_scheduled) { getDateTime(agentId) .then((response) => { setAgentScheduleDetails(response.data) @@ -365,9 +365,9 @@ export default function AgentWorkspace({agentId, agentName, selectedView, agents }}>Resume} {agentExecutions && agentExecutions.length > 1 &&
  • { updateRunStatus("TERMINATED") - }}>Delete
  • } + }}>Delete Run} - {agent && agent.is_scheduled ? (
    + {agent?.is_scheduled ? (
  • Edit Schedule
  • Stop Schedule
  • ) : (
    @@ -379,10 +379,10 @@ export default function AgentWorkspace({agentId, agentName, selectedView, agents
    } {createModal && - setCreateModal(false)}/>} {createEditModal && - setCreateEditModal(false)}/>} {createStopModal && (
    @@ -406,7 +406,7 @@ export default function AgentWorkspace({agentId, agentName, selectedView, agents
    {leftPanel === 'activity_feed' &&
    -
    } {leftPanel === 'agent_type' && diff --git a/gui/pages/Content/Agents/Agents.js b/gui/pages/Content/Agents/Agents.js index 3eb519868..1451acbda 100644 --- a/gui/pages/Content/Agents/Agents.js +++ b/gui/pages/Content/Agents/Agents.js @@ -30,7 +30,7 @@ export default function Agents({sendAgentData, agents}) { src="/images/loading.gif" alt="active-icon"/>
    }
    {agent.name}
    - {agent.is_scheduled && + {agent?.is_scheduled &&
    check-icon
    }
    diff --git a/gui/pages/Content/Agents/Details.js b/gui/pages/Content/Agents/Details.js index 8e1620daf..2a5c66f48 100644 --- a/gui/pages/Content/Agents/Details.js +++ b/gui/pages/Content/Agents/Details.js @@ -16,7 +16,7 @@ export default function Details({agentDetails, runCount, goals, instructions, ag }, [instructions]); useEffect(() => { - if (agent.is_scheduled) { + if (agent?.is_scheduled) { if (agentScheduleDetails?.recurrence_interval !== null) { if ((agentScheduleDetails?.expiry_runs === -1 || agentScheduleDetails?.expiry_runs == null) && agentScheduleDetails?.expiry_date !== null) { let expiryDate; @@ -182,7 +182,7 @@ export default function Details({agentDetails, runCount, goals, instructions, ag
    info-icon
    Stop after {agentDetails.max_iterations} iterations
    } - {agent.is_scheduled &&
    + {agent?.is_scheduled &&
    info-icon
    {scheduleText}
    } diff --git a/gui/pages/Content/Agents/ResourceList.js b/gui/pages/Content/Agents/ResourceList.js index 288628eac..0207211ce 100644 --- a/gui/pages/Content/Agents/ResourceList.js +++ b/gui/pages/Content/Agents/ResourceList.js @@ -34,8 +34,8 @@ export default function ResourceList({files, channel, runs}) { ) return ( -
    - {channel === 'output' && (!isAnyFileWithAgentId || files.length <= 0 ? +
    + {channel === 'output' && (!isAnyFileWithAgentId || files.length <= 0 ?
    no-permissions No Output files! @@ -43,30 +43,30 @@ export default function ResourceList({files, channel, runs}) { :
    {filesByRun.map((filesRun, index) => ( -
    -
    setSelectedRunId(filesRun.run.id === selectedRunId ? null : filesRun.run.id)}> -
    - arrow - {filesRun.run.name} -
    bolt Run {index + 1}
    -
    - download_icon downloadRunFiles(filesRun.run.id, filesRun.run.name)}/> -
    +
    +
    setSelectedRunId(filesRun.run.id === selectedRunId ? null : filesRun.run.id)}> +
    + arrow + {filesRun.run.name} +
    bolt Run {filesByRun.length - index}
    +
    + download_icon downloadRunFiles(filesRun.run.id, filesRun.run.name)}/> +
    - {selectedRunId === filesRun.run.id && ( -
    - {filesRun.files.map((file, index) => )} -
    - )} + {selectedRunId === filesRun.run.id && ( +
    + {filesRun.files.map((file, index) => )}
    - ))} + )}
    + ))} +
    )} {channel === 'input' && diff --git a/gui/pages/Content/Agents/ResourceManager.js b/gui/pages/Content/Agents/ResourceManager.js index d710dca49..f3acd2743 100644 --- a/gui/pages/Content/Agents/ResourceManager.js +++ b/gui/pages/Content/Agents/ResourceManager.js @@ -30,7 +30,6 @@ export default function ResourceManager({agentId, runs}) { } } }; - const handleFileInputChange = (event) => { const files = event.target.files; handleFile(files); diff --git a/gui/pages/Content/Marketplace/MarketTools.js b/gui/pages/Content/Marketplace/MarketTools.js index 6b7c800f1..1f76c4a32 100644 --- a/gui/pages/Content/Marketplace/MarketTools.js +++ b/gui/pages/Content/Marketplace/MarketTools.js @@ -3,7 +3,7 @@ import Image from "next/image"; import styles from './Market.module.css'; import {fetchToolTemplateList} from "@/pages/api/DashboardService"; import {EventBus} from "@/utils/eventBus"; -import {loadingTextEffect} from "@/utils/utils"; +import {loadingTextEffect, excludedToolkits} from "@/utils/utils"; import axios from 'axios'; export default function MarketTools() { @@ -20,7 +20,8 @@ export default function MarketTools() { axios.get('https://app.superagi.com/api/toolkits/marketplace/list/0') .then((response) => { const data = response.data || []; - setToolTemplates(data); + const filteredData = data?.filter((item) => !excludedToolkits().includes(item.name)); + setToolTemplates(filteredData); setIsLoading(false); }) .catch((error) => { @@ -30,7 +31,8 @@ export default function MarketTools() { fetchToolTemplateList() .then((response) => { const data = response.data || []; - setToolTemplates(data); + const filteredData = data?.filter((item) => !excludedToolkits().includes(item.name)); + setToolTemplates(filteredData); setIsLoading(false); }) .catch((error) => { @@ -48,7 +50,7 @@ export default function MarketTools() {
    {!isLoading ?
    - {toolTemplates.length > 0 ?
    {toolTemplates.map((item, index) => ( + {toolTemplates.length > 0 ?
    {toolTemplates.map((item) => (
    handleTemplateClick(item)}>
    diff --git a/gui/pages/Content/Toolkits/ToolkitWorkspace.js b/gui/pages/Content/Toolkits/ToolkitWorkspace.js index 99fc711e7..894cc9d0f 100644 --- a/gui/pages/Content/Toolkits/ToolkitWorkspace.js +++ b/gui/pages/Content/Toolkits/ToolkitWorkspace.js @@ -10,7 +10,7 @@ import { import styles from './Tool.module.css'; import {setLocalStorageValue, setLocalStorageArray, returnToolkitIcon, convertToTitleCase} from "@/utils/utils"; -export default function ToolkitWorkspace({toolkitDetails, internalId}) { +export default function ToolkitWorkspace({env, toolkitDetails, internalId}) { const [activeTab, setActiveTab] = useState('configuration') const [showDescription, setShowDescription] = useState(false) const [apiConfigs, setApiConfigs] = useState([]); @@ -25,9 +25,15 @@ export default function ToolkitWorkspace({toolkitDetails, internalId}) { }; function getGoogleToken(client_data) { + var redirect_uri = ""; + if (env == "PROD"){ + redirect_uri = 'https://app.superagi.com/api/google/oauth-tokens'; + } + else { + redirect_uri = "http://localhost:3000/api/google/oauth-tokens"; + } const client_id = client_data.client_id const scope = 'https://www.googleapis.com/auth/calendar'; - const redirect_uri = 'http://localhost:3000/api/google/oauth-tokens'; window.location.href = `https://accounts.google.com/o/oauth2/v2/auth?client_id=${client_id}&redirect_uri=${redirect_uri}&access_type=offline&response_type=code&scope=${scope}`; } diff --git a/gui/pages/Content/Toolkits/Toolkits.js b/gui/pages/Content/Toolkits/Toolkits.js index 6a9d31646..d103f2a2b 100644 --- a/gui/pages/Content/Toolkits/Toolkits.js +++ b/gui/pages/Content/Toolkits/Toolkits.js @@ -3,11 +3,9 @@ import Image from "next/image"; import styles from './Tool.module.css'; import styles1 from '../Agents/Agents.module.css' import 'react-toastify/dist/ReactToastify.css'; -import {createInternalId, returnToolkitIcon} from "@/utils/utils"; +import {createInternalId, returnToolkitIcon, excludedToolkits} from "@/utils/utils"; export default function Toolkits({sendToolkitData, toolkits, env}) { - const excludedToolkits = ["Thinking Toolkit", "Human Input Toolkit", "Resource Toolkit"]; - return ( <>
    @@ -28,7 +26,7 @@ export default function Toolkits({sendToolkitData, toolkits, env}) {
    {toolkits.map((tool, index) => - tool.name !== null && !excludedToolkits.includes(tool.name) && ( + tool.name !== null && !excludedToolkits().includes(tool.name) && (
    sendToolkitData(tool)}>
    diff --git a/gui/pages/Dashboard/Content.js b/gui/pages/Dashboard/Content.js index 8d9270712..4d4a9d924 100644 --- a/gui/pages/Dashboard/Content.js +++ b/gui/pages/Dashboard/Content.js @@ -364,14 +364,14 @@ export default function Content({env, selectedView, selectedProjectId, organisat } {tab.contentType === 'Toolkits' && - } + } {tab.contentType === 'Settings' && } {tab.contentType === 'Marketplace' && } {tab.contentType === 'Add_Toolkit' && } {tab.contentType === 'Create_Agent' && } + fetchAgents={getAgentList} toolkits={toolkits} env={env} />} {tab.contentType === 'APM' && }
    }
    diff --git a/gui/pages/Dashboard/Settings/Settings.js b/gui/pages/Dashboard/Settings/Settings.js index 291feea85..1fe3a88c0 100644 --- a/gui/pages/Dashboard/Settings/Settings.js +++ b/gui/pages/Dashboard/Settings/Settings.js @@ -41,7 +41,7 @@ export default function Settings({organisationId}) { function handleClickOutside(event) { if (sourceRef.current && !sourceRef.current.contains(event.target)) { - setSourceDropdown(false) + setSourceDropdown(false); } } @@ -52,15 +52,14 @@ export default function Settings({organisationId}) { }, [organisationId]); function updateKey(key, value) { - const configData = {"key": key, "value": value}; - updateOrganisationConfig(organisationId, configData) + const configData = { "key": key, "value": value }; + return updateOrganisationConfig(organisationId, configData) .then((response) => { - getKey("model_api_key"); - EventBus.emit("keySet", {}); - toast.success("Settings updated", {autoClose: 1800}); + return response.data; }) .catch((error) => { - console.error('Error fetching project:', error); + console.error('Error updating settings:', error); + throw new Error('Failed to update settings'); }); } @@ -75,19 +74,32 @@ export default function Settings({organisationId}) { const saveSettings = () => { if (modelApiKey === null || modelApiKey.replace(/\s/g, '') === '') { - toast.error("API key is empty", {autoClose: 1800}); - return + toast.error("API key is empty", { autoClose: 1800 }); + return; } validateLLMApiKey(source, modelApiKey) - .then((response) => { - if (response.data.status==="success") { - updateKey("model_api_key", modelApiKey); - updateKey("model_source", source); - } else { - toast.error("Invalid API key", {autoClose: 1800}); - } - }) + .then((response) => { + if (response.data.status === "success") { + Promise.all([ + updateKey("model_api_key", modelApiKey), + updateKey("model_source", source) + ]) + .then(() => { + toast.success("Settings updated", { autoClose: 1800 }); + }) + .catch((error) => { + console.error('Error updating settings:', error); + toast.error("Failed to update settings", { autoClose: 1800 }); + }); + } else { + toast.error("Invalid API key", { autoClose: 1800 }); + } + }) + .catch((error) => { + console.error('Error validating API key:', error); + toast.error("Failed to validate API key", { autoClose: 1800 }); + }); }; const handleTemperatureChange = (event) => { diff --git a/gui/pages/_app.css b/gui/pages/_app.css index 217eaf6c3..1656b217f 100644 --- a/gui/pages/_app.css +++ b/gui/pages/_app.css @@ -415,7 +415,7 @@ input[type="range"]::-moz-range-track { background-color: transparent; } -.custom_select_option { +.custom_select_option, .create_agent_dropdown_options { cursor: pointer; font-size: 12px; color: white; @@ -427,16 +427,29 @@ input[type="range"]::-moz-range-track { text-overflow: ellipsis; } -.custom_select_option:hover { +.custom_select_option:hover, .create_agent_dropdown_options:hover { background: #3B3B49; border-radius: 8px; } -.custom_select_option:active { +.custom_select_option:active, .create_agent_dropdown_options:active { background: #3B3B49; border-radius: 8px; } +.create_agent_dropdown_options{ + background: #3B3B49; + border-radius: 8px; + position: absolute; + top: -40px; + right: 0; + box-shadow: 0 2px 7px rgba(0,0,0,.4), 0 0 2px rgba(0,0,0,.22); + height: 40px; + width: 150px; + padding-top: 10px; + text-align: center; +} + @keyframes scale-in { from { opacity: 0; @@ -983,11 +996,16 @@ tr{ } .tools_used_tooltip{ - position: relative; + position: absolute; cursor: pointer; z-index: 100; } 
+.image_class{ + background: #FFFFFF80; + border-radius: 20px; +} + .image_class{ background: #FFFFFF80; border-radius: 20px; diff --git a/gui/pages/api/DashboardService.js b/gui/pages/api/DashboardService.js index f78c005da..ab9ef8df1 100644 --- a/gui/pages/api/DashboardService.js +++ b/gui/pages/api/DashboardService.js @@ -209,4 +209,8 @@ export const getActiveRuns = () => { export const getToolsUsage = () => { return api.get(`analytics/tools/used`); +} + +export const getLlmModels = () => { + return api.get(`organisations/llm_models`); } \ No newline at end of file diff --git a/gui/utils/utils.js b/gui/utils/utils.js index e8d05ba92..8f9682da2 100644 --- a/gui/utils/utils.js +++ b/gui/utils/utils.js @@ -37,13 +37,15 @@ export const convertToGMT = (dateTime) => { export const formatTimeDifference = (timeDifference) => { const units = ['years', 'months', 'days', 'hours', 'minutes']; + const singularUnits = ['year', 'month', 'day', 'hour', 'minute']; - for (const unit of units) { + for (let i = 0; i < units.length; i++) { + const unit = units[i]; if (timeDifference[unit] !== 0) { if (unit === 'minutes') { - return `${timeDifference[unit]} minutes ago`; + return `${timeDifference[unit]} ${timeDifference[unit] === 1 ? singularUnits[i] : unit} ago`; } else { - return `${timeDifference[unit]} ${unit} ago`; + return `${timeDifference[unit]} ${timeDifference[unit] === 1 ? singularUnits[i] : unit} ago`; } } } @@ -376,7 +378,10 @@ export const convertToTitleCase = (str) => { const capitalizedWords = words.map((word) => word.charAt(0).toUpperCase() + word.slice(1)); return capitalizedWords.join(' '); }; - export const preventDefault = (e) => { e.stopPropagation(); -}; \ No newline at end of file +}; + +export const excludedToolkits = () => { + return ["Thinking Toolkit", "Human Input Toolkit", "Resource Toolkit"]; +} \ No newline at end of file diff --git a/main.py b/main.py index 33c11eb98..0045c59a4 100644 --- a/main.py +++ b/main.py @@ -126,8 +126,9 @@ class Settings(BaseModel): def create_access_token(email, Authorize: AuthJWT = Depends()): - # expiry_time_hours = get_config("JWT_EXPIRY") - expiry_time_hours = 1 + expiry_time_hours = superagi.config.config.get_config("JWT_EXPIRY") + if type(expiry_time_hours) == str: + expiry_time_hours = int(expiry_time_hours) expires = timedelta(hours=expiry_time_hours) access_token = Authorize.create_access_token(subject=email, expires_time=expires) return access_token diff --git a/migrations/versions/cac478732572_delete_agent_feature.py b/migrations/versions/cac478732572_delete_agent_feature.py index e0c483975..15153145b 100644 --- a/migrations/versions/cac478732572_delete_agent_feature.py +++ b/migrations/versions/cac478732572_delete_agent_feature.py @@ -8,7 +8,6 @@ from alembic import op import sqlalchemy as sa - # revision identifiers, used by Alembic. 
revision = 'cac478732572' down_revision = 'e39295ec089c' @@ -17,7 +16,8 @@ def upgrade() -> None: - op.add_column('agents', sa.Column('is_deleted', sa.Boolean(), nullable=True)) + op.add_column('agents', sa.Column('is_deleted', sa.Boolean(), nullable=True, server_default=sa.false())) + def downgrade() -> None: op.drop_column('agents', 'is_deleted') diff --git a/superagi/agent/output_parser.py b/superagi/agent/output_parser.py index bd3747580..892c8cde0 100644 --- a/superagi/agent/output_parser.py +++ b/superagi/agent/output_parser.py @@ -42,4 +42,3 @@ def parse(self, response: str) -> AgentGPTAction: ) except BaseException as e: logger.info(f"AgentSchemaOutputParser: Error parsing JSON respons {e}") - return {} diff --git a/superagi/agent/super_agi.py b/superagi/agent/super_agi.py index cbcda55ca..5f324bea3 100644 --- a/superagi/agent/super_agi.py +++ b/superagi/agent/super_agi.py @@ -136,7 +136,10 @@ def execute(self, workflow_step: AgentWorkflowStep): total_tokens = current_tokens + TokenCounter.count_message_tokens(response, self.llm.get_model()) self.update_agent_execution_tokens(current_calls, total_tokens, session) - + + if 'error' in response and response['error'] == "RATE_LIMIT_EXCEEDED": + return {"result": "RATE_LIMIT_EXCEEDED", "retry": True} + if 'content' not in response or response['content'] is None: raise RuntimeError(f"Failed to get response from llm") assistant_reply = response['content'] @@ -210,18 +213,18 @@ def execute(self, workflow_step: AgentWorkflowStep): def handle_tool_response(self, session, assistant_reply): action = self.output_parser.parse(assistant_reply) tools = {t.name.lower().replace(" ", ""): t for t in self.tools} - action_name = action.name.lower().replace(" ", "") + action_name = action.name.lower().replace(" ", "") if action is not None else "" agent = session.query(Agent).filter(Agent.id == self.agent_config["agent_id"],).first() organisation = agent.get_agent_organisation(session) - if action_name == FINISH or action.name == "": + if action_name == FINISH or action_name == "": logger.info("\nTask Finished :) \n") output = {"result": "COMPLETE", "retry": False} - EventHandler(session=session).create_event('tool_used', {'tool_name':action.name}, self.agent_config["agent_id"], organisation.id), + EventHandler(session=session).create_event('tool_used', {'tool_name':action_name}, self.agent_config["agent_id"], organisation.id), return output if action_name in tools: tool = tools[action_name] retry = False - EventHandler(session=session).create_event('tool_used', {'tool_name':action.name}, self.agent_config["agent_id"], organisation.id), + EventHandler(session=session).create_event('tool_used', {'tool_name':action_name}, self.agent_config["agent_id"], organisation.id), try: parsed_args = self.clean_tool_args(action.args) observation = tool.execute(parsed_args) @@ -237,12 +240,12 @@ def handle_tool_response(self, session, assistant_reply): ) result = f"Tool {tool.name} returned: {observation}" output = {"result": result, "retry": retry} - elif action.name == "ERROR": + elif action_name == "ERROR": result = f"Error2: {action.args}. " output = {"result": result, "retry": False} else: result = ( - f"Unknown tool '{action.name}'. " + f"Unknown tool '{action_name}'. " f"Please refer to the 'TOOLS' list for available " f"tools and only respond in the specified JSON format." 
) @@ -298,7 +301,8 @@ def check_permission_in_restricted_mode(self, assistant_reply: str, session): excluded_tools = [FINISH, '', None] - if self.agent_config["permission_type"].upper() == "RESTRICTED" and action.name not in excluded_tools and \ + if self.agent_config["permission_type"].upper() == "RESTRICTED" and action is not None and \ + action.name not in excluded_tools and \ tools.get(action.name) and tools[action.name].permission_required: new_agent_execution_permission = AgentExecutionPermission( agent_execution_id=self.agent_config["agent_execution_id"], diff --git a/superagi/controllers/agent.py b/superagi/controllers/agent.py index 0b7169330..b287b05bc 100644 --- a/superagi/controllers/agent.py +++ b/superagi/controllers/agent.py @@ -9,7 +9,7 @@ from jsonmerge import merge from pytz import timezone -from sqlalchemy import func +from sqlalchemy import func, or_ from superagi.models.agent_execution_permission import AgentExecutionPermission from superagi.worker import execute_agent from superagi.helper.auth import check_auth @@ -109,11 +109,9 @@ def get_agent(agent_id: int, HTTPException (Status Code=404): If the Agent is not found or deleted. """ - if ( - db_agent := db.session.query(Agent) - .filter(Agent.id == agent_id, Agent.is_deleted == False) - .first() - ): + if (db_agent := db.session.query(Agent) + .filter(Agent.id == agent_id, or_(Agent.is_deleted == False, Agent.is_deleted is None)) + .first()): return db_agent else: raise HTTPException(status_code=404, detail="agent not found") @@ -141,7 +139,7 @@ def update_agent(agent_id: int, agent: AgentIn, HTTPException (Status Code=404): If the Agent or associated Project is not found. """ - db_agent = db.session.query(Agent).filter(Agent.id == agent_id, Agent.is_deleted == False).first() + db_agent = db.session.query(Agent).filter(Agent.id == agent_id, or_(Agent.is_deleted == False, Agent.is_deleted is None)).first() if not db_agent: raise HTTPException(status_code=404, detail="agent not found") @@ -190,10 +188,11 @@ def create_agent_with_config(agent_with_config: AgentConfigInput, project = db.session.query(Project).get(agent_with_config.project_id) if not project: raise HTTPException(status_code=404, detail="Project not found") - + invalid_tools = Tool.get_invalid_tools(agent_with_config.tools, db.session) if len(invalid_tools) > 0: # If the returned value is not True (then it is an invalid tool_id) - raise HTTPException(status_code=404, detail=f"Tool with IDs {str(invalid_tools)} does not exist. 404 Not Found.") + raise HTTPException(status_code=404, + detail=f"Tool with IDs {str(invalid_tools)} does not exist. 
404 Not Found.") agent_toolkit_tools = Toolkit.fetch_tool_ids_from_toolkit(session=db.session, toolkit_ids=agent_with_config.toolkits) @@ -215,10 +214,14 @@ def create_agent_with_config(agent_with_config: AgentConfigInput, AgentExecutionConfiguration.add_or_update_agent_execution_config(session=db.session, execution=execution, agent_execution_configs=agent_execution_configs) - agent = db.session.query(Agent).filter(Agent.id == db_agent.id,).first() + agent = db.session.query(Agent).filter(Agent.id == db_agent.id, ).first() organisation = agent.get_agent_organisation(db.session) - EventHandler(session=db.session).create_event('run_created', {'agent_execution_id': execution.id,'agent_execution_name':execution.name}, db_agent.id, organisation.id if organisation else 0), - EventHandler(session=db.session).create_event('agent_created', {'agent_name': agent_with_config.name, 'model': agent_with_config.model}, db_agent.id, organisation.id if organisation else 0) + EventHandler(session=db.session).create_event('run_created', {'agent_execution_id': execution.id, + 'agent_execution_name': execution.name}, db_agent.id, + organisation.id if organisation else 0), + EventHandler(session=db.session).create_event('agent_created', {'agent_name': agent_with_config.name, + 'model': agent_with_config.model}, db_agent.id, + organisation.id if organisation else 0) # execute_agent.delay(execution.id, datetime.now()) @@ -231,6 +234,7 @@ def create_agent_with_config(agent_with_config: AgentConfigInput, "contentType": "Agents" } + @router.post("/schedule", status_code=201) def create_and_schedule_agent(agent_config_schedule: AgentConfigSchedule, Authorize: AuthJWT = Depends(check_auth)): @@ -253,7 +257,8 @@ def create_and_schedule_agent(agent_config_schedule: AgentConfigSchedule, agent_config = agent_config_schedule.agent_config invalid_tools = Tool.get_invalid_tools(agent_config.tools, db.session) if len(invalid_tools) > 0: # If the returned value is not True (then it is an invalid tool_id) - raise HTTPException(status_code=404, detail=f"Tool with IDs {str(invalid_tools)} does not exist. 404 Not Found.") + raise HTTPException(status_code=404, + detail=f"Tool with IDs {str(invalid_tools)} does not exist. 404 Not Found.") agent_toolkit_tools = Toolkit.fetch_tool_ids_from_toolkit(session=db.session, toolkit_ids=agent_config.toolkits) @@ -289,6 +294,7 @@ def create_and_schedule_agent(agent_config_schedule: AgentConfigSchedule, "schedule_id": agent_schedule.id } + @router.post("/stop/schedule", status_code=200) def stop_schedule(agent_id: int, Authorize: AuthJWT = Depends(check_auth)): """ @@ -303,7 +309,7 @@ def stop_schedule(agent_id: int, Authorize: AuthJWT = Depends(check_auth)): """ agent_to_delete = db.session.query(AgentSchedule).filter(AgentSchedule.agent_id == agent_id, - AgentSchedule.status == "SCHEDULED").first() + AgentSchedule.status == "SCHEDULED").first() if not agent_to_delete: raise HTTPException(status_code=404, detail="Schedule not found") agent_to_delete.status = "STOPPED" @@ -326,7 +332,7 @@ def edit_schedule(schedule: AgentScheduleInput, """ agent_to_edit = db.session.query(AgentSchedule).filter(AgentSchedule.agent_id == schedule.agent_id, - AgentSchedule.status == "SCHEDULED").first() + AgentSchedule.status == "SCHEDULED").first() if not agent_to_edit: raise HTTPException(status_code=404, detail="Schedule not found") @@ -358,7 +364,7 @@ def get_schedule_data(agent_id: int, Authorize: AuthJWT = Depends(check_auth)): expiry_runs (Integer): The number of runs before the agent expires. 
""" agent = db.session.query(AgentSchedule).filter(AgentSchedule.agent_id == agent_id, - AgentSchedule.status == "SCHEDULED").first() + AgentSchedule.status == "SCHEDULED").first() if not agent: raise HTTPException(status_code=404, detail="Agent Schedule not found") @@ -371,7 +377,6 @@ def get_schedule_data(agent_id: int, Authorize: AuthJWT = Depends(check_auth)): else: tzone = timezone('GMT') - current_datetime = datetime.now(tzone).strftime("%d/%m/%Y %I:%M %p") return { @@ -406,7 +411,7 @@ def get_agents_by_project_id(project_id: int, if not project: raise HTTPException(status_code=404, detail="Project not found") - agents = db.session.query(Agent).filter(Agent.project_id == project_id, Agent.is_deleted == False).all() + agents = db.session.query(Agent).filter(Agent.project_id == project_id, or_(Agent.is_deleted == False, Agent.is_deleted is None)).all() new_agents, new_agents_sorted = [], [] for agent in agents: @@ -423,7 +428,7 @@ def get_agents_by_project_id(project_id: int, break # Check if the agent is scheduled is_scheduled = db.session.query(AgentSchedule).filter_by(agent_id=agent_id, - status="SCHEDULED").first() is not None + status="SCHEDULED").first() is not None new_agent = { **agent_dict, @@ -454,7 +459,7 @@ def get_agent_configuration(agent_id: int, # Define the agent_config keys to fetch keys_to_fetch = AgentTemplate.main_keys() - agent = db.session.query(Agent).filter(agent_id == Agent.id, Agent.is_deleted == False).first() + agent = db.session.query(Agent).filter(agent_id == Agent.id,or_(Agent.is_deleted == False, Agent.is_deleted is None)).first() if not agent: raise HTTPException(status_code=404, detail="Agent not found") @@ -485,7 +490,8 @@ def get_agent_configuration(agent_id: int, return response -@router.put("/delete/{agent_id}", status_code = 200) + +@router.put("/delete/{agent_id}", status_code=200) def delete_agent(agent_id: int, Authorize: AuthJWT = Depends(check_auth)): """ Delete an existing Agent @@ -502,18 +508,18 @@ def delete_agent(agent_id: int, Authorize: AuthJWT = Depends(check_auth)): Raises: HTTPException (Status Code=404): If the Agent or associated Project is not found or deleted already. 
""" - + db_agent = db.session.query(Agent).filter(Agent.id == agent_id).first() - db_agent_executions = db.session.query(AgentExecution).filter(AgentExecution.agent_id == agent_id).all() - + db_agent_executions = db.session.query(AgentExecution).filter(AgentExecution.agent_id == agent_id).all() + if not db_agent or db_agent.is_deleted: raise HTTPException(status_code=404, detail="agent not found") - + # Deletion Procedure db_agent.is_deleted = True if db_agent_executions: # Updating all the RUNNING executions to TERMINATED for db_agent_execution in db_agent_executions: db_agent_execution.status = "TERMINATED" - + db.session.commit() diff --git a/superagi/controllers/google_oauth.py b/superagi/controllers/google_oauth.py index 4eecd6ba3..1b3b99221 100644 --- a/superagi/controllers/google_oauth.py +++ b/superagi/controllers/google_oauth.py @@ -11,10 +11,11 @@ from datetime import datetime, timedelta from superagi.models.db import connect_db import http.client as http_client -from superagi.helper.auth import get_current_user +from superagi.helper.auth import get_current_user, check_auth from superagi.models.tool_config import ToolConfig from superagi.models.toolkit import Toolkit from superagi.models.oauth_tokens import OauthTokens +from superagi.config.config import get_config router = APIRouter() @@ -26,10 +27,15 @@ async def google_auth_calendar(code: str = Query(...), Authorize: AuthJWT = Depe client_secret = client_secret.value token_uri = 'https://oauth2.googleapis.com/token' scope = 'https://www.googleapis.com/auth/calendar' + env = get_config("ENV", "DEV") + if env == "DEV": + redirect_uri = "http://localhost:3000/api/google/oauth-tokens" + else: + redirect_uri = "https://app.superagi.com/api/google/oauth-tokens" params = { 'client_id': client_id, 'client_secret': client_secret, - 'redirect_uri': "http://localhost:3000/api/google/oauth-tokens", + 'redirect_uri': redirect_uri, 'scope': scope, 'grant_type': 'authorization_code', 'code': code, @@ -46,11 +52,11 @@ async def google_auth_calendar(code: str = Query(...), Authorize: AuthJWT = Depe return RedirectResponse(url=redirect_url_success) @router.post("/send_google_creds/toolkit_id/{toolkit_id}") -def send_google_calendar_configs(google_creds: dict, toolkit_id: int, Authorize: AuthJWT = Depends()): +def send_google_calendar_configs(google_creds: dict, toolkit_id: int, Authorize: AuthJWT = Depends(check_auth)): engine = connect_db() Session = sessionmaker(bind=engine) session = Session() - current_user = get_current_user() + current_user = get_current_user(Authorize) user_id = current_user.id toolkit = db.session.query(Toolkit).filter(Toolkit.id == toolkit_id).first() google_creds = json.dumps(google_creds) diff --git a/superagi/controllers/organisation.py b/superagi/controllers/organisation.py index dae46a532..39403d56d 100644 --- a/superagi/controllers/organisation.py +++ b/superagi/controllers/organisation.py @@ -6,8 +6,13 @@ from fastapi_sqlalchemy import db from pydantic import BaseModel +from superagi.helper.auth import get_user_organisation from superagi.helper.auth import check_auth +from superagi.helper.encyption_helper import decrypt_data from superagi.helper.tool_helper import register_toolkits +from superagi.llms.google_palm import GooglePalm +from superagi.llms.openai import OpenAi +from superagi.models.configuration import Configuration from superagi.models.organisation import Organisation from superagi.models.project import Project from superagi.models.user import User @@ -35,6 +40,7 @@ class OrganisationIn(BaseModel): 
class Config: orm_mode = True + # CRUD Operations @router.post("/add", response_model=OrganisationOut, status_code=201) def create_organisation(organisation: OrganisationIn, @@ -141,3 +147,31 @@ def get_organisations_by_user(user_id: int): organisation = Organisation.find_or_create_organisation(db.session, user) Project.find_or_create_default_project(db.session, organisation.id) return organisation + + +@router.get("/llm_models") +def get_llm_models(organisation=Depends(get_user_organisation)): + """ + Get all the llm models associated with an organisation. + + Args: + organisation: Organisation data. + """ + + model_api_key = db.session.query(Configuration).filter(Configuration.organisation_id == organisation.id, + Configuration.key == "model_api_key").first() + model_source = db.session.query(Configuration).filter(Configuration.organisation_id == organisation.id, + Configuration.key == "model_source").first() + + if model_api_key is None or model_source is None: + raise HTTPException(status_code=400, + detail="Organisation not found") + + decrypted_api_key = decrypt_data(model_api_key.value) + models = [] + if model_source.value == "OpenAi": + models = OpenAi(api_key=decrypted_api_key).get_models() + elif model_source.value == "Google Palm": + models = GooglePalm(api_key=decrypted_api_key).get_models() + + return models diff --git a/superagi/controllers/twitter_oauth.py b/superagi/controllers/twitter_oauth.py index 6bdfa3761..131c57ccb 100644 --- a/superagi/controllers/twitter_oauth.py +++ b/superagi/controllers/twitter_oauth.py @@ -1,26 +1,23 @@ -from fastapi import Depends, Query +import http.client as http_client +import json + from fastapi import APIRouter +from fastapi import Depends, Query from fastapi.responses import RedirectResponse from fastapi_jwt_auth import AuthJWT from fastapi_sqlalchemy import db -from sqlalchemy.orm import sessionmaker import superagi -import json -from superagi.models.db import connect_db -import http.client as http_client +from superagi.helper.auth import get_current_user, check_auth from superagi.helper.twitter_tokens import TwitterTokens -from superagi.helper.auth import get_current_user +from superagi.models.oauth_tokens import OauthTokens from superagi.models.tool_config import ToolConfig from superagi.models.toolkit import Toolkit -from superagi.models.oauth_tokens import OauthTokens router = APIRouter() @router.get('/oauth-tokens') async def twitter_oauth(oauth_token: str = Query(...),oauth_verifier: str = Query(...), Authorize: AuthJWT = Depends()): - print("///////////////////////////") - print(oauth_token) token_uri = f'https://api.twitter.com/oauth/access_token?oauth_verifier={oauth_verifier}&oauth_token={oauth_token}' conn = http_client.HTTPSConnection("api.twitter.com") conn.request("POST", token_uri, "") @@ -31,8 +28,8 @@ async def twitter_oauth(oauth_token: str = Query(...),oauth_verifier: str = Quer return RedirectResponse(url=redirect_url_success) @router.post("/send_twitter_creds/{twitter_creds}") -def send_twitter_tool_configs(twitter_creds: str, Authorize: AuthJWT = Depends()): - current_user = get_current_user() +def send_twitter_tool_configs(twitter_creds: str, Authorize: AuthJWT = Depends(check_auth)): + current_user = get_current_user(Authorize) user_id = current_user.id credentials = json.loads(twitter_creds) credentials["user_id"] = user_id diff --git a/superagi/helper/twitter_tokens.py b/superagi/helper/twitter_tokens.py index 10b36d59a..3cd8dc031 100644 --- a/superagi/helper/twitter_tokens.py +++ 
b/superagi/helper/twitter_tokens.py @@ -9,6 +9,7 @@ from sqlalchemy.orm import Session from superagi.models.toolkit import Toolkit from superagi.models.oauth_tokens import OauthTokens +from superagi.config.config import get_config class Creds: @@ -29,8 +30,13 @@ def get_request_token(self,api_data): http_method = 'POST' base_url = 'https://api.twitter.com/oauth/request_token' + env = get_config("ENV", "DEV") + if env == "DEV": + oauth_callback = "http://localhost:3000/api/twitter/oauth-tokens" + else: + oauth_callback = "https://app.superagi.com/api/twitter/oauth-tokens" params = { - 'oauth_callback': 'http://localhost:3000/api/twitter/oauth-tokens', + 'oauth_callback': oauth_callback, 'oauth_consumer_key': api_key, 'oauth_nonce': self.gen_nonce(), 'oauth_signature_method': 'HMAC-SHA1', diff --git a/superagi/jobs/agent_executor.py b/superagi/jobs/agent_executor.py index cd8f0f520..3c2aad9a7 100644 --- a/superagi/jobs/agent_executor.py +++ b/superagi/jobs/agent_executor.py @@ -263,7 +263,11 @@ def execute_next_action(self, agent_execution_id): return if "retry" in response and response["retry"]: - superagi.worker.execute_agent.apply_async((agent_execution_id, datetime.now()), countdown=15) + if "result" in response and response["result"] == "RATE_LIMIT_EXCEEDED": + superagi.worker.execute_agent.apply_async((agent_execution_id, datetime.now()), countdown=60) + else: + superagi.worker.execute_agent.apply_async((agent_execution_id, datetime.now()), countdown=15) + session.close() return diff --git a/superagi/llms/base_llm.py b/superagi/llms/base_llm.py index 12b9eb452..b5d068035 100644 --- a/superagi/llms/base_llm.py +++ b/superagi/llms/base_llm.py @@ -18,6 +18,10 @@ def get_api_key(self): def get_model(self): pass + @abstractmethod + def get_models(self): + pass + @abstractmethod def verify_access_key(self): pass diff --git a/superagi/llms/google_palm.py b/superagi/llms/google_palm.py index 707a1579e..40e25d82f 100644 --- a/superagi/llms/google_palm.py +++ b/superagi/llms/google_palm.py @@ -90,3 +90,17 @@ def verify_access_key(self): except Exception as exception: logger.info("Google palm Exception:", exception) return False + + def get_models(self): + """ + Get the models. + + Returns: + list: The models. + """ + try: + models_supported = ["chat-bison-001"] + return models_supported + except Exception as exception: + logger.info("Google palm Exception:", exception) + return [] diff --git a/superagi/llms/llm_model_factory.py b/superagi/llms/llm_model_factory.py index b13f46067..3ec7070f8 100644 --- a/superagi/llms/llm_model_factory.py +++ b/superagi/llms/llm_model_factory.py @@ -22,6 +22,7 @@ def get_model(self, model, **kwargs): factory.register_format("gpt-3.5-turbo-16k", lambda **kwargs: OpenAi(model="gpt-3.5-turbo-16k", **kwargs)) factory.register_format("gpt-3.5-turbo", lambda **kwargs: OpenAi(model="gpt-3.5-turbo", **kwargs)) factory.register_format("google-palm-bison-001", lambda **kwargs: GooglePalm(model='models/chat-bison-001', **kwargs)) +factory.register_format("chat-bison-001", lambda **kwargs: GooglePalm(model='models/chat-bison-001', **kwargs)) def get_model(api_key, model="gpt-3.5-turbo", **kwargs): diff --git a/superagi/llms/openai.py b/superagi/llms/openai.py index a25c0c847..4822ca1e5 100644 --- a/superagi/llms/openai.py +++ b/superagi/llms/openai.py @@ -101,3 +101,21 @@ def verify_access_key(self): except Exception as exception: logger.info("OpenAi Exception:", exception) return False + + def get_models(self): + """ + Get the models. + + Returns: + list: The models. 
+ """ + try: + models = openai.Model.list() + models = [model["id"] for model in models["data"]] + models_supported = ['gpt-4', 'gpt-3.5-turbo', 'gpt-3.5-turbo-16k', 'gpt-4-32k'] + print("CHECK THIS1", models) + models = [model for model in models if model in models_supported] + return models + except Exception as exception: + logger.info("OpenAi Exception:", exception) + return [] diff --git a/superagi/models/db.py b/superagi/models/db.py index d49b5793e..e711280ff 100644 --- a/superagi/models/db.py +++ b/superagi/models/db.py @@ -29,7 +29,13 @@ def connect_db(): db_url = f'postgresql://{db_username}:{db_password}@{database_url}/{db_name}' # Create the SQLAlchemy engine - engine = create_engine(db_url) + engine = create_engine(db_url, + pool_size=20, # Maximum number of database connections in the pool + max_overflow=50, # Maximum number of connections that can be created beyond the pool_size + pool_timeout=30, # Timeout value in seconds for acquiring a connection from the pool + pool_recycle=1800, # Recycle connections after this number of seconds (optional) + pool_pre_ping=False, # Enable connection health checks (optional) + ) # Test the connection try: diff --git a/superagi/resource_manager/file_manager.py b/superagi/resource_manager/file_manager.py index 7379f7585..4c20ba16d 100644 --- a/superagi/resource_manager/file_manager.py +++ b/superagi/resource_manager/file_manager.py @@ -75,10 +75,9 @@ def write_csv_file(self, file_name: str, csv_data): else: final_path = ResourceHelper.get_resource_path(file_name) try: - with open(final_path, mode="w") as file: + with open(final_path, mode="w", newline="") as file: writer = csv.writer(file, lineterminator="\n") - for row in csv_data: - writer.writerows(row) + writer.writerows(csv_data) self.write_to_s3(file_name, final_path) logger.info(f"{file_name} - File written successfully") return f"{file_name} - File written successfully" diff --git a/superagi/resource_manager/llama_document_summary.py b/superagi/resource_manager/llama_document_summary.py index 8f83eb0ca..5eca38913 100644 --- a/superagi/resource_manager/llama_document_summary.py +++ b/superagi/resource_manager/llama_document_summary.py @@ -22,6 +22,8 @@ def generate_summary_of_document(self, documents: list[Document]): :param documents: list of Document objects :return: summary of the documents """ + if documents is None or not documents: + return from llama_index import LLMPredictor, ServiceContext, ResponseSynthesizer, DocumentSummaryIndex os.environ["OPENAI_API_KEY"] = get_config("OPENAI_API_KEY", "") or self.model_api_key llm_predictor_chatgpt = LLMPredictor(llm=self._build_llm()) diff --git a/superagi/resource_manager/resource_manager.py b/superagi/resource_manager/resource_manager.py index 2004d3704..3a417cc7f 100644 --- a/superagi/resource_manager/resource_manager.py +++ b/superagi/resource_manager/resource_manager.py @@ -31,9 +31,9 @@ def create_llama_document(self, file_path: str): """ if file_path is None: raise Exception("file_path must be provided") - documents = SimpleDirectoryReader(input_files=[file_path]).load_data() - - return documents + if os.path.exists(file_path): + documents = SimpleDirectoryReader(input_files=[file_path]).load_data() + return documents def create_llama_document_s3(self, file_path: str): """ @@ -44,6 +44,7 @@ def create_llama_document_s3(self, file_path: str): """ if file_path is None: raise Exception("file_path must be provided") + temporary_file_path = "" try: import boto3 s3 = boto3.client( @@ -61,12 +62,12 @@ def 
create_llama_document_s3(self, file_path: str): f.write(contents) documents = SimpleDirectoryReader(input_files=[temporary_file_path]).load_data() + return documents except Exception as e: logger.error("superagi/resource_manager/resource_manager.py - create_llama_document_s3 threw : ", e) finally: if os.path.exists(temporary_file_path): os.remove(temporary_file_path) - return documents def save_document_to_vector_store(self, documents: list, resource_id: str, mode_api_key: str = None, model_source: str = ""): diff --git a/superagi/resource_manager/resource_summary.py b/superagi/resource_manager/resource_summary.py index 58084b28d..fb76d3fb0 100644 --- a/superagi/resource_manager/resource_summary.py +++ b/superagi/resource_manager/resource_summary.py @@ -28,7 +28,7 @@ def add_to_vector_store_and_create_summary(self, agent_id: int, resource_id: int agent = self.session.query(Agent).filter(Agent.id == agent_id).first() organization = agent.get_agent_organisation(self.session) model_api_key = Configuration.fetch_configuration(self.session, organization.id, "model_api_key") - model_source = Configuration.fetch_configuration(self.session, organization.id, "model_source") + model_source = Configuration.fetch_configuration(self.session, organization.id, "model_source") or "OpenAi" try: ResourceManager(str(agent_id)).save_document_to_vector_store(documents, str(resource_id), model_api_key, model_source) except Exception as e: @@ -67,7 +67,8 @@ def generate_agent_summary(self, agent_id: int, generate_all: bool = False) -> s documents = ResourceManager(str(agent_id)).create_llama_document_s3(file_path) else: documents = ResourceManager(str(agent_id)).create_llama_document(file_path) - summary_texts.append(LlamaDocumentSummary(model_api_key=model_api_key, model_source=model_source).generate_summary_of_document(documents)) + if documents is not None and len(documents) > 0: + summary_texts.append(LlamaDocumentSummary(model_api_key=model_api_key, model_source=model_source).generate_summary_of_document(documents)) agent_last_resource = self.session.query(AgentConfiguration). \ filter(AgentConfiguration.agent_id == agent_id, diff --git a/superagi/tools/google_serp_search/README.MD b/superagi/tools/google_serp_search/README.md similarity index 85% rename from superagi/tools/google_serp_search/README.MD rename to superagi/tools/google_serp_search/README.md index 2ec1bcc08..efe049054 100644 --- a/superagi/tools/google_serp_search/README.MD +++ b/superagi/tools/google_serp_search/README.md @@ -18,14 +18,14 @@ Set up the SuperAGI by following the instructions given (https://github.com/Tran ### 🔧 **Add Google Serp Search API Key in SuperAGI Dashboard** -1. Register an account verifying your email and phone number. +1. Register an account at [https://serper.dev/](https://serper.dev/) with your Email ID. 2. Your Private API Key would be made. Copy that and save it in a separate text file. -![Screenshot-google serp](https://github.com/TransformerOptimus/SuperAGI/assets/43145646/7f20e9ae-3a25-49cd-aa72-b96f7e6ae305) +![Serper_Key](https://github.com/Phoenix2809/SuperAGI/assets/133874957/dfe70b4f-11e2-483b-aa33-07b15150103d) -3. Open up the Google SERP Toolkit page in SuperAGI's Dashboard and paste your Private API Key. +3. Open up the Google SERP Toolkit page in SuperAGI's Dashboard and paste your Private API Key. ## Running SuperAGI Google Search Serp Tool
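Before running the tool, you can sanity-check the Serper key outside SuperAGI. A minimal standalone check (Node 18 or newer; the https://google.serper.dev/search endpoint, X-API-KEY header, and response fields are taken from Serper's public docs and should be verified against your account) might look like:

```js
// Minimal key check against serper.dev, independent of SuperAGI.
const SERP_API_KEY = process.env.SERP_API_KEY; // the key copied from serper.dev

async function testSerperKey() {
  const response = await fetch("https://google.serper.dev/search", {
    method: "POST",
    headers: { "X-API-KEY": SERP_API_KEY, "Content-Type": "application/json" },
    body: JSON.stringify({ q: "SuperAGI" }),
  });
  if (!response.ok) {
    throw new Error(`Serper request failed with status ${response.status}`);
  }
  const data = await response.json();
  // Print the first few organic results to confirm the key is accepted.
  console.log((data.organic || []).slice(0, 3));
}

testSerperKey().catch((error) => console.error(error));
```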