
Commit

Merge pull request #68 from dev-jpnobrega/input-message-context
Input message context and other agent configs
dev-jpnobrega authored Aug 24, 2024
2 parents dd3b3db + eeb178f commit 430236e
Showing 8 changed files with 85 additions and 72 deletions.
2 changes: 1 addition & 1 deletion package.json
@@ -1,7 +1,7 @@
{
"name": "ai-agent-enterprise",
"description": "AI Agent simplifies the implementation and use of generative AI with LangChain",
"version": "0.0.43",
"version": "0.0.44",
"main": "./build/index.js",
"types": "./build/index.d.ts",
"files": [
7 changes: 4 additions & 3 deletions src/agent.ts
@@ -107,7 +107,7 @@ class Agent extends AgentBaseCommand implements IAgent {
}

async call(args: IInputProps): Promise<void> {
const { question, chatThreadID } = args;
const { question, chatThreadID, context } = args;

try {
const chatHistory = await this.buildHistory(
@@ -123,7 +123,7 @@ class Agent extends AgentBaseCommand implements IAgent {
const chain = await this._chainService.build(
this._llm,
question,
chatHistory.getBufferMemory(),
);

const chatMessages = await chatHistory.getMessages();
@@ -132,8 +132,9 @@ class Agent extends AgentBaseCommand implements IAgent {
referencies: referenciesDocs,
relevant_docs: relevantDocs,
input_documents: [],
query: question,
question: question,
user_context: context,
chat_history: chatMessages,
format_chat_messages: chatHistory.getFormatedMessages(chatMessages),
user_prompt: this._settings.systemMesssage,
8 changes: 5 additions & 3 deletions src/interface/agent.interface.ts
@@ -29,7 +29,7 @@ export interface IDatabaseConfig {
database?: string | number;
container?: string;
synchronize?: boolean;
limit?: number;
}

export interface IDataSourceConfig {
@@ -38,6 +38,7 @@ export interface IDataSourceConfig {
ignoreTables?: string[];
customizeSystemMessage?: string;
ssl?: boolean;
maxResult?: number;
}

export interface IOpenAPIConfig {
@@ -52,15 +53,15 @@ export interface IChatConfig {
topP?: number;
frequencyPenalty?: number;
presencePenalty?: number;
maxTokens?: number;
}

export interface ILLMConfig {
type: LLM_TYPE;
model: string;
instance?: string;
apiKey: string;
apiVersion: string;
secretAccessKey?: string;
sessionToken?: string;
region?: string;
@@ -93,6 +94,7 @@ export interface IInputProps {
question?: string;
userSessionId?: string;
chatThreadID?: string;
context?: string;
}

export interface TModel extends Record<string, unknown> {}
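Taken together, the interface changes above add an optional `context` to `IInputProps` (threaded through to the prompts as `{user_context}`) and an optional `maxResult` to `IDataSourceConfig`. Below is a minimal sketch of a call using the new field, adapted from the commented-out test further down in this diff; the LLM settings are placeholder values, not a working configuration.

```ts
// Minimal sketch, assuming the same relative imports used by tests/agent.test.ts;
// all credentials and model names below are placeholders.
import Agent from '../src/agent';
import { IInputProps } from '../src/interface/agent.interface';

const agent = new Agent({
  llmConfig: {
    type: 'azure',
    model: 'test',
    instance: 'test',
    apiKey: '123',
    apiVersion: '1.0',
  },
  chatConfig: { temperature: 0 },
});

// New in this commit: `context` travels next to the question and is rendered
// into the chain prompts as {user_context}.
const input: IInputProps = {
  question: 'Tell me more about customer 1014814.',
  chatThreadID: '01',
  context: 'caller-supplied context for this question',
};

agent.on('onMessage', (message) => {
  console.log('MESSAGE:', message);
});

agent.call(input);
```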
11 changes: 7 additions & 4 deletions src/services/chain/index.ts
@@ -59,23 +59,25 @@ class ChainService {

builtMessage += '\n';
builtMessage += `
Given the user prompt and conversation log, the document context, the API output, and the following database output, formulate a response from a knowledge base.\n
Given the user context, question and conversation log, the document context, the API output, and the following database output, formulate a response from a knowledge base.\n
You must follow these rules and priorities when generating and responding:\n
- Always prioritize user prompt over conversation record.\n
- Ignore any conversation logs that are not directly related to the user prompt.\n
- Ignore any conversation logs that are not directly related to the user question.\n
- Only try to answer if a question is asked.\n
- The question must be a single sentence.\n
- You must remove any punctuation from the question.\n
- You must remove any words that are not relevant to the question.\n
- If you are unable to formulate an answer, respond in a friendly manner so the user can rephrase the question.\n\n
USER PROMPT: {user_prompt}\n
USER CONTEXT:\n
context: {user_context}\n
question: {user_prompt}\n
--------------------------------------
CHAT HISTORY: {format_chat_messages}\n
--------------------------------------
Context found in documents: {relevant_docs}\n
--------------------------------------
Name of reference files: {referencies}\n
`;

if (this._isSQLChainEnabled) {
@@ -150,6 +152,7 @@ class ChainService {
'chat_history',
'format_chat_messages',
'user_prompt',
'user_context'
],
verbose: this._settings.debug || false,
memory: memoryChat,
5 changes: 3 additions & 2 deletions src/services/chain/sql-chain.ts
@@ -47,13 +47,14 @@ class SqlChain implements IChain {

const chainSQL = new SqlDatabaseChain({
llm,
database,
outputKey: 'sqlResult',
sqlOutputKey: 'sqlQuery',
prompt: new PromptTemplate({
inputVariables: ['question', 'response', 'schema', 'query', 'chat_history'],
inputVariables: ['question', 'response', 'schema', 'query', 'chat_history', 'user_context'],
template: systemTemplate,
}),
topK: this._settings.maxResult,
}, this._settings?.customizeSystemMessage);

return chainSQL;
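The sql-chain change above forwards the new `maxResult` setting into `SqlDatabaseChain` as `topK` (whose class field otherwise defaults to 5). A hedged configuration fragment showing the knob follows; the connection fields are assumed and omitted, and the values are for illustration only.

```ts
// Hypothetical dataSourceConfig fragment: maxResult flows through SqlChain
// into SqlDatabaseChain.topK and now also bounds the dataTooBig guard.
const dataSourceConfig = {
  // ...connection / dataSource fields omitted...
  maxResult: 20,                        // rows the SQL chain may work with
  customizeSystemMessage: 'Answer using the sales schema only.', // pre-existing option
};
```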
15 changes: 9 additions & 6 deletions src/services/chain/sql-database-chain.ts
@@ -58,16 +58,14 @@ export default class SqlDatabaseChain extends BaseChain {
prompt = DEFAULT_SQL_DATABASE_PROMPT;

// Number of results to return from the query
topK = 5;

inputKey = 'query';

outputKey = 'result';

customMessage = '';

maxDataExamples = 10; // TODO add config in agent settings

sqlOutputKey: string | undefined = undefined;

// Whether to return the result of querying the SQL table directly.
@@ -85,6 +83,7 @@ export default class SqlDatabaseChain extends BaseChain {
this.customMessage = customMessage || '';
}

//TODO: review whether the context also needs to be added here
getSQLPrompt(): string {
return `
Based on the SQL table schema provided below, write an SQL query that answers the user's question.\n
@@ -93,14 +92,17 @@ export default class SqlDatabaseChain extends BaseChain {
-------------------------------------------\n
Here are some important observations for generating the query:\n
- Only execute the request on the service if the question is not in CHAT HISTORY; if the question has already been answered, use the same answer and do not make a query on the database.\n
USER CONTEXT:\n
{user_prompt}\n
{user_context}\n
-------------------------------------------\n
SCHEMA: {schema}\n
-------------------------------------------\n
CHAT HISTORY: {format_chat_messages}\n
-------------------------------------------\n
QUESTION: {question}\n
------------------------------------------\n
SQL QUERY:
`;
}
@@ -139,7 +141,7 @@ export default class SqlDatabaseChain extends BaseChain {
const data = JSON.parse(countResult);
const result = parseInt(data[0]?.resultcount, 10);

if (result >= this.maxDataExamples) {
if (result >= this.topK) {
throw new Error(MESSAGES_ERRORS.dataTooBig);
}

@@ -177,6 +179,7 @@ export default class SqlDatabaseChain extends BaseChain {
chat_history: () => values?.chat_history,
format_chat_messages: () => values?.format_chat_messages,
user_prompt: () => this.customMessage,
user_context: () => values?.user_context,
},
this.buildPromptTemplate(this.getSQLPrompt()),
this.llm.bind({ stop: ['\nSQLResult:'] }),
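Two behavioral notes on the sql-database-chain hunks above: the oversized-result guard now compares against the configurable `topK` (wired from `maxResult`) instead of the removed hard-coded `maxDataExamples = 10`, and the runnable mapping now exposes the caller's context to the SQL prompt as `{user_context}`. A condensed, hedged restatement of the guard, pulled out of the class for clarity; the shape of `countResult` is assumed from the surrounding code.

```ts
// topK comes from dataSourceConfig.maxResult (class-field default: 5);
// countResult is assumed to be the JSON text of a COUNT query,
// e.g. '[{"resultcount":"12"}]'.
function assertResultNotTooBig(countResult: string, topK: number): void {
  const rows = parseInt(JSON.parse(countResult)[0]?.resultcount, 10);
  if (rows >= topK) {
    // Previously compared against the hard-coded maxDataExamples (10).
    throw new Error('dataTooBig'); // stands in for MESSAGES_ERRORS.dataTooBig
  }
}
```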
107 changes: 55 additions & 52 deletions tests/agent.test.ts
@@ -1,52 +1,55 @@
import { describe, it } from 'node:test';
import { strict as assert } from 'node:assert';

import Agent from '../src/agent';

describe('Agent test', () => {
it('should Agent instance', (t, done) => {
const agent = new Agent({
llmConfig: {
apiKey: '123',
apiVersion: '1.0',
type: 'azure',
instance: 'test',
model: 'test',
},
chatConfig: {
temperature: 0.5,
}
});
assert.ok(agent instanceof Agent);

done();
});


it('should Agent call', { skip: true }, (t, done) => {
const agent = new Agent({
name: 'Agent test Support',
llmConfig: {
type: 'azure',
model: process.env.OPENAI_API_DEPLOYMENT_NAME || 'test',
instance: process.env.OPENAI_API_INSTANCE_NAME || 'test',
apiKey: process.env.OPENAI_API_KEY || 'test',
apiVersion: process.env.OPENAI_API_VERSION || 'test',
},
chatConfig: {
temperature: 0,
},
vectorStoreConfig: {
apiKey: process.env.SEARCH_API_KEY || 'test',
apiVersion: process.env.SEARCH_API_VERSION || 'test',
name: process.env.SEARCH_NAME || 'test',
type: 'azure',
vectorFieldName: 'embedding',
indexes: [
'index-gdp'
],
model: process.env.AZURE_SEARCH_MODEL || 'test'
},
});
});
});
// import { describe, it } from 'node:test';
// import { strict as assert } from 'node:assert';

// import Agent from '../src/agent';
// import { DataSource } from 'typeorm';
// import { IAgentConfig } from '../src/interface/agent.interface';


// const agentConfig = require(`./agent-configs/gsp-testcase-agent.json`);

// describe('Agent test', () => {
// it('should Agent instance', (t, done) => {
// const agent = new Agent({
// llmConfig: {
// apiKey: '123',
// apiVersion: '1.0',
// type: 'azure',
// instance: 'test',
// model: 'test',
// },
// chatConfig: {
// temperature: 0.5,
// }
// });
// assert.ok(agent instanceof Agent);

// done();
// });


// it('should Agent call', (t, done) => {
// const agentSettings = agentConfig as unknown as IAgentConfig;

// if (agentSettings.dataSourceConfig) {
// agentSettings.dataSourceConfig.dataSource = new DataSource({
// ...(agentSettings.dataSourceConfig as any)
// });
// }

// const agent = new Agent(agentSettings);

// agent.on('onMessage', async (message) => {
// assert.ok(message, 'message is not null');
// console.warn('MESSAGE:', message);
// done();
// process.exit(0);
// });

// agent.call({
// question: 'Me fale mais sobre o cliente 1014814.',
// chatThreadID: '01',
// context: 'contexto teste',
// });
// });
// });
2 changes: 1 addition & 1 deletion tsconfig.json
@@ -3,7 +3,7 @@
"module": "commonjs",
"esModuleInterop": true,
"allowSyntheticDefaultImports": true,
"target": "es6",
"target": "es2018",
"declaration": true,
"noImplicitAny": true,
"moduleResolution": "node",
