SITES-26591 [Import Assistant] Expand GPT client to use chat endpoint
- PR suggestion (more in options, better README)
atopper committed Nov 5, 2024
1 parent f231622 commit 4091cb8
Showing 3 changed files with 85 additions and 67 deletions.
23 changes: 14 additions & 9 deletions packages/spacecat-shared-gpt-client/README.md
@@ -45,6 +45,9 @@ try {
#### Via Capability Execution endpoint

```javascript
/**
* Fetch insights using Firefall's capability execution endpoint.
*/
async function fetchInsights(prompt) {
try {
const client = FirefallClient.createFrom({
@@ -60,7 +63,6 @@ async function fetchInsights(prompt) {
log: console,
});

// Internally, use Firefall's .../capability_execution/job endpoint.
const insights = await client.fetchCapabilityExecution(prompt);
console.log('Insights:', insights);
} catch (error) {
@@ -74,13 +76,15 @@ fetchInsights('How can we improve customer satisfaction?');
#### Via Chat Completions endpoint

```javascript
async function fetchInsights(prompt) {
/**
* Fetch completions using Firefall's chat completions endpoint.
*/
async function fetchCompletions(prompt) {
try {
const client = FirefallClient.createFrom({
env: {
FIREFALL_API_ENDPOINT: 'https://api.firefall.example.com',
FIREFALL_API_KEY: 'yourApiKey',
FIREFALL_API_CAPABILITY_NAME: 'yourCapabilityName',
IMS_HOST: 'ims.example.com',
IMS_CLIENT_ID: 'yourClientId',
IMS_CLIENT_CODE: 'yourClientCode',
@@ -89,18 +93,19 @@ async function fetchInsights(prompt) {
log: console,
});
const options = {
imageUrls: ['data:image/png;base64,iVBORw0KGgoAAAA...=']
imageUrls: ['data:image/png;base64,iVBORw0KGgoAAAA...='],
model: 'gpt-4-vision',
responseFormat: undefined,
};

// // Internally, use Firefall's .../chat/completions endpoint.
const insights = await client.fetchChatCompletion(prompt, { options });
console.log('Insights:', insights);
const response = await client.fetchChatCompletion(prompt, options);
console.log('Response:', JSON.stringify(response));
} catch (error) {
console.error('Failed to fetch insights:', error.message);
console.error('Failed to fetch chat completion:', error.message);
}
}

fetchInsights('Identify all food items in this image', { imageUrls: ['data:image/png;base64,iVBORw0KGgoAAAA...='] });
fetchCompletions('Identify all food items in this image', { imageUrls: ['data:image/png;base64,iVBORw0KGgoAAAA...='] });
```

Ensure that you replace `'path/to/firefall-client'` with the actual path to the `FirefallClient` class in your project and adjust the configuration parameters according to your Firefall API credentials.
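
For orientation, a combined setup-and-call sketch follows. It is illustrative only: it reuses this README's placeholder module path (assuming the class's default export), lists only the env keys visible in the examples above, and passes the chat options object directly; adjust it to your actual package entry point and credentials.

```javascript
// Sketch only: the module path is this README's placeholder and the env values are dummies.
import FirefallClient from 'path/to/firefall-client';

const client = FirefallClient.createFrom({
  env: {
    FIREFALL_API_ENDPOINT: 'https://api.firefall.example.com',
    FIREFALL_API_KEY: 'yourApiKey',
    FIREFALL_API_CAPABILITY_NAME: 'yourCapabilityName', // used by the capability execution example above
    IMS_HOST: 'ims.example.com',
    IMS_CLIENT_ID: 'yourClientId',
    IMS_CLIENT_CODE: 'yourClientCode',
    // ...plus any remaining IMS settings your ImsClient configuration requires (not shown in this diff).
  },
  log: console,
});

async function summarizeFeedback(prompt) {
  // Uses the chat completions endpoint; the options object is passed directly.
  const response = await client.fetchChatCompletion(prompt, {
    model: 'gpt-4-turbo',
    responseFormat: 'json_object',
  });
  return response.choices[0].message.content;
}
```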
100 changes: 58 additions & 42 deletions packages/spacecat-shared-gpt-client/src/clients/firefall-client.js
@@ -33,6 +33,10 @@ function validateChatCompletionResponse(response) {
}

export default class FirefallClient {
static STAGE_FIREFALL_API_ENDPOINT = 'https://firefall-stage.adobe.io';

static PROD_FIREFALL_API_ENDPOINT = 'https://firefall.adobe.io';

static createFrom(context) {
const { log = console } = context;
const imsClient = ImsClient.createFrom(context);
@@ -96,7 +100,7 @@ export default class FirefallClient {
this.log.debug(`${message}: took ${duration}ms`);
}

async #submitPrompt(prompt, body, path) {
async #submitPrompt(body, path) {
const apiAuth = await this.#getApiAuth();

const url = createUrl(`${this.config.apiEndpoint}${path}`);
@@ -168,16 +172,13 @@
* @param prompt The text prompt to provide to Firefall
* @param options The options for the call:
* - imageUrls: An array of URLs of the images to provide to Firefall
* @returns {Promise<*>}
* @returns {Object} - AI response
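*
* Example (illustrative sketch, not part of the API contract; assumes an already-created client):
*   const res = await client.fetchChatCompletion('Summarize this page', {
*     model: 'gpt-4-turbo',
*     responseFormat: 'json_object',
*   });
*   const text = res.choices[0].message.content;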
*/
async fetchChatCompletion(prompt, options = {}) {
if (!hasText(prompt)) {
throw new Error('Invalid prompt received');
}
const hasImageUrls = options?.imageUrls && options.imageUrls.length > 0;
const getBody = () => {
const { imageUrls, responseFormat, model: llmModel = 'gpt-4-turbo' } = options || {};
const hasImageUrls = imageUrls && imageUrls.length > 0;

try {
const startTime = process.hrtime.bigint();
const userRole = {
role: 'user',
content: [
@@ -187,27 +188,11 @@ export default class FirefallClient {
},
],
};
const body = {
llm_metadata: {
model_name: this.config.FIREFALL_API_CAPABILITY_NAME,
llm_type: 'azure_chat_openai',
},
messages: [
{
role: 'system',
content: 'You are a helpful assistant designed to output JSON.',
},
userRole,
],
};

if (hasImageUrls) {
if (!Array.isArray(options?.imageUrls)) {
if (!Array.isArray(imageUrls)) {
throw new Error('imageUrls must be an array.');
}
// Warn if model might not handle images.
if (!this.config.capabilityName.includes('vision') && this.log?.warn) {
this.log.warn(`Image URLs were provided but capability (${this.config.capabilityName}) may not handle vision prompts. Continuing...`);
}
options.imageUrls.forEach((imageUrl) => {
userRole.content.push({
type: 'image_url',
@@ -217,34 +202,65 @@
});
});
}
const chatSubmissionResponse = await this.#submitPrompt(
prompt,

const body = {
llm_metadata: {
model_name: llmModel,
llm_type: 'azure_chat_openai',
},
messages: [
userRole,
],
};
if (responseFormat === 'json_object') {
body.response_format = {
type: 'json_object',
};
body.messages.push({
role: 'system',
content: 'You are a helpful assistant designed to output JSON.',
});
}

return body;
};

if (!hasText(prompt)) {
throw new Error('Invalid prompt received');
}

let chatSubmissionResponse;
try {
const startTime = process.hrtime.bigint();
const body = getBody();

chatSubmissionResponse = await this.#submitPrompt(
JSON.stringify(body),
'/v2/chat/completions',
);
this.#logDuration('Firefall API Chat Completion call', startTime);

if (!validateChatCompletionResponse(chatSubmissionResponse)) {
this.log.error(
'Could not obtain data from Firefall: Invalid response format.',
);
throw new Error('Invalid response format.');
}
if (!chatSubmissionResponse.choices.some((ch) => hasText(ch?.message?.content))) {
throw new Error('Prompt completed but no output was found.');
}

return chatSubmissionResponse;
} catch (error) {
this.log.error('Error while fetching data from Firefall chat API: ', error.message);
throw error;
}

if (!validateChatCompletionResponse(chatSubmissionResponse)) {
this.log.error(
'Could not obtain data from Firefall: Invalid response format.',
);
throw new Error('Invalid response format.');
}
if (!chatSubmissionResponse.choices.some((ch) => hasText(ch?.message?.content))) {
throw new Error('Prompt completed but no output was found.');
}

return chatSubmissionResponse;
}

/**
* Fetches data from Firefall API.
* @param prompt The text prompt to provide to Firefall
* @returns {Promise<*>}
* @returns {string} - AI response
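*
* Example (illustrative sketch, not part of the API contract; assumes an already-created client):
*   const insights = await client.fetchCapabilityExecution('How can we improve customer satisfaction?');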
*/
async fetchCapabilityExecution(prompt) {
if (!hasText(prompt)) {
@@ -260,7 +276,7 @@ export default class FirefallClient {
});
const path = '/v2/capability_execution/job';

const jobSubmissionResponse = await this.#submitPrompt(prompt, body, path);
const jobSubmissionResponse = await this.#submitPrompt(body, path);
const jobStatusResponse = await this.#pollJobStatus(jobSubmissionResponse.job_id, path);
this.#logDuration('Firefall API Capability Execution call', startTime);

@@ -38,7 +38,7 @@ describe('FirefallClient', () => {
mockContext = {
log: mockLog.object,
env: {
FIREFALL_API_ENDPOINT: 'https://api.firefall.example.com',
FIREFALL_API_ENDPOINT: FirefallClient.STAGE_FIREFALL_API_ENDPOINT,
FIREFALL_API_KEY: 'apiKeyExample',
FIREFALL_API_POLL_INTERVAL: 100,
FIREFALL_API_CAPABILITY_NAME: 'gpt4_32k_completions_capability',
@@ -76,7 +76,8 @@ describe('FirefallClient', () => {
});
});

describe('fetchCapabilityExecution', () => {
// eslint-disable-next-line func-names
describe('fetchCapabilityExecution', function () {
this.timeout(3000);
let client;

@@ -177,7 +178,8 @@
});
});

describe('fetchChatCompletion', () => {
// eslint-disable-next-line func-names
describe('fetchChatCompletion', function () {
const chatPath = '/v2/chat/completions';
const chatResponse = {
choices: [
@@ -231,34 +233,29 @@
.post(chatPath)
.reply(200, chatResponseDup);
const imageUrl = 'iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYA...=';
const options = {
imageUrls: [imageUrl],
model: 'gpt-4-turbo',
responseFormat: 'json_object',
};

mockLog.expects('warn').once().withArgs('Image URLs were provided but capability (gpt4_32k_completions_capability) may not handle vision prompts. Continuing...');
await expect(client.fetchChatCompletion('Test prompt', { imageUrls: [imageUrl] }))
await expect(client.fetchChatCompletion('Test prompt', options))
.to.be.rejectedWith('Invalid response format.');
mockLog.verify();
});

it('should handle a missing response message', async () => {
// Run this with a different capability.
mockContext.env.FIREFALL_API_CAPABILITY_NAME = 'gpt-4-vision';
const clientCapacity = FirefallClient.createFrom(mockContext);
const logSpy = sinon.spy(mockContext.log, 'warn');

const chatResponseDup = JSON.parse(JSON.stringify(chatResponse));
delete chatResponseDup.choices[0].message;
nock(mockContext.env.FIREFALL_API_ENDPOINT)
.post(chatPath)
.reply(200, chatResponseDup);
const imageUrl = 'iVBORw0KGgoAAAANSUhEUgAAABgAAAAYCAYA...=';

await expect(clientCapacity.fetchChatCompletion(
await expect(client.fetchChatCompletion(
'Test prompt',
{ imageUrls: [imageUrl] },
{ imageUrls: [imageUrl], model: 'gpt-4-vision', responseFormat: 'ignored' },
))
.to.be.rejectedWith('Invalid response format.');

// Modal (capacity) handles vision prompts, so no warning should be logged.
expect(logSpy.callCount).to.equal(0);
});

it('should handle a missing response content', async () => {
