Merge pull request #54 from vlm-run/sh/add_zod
fix: add response model
shahrear33 authored Feb 13, 2025
2 parents b3dc04d + 692979d commit f31b1de
Showing 5 changed files with 24 additions and 14 deletions.
README.md (4 changes: 2 additions & 2 deletions)
@@ -77,7 +77,7 @@ const response = await client.image.generate({
  images: [imageUrl],
  domain: "document.invoice",
  config: {
-    jsonSchema: schema,
+    responseModel: schema,
  },
});
const response = response.response as z.infer<typeof schema>;
@@ -146,7 +146,7 @@ const schema = z.object({
const response = await client.document.generate({
  url: documentUrl,
  domain: "document.invoice",
-  config: { jsonSchema: schema },
+  config: { responseModel: schema },
});

const response = response.response as z.infer<typeof schema>;

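Read together, the two README hunks above show the new calling convention: the Zod schema is passed via responseModel, and the prediction payload is then cast with z.infer. A minimal end-to-end sketch under the same assumptions as the README (the schema fields are placeholders, client and documentUrl are set up as elsewhere in the README, and the result variable is renamed here to avoid re-declaring response):

    import { z } from "zod";

    // Placeholder schema; real field names depend on the document domain.
    const schema = z.object({
      invoice_number: z.string(),
      total_amount: z.number(),
    });

    const response = await client.document.generate({
      url: documentUrl,
      domain: "document.invoice",
      config: { responseModel: schema },
    });

    // Cast the untyped payload to the schema's inferred type.
    const invoice = response.response as z.infer<typeof schema>;
    console.log(invoice.invoice_number, invoice.total_amount);
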
package.json (2 changes: 1 addition & 1 deletion)
@@ -1,6 +1,6 @@
{
  "name": "vlmrun",
-  "version": "0.2.1",
+  "version": "0.2.2",
  "description": "The official TypeScript library for the VlmRun API",
  "author": "VlmRun <[email protected]>",
  "main": "dist/index.js",

src/client/predictions.ts (15 changes: 8 additions & 7 deletions)
@@ -94,9 +94,9 @@ export class ImagePredictions extends Predictions {
    } = params;

    const encodedImages = images.map((image) => processImage(image));
-
-    if (config?.jsonSchema) {
-      config.jsonSchema = convertToJsonSchema(config.jsonSchema);
+    let jsonSchema = config?.jsonSchema;
+    if (config?.responseModel) {
+      jsonSchema = convertToJsonSchema(config.responseModel);
    }

    const [response] = await this.requestor.request<PredictionResponse>(
@@ -110,7 +110,7 @@ export class ImagePredictions extends Predictions {
          batch,
          config: {
            detail: config?.detail ?? "auto",
-            json_schema: config?.jsonSchema,
+            json_schema: jsonSchema,
            confidence: config?.confidence ?? false,
            grounding: config?.grounding ?? false,
          },
@@ -146,8 +146,9 @@ export class FilePredictions extends Predictions {
      callbackUrl,
    } = params;

-    if (config?.jsonSchema) {
-      config.jsonSchema = convertToJsonSchema(config.jsonSchema);
+    let jsonSchema = config?.jsonSchema;
+    if (config?.responseModel) {
+      jsonSchema = convertToJsonSchema(config.responseModel);
    }

    const [response] = await this.requestor.request<PredictionResponse>(
@@ -161,7 +162,7 @@
          batch,
          config: {
            detail: config?.detail ?? "auto",
-            json_schema: config?.jsonSchema,
+            json_schema: jsonSchema,
            confidence: config?.confidence ?? false,
            grounding: config?.grounding ?? false,
          },

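The refactor above also stops mutating the caller's config: the schema is resolved into a local jsonSchema variable, and a responseModel (a Zod schema) takes precedence by being converted to JSON Schema. The repository's convertToJsonSchema helper is not part of this diff; purely as an illustration, the same conversion can be sketched with the zod-to-json-schema package (an assumption about the implementation, not a statement of what the helper actually uses):

    import { z, ZodType } from "zod";
    import { zodToJsonSchema } from "zod-to-json-schema";

    // Illustrative stand-in for the repo's convertToJsonSchema helper (assumption).
    const convertToJsonSchema = (schema: ZodType): Record<string, any> =>
      zodToJsonSchema(schema) as Record<string, any>;

    // Caller-supplied config, shaped like GenerationConfigParams.
    const config = {
      responseModel: z.object({ invoice_number: z.string() }),
      jsonSchema: null as Record<string, any> | null,
    };

    // Mirrors the updated request-building logic: responseModel wins when present.
    let jsonSchema = config?.jsonSchema;
    if (config?.responseModel) {
      jsonSchema = convertToJsonSchema(config.responseModel);
    }
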
src/client/types.ts (9 changes: 9 additions & 0 deletions)
@@ -1,3 +1,5 @@
+import { ZodType } from "zod";
+
export type JobStatus = string;

export type FilePurpose = string;
@@ -110,6 +112,7 @@ export type RequestMetadataInput = RequestMetadata | RequestMetadataParams;

export type GenerationConfigParams = {
  detail?: "auto" | "hi" | "lo";
+  responseModel?: ZodType;
  jsonSchema?: Record<string, any> | null;
  confidence?: boolean;
  grounding?: boolean;
@@ -181,3 +184,9 @@ export class APIError extends Error {
    this.name = "APIError";
  }
}
+
+export interface VlmRunError extends Error {
+  message: string;
+  code?: string;
+  cause?: Error;
+}

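With responseModel added to GenerationConfigParams, a generation config can now carry either a Zod schema or a hand-written JSON schema; the client converts the former internally, as shown in src/client/predictions.ts above. A small sketch of the two call shapes (the import path and the example schema fields are assumptions for illustration):

    import { z } from "zod";
    // Import path assumed for illustration; in this repo the type lives in src/client/types.ts.
    import type { GenerationConfigParams } from "vlmrun";

    const schema = z.object({ invoice_number: z.string() });

    // Preferred: pass the Zod schema and let the client derive the JSON schema.
    const withZod: GenerationConfigParams = { responseModel: schema };

    // Still supported: supply a raw JSON schema directly.
    const withJsonSchema: GenerationConfigParams = {
      jsonSchema: {
        type: "object",
        properties: { invoice_number: { type: "string" } },
      },
    };
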
tests/integration/client/predictions.test.ts (8 changes: 4 additions & 4 deletions)
@@ -77,7 +77,7 @@ describe("Integration: Predictions", () => {
        model: "vlm-1",
        domain: "document.invoice",
        config: {
-          jsonSchema: schema,
+          responseModel: schema,
        },
      });

@@ -101,7 +101,7 @@ describe("Integration: Predictions", () => {
        model: "vlm-1",
        domain: "document.invoice",
        config: {
-          jsonSchema: schema,
+          responseModel: schema,
        },
      });

@@ -206,7 +206,7 @@ describe("Integration: Predictions", () => {
        model: "vlm-1",
        domain: "document.invoice",
        config: {
-          jsonSchema: schema,
+          responseModel: schema,
        },
      });

@@ -251,7 +251,7 @@ describe("Integration: Predictions", () => {
        domain: "document.invoice",
        batch: true,
        config: {
-          jsonSchema: schema,
+          responseModel: schema,
        },
      });

