Skip to content

Commit

Permalink
Prettier
Browse files Browse the repository at this point in the history
  • Loading branch information
hmarr committed Aug 24, 2023
1 parent 84cf23a commit 6d8b36d
Show file tree
Hide file tree
Showing 5 changed files with 194 additions and 158 deletions.
16 changes: 16 additions & 0 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

9 changes: 7 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@
"scripts": {
"test": "jest",
"build": "tsc",
"prepublishOnly": "npm run build"
"prepublishOnly": "npm run build",
"lint": "prettier --check src tests"
},
"author": {
"email": "[email protected]",
Expand All @@ -26,10 +27,14 @@
"@types/jest": "^29.5.3",
"jest": "^29.6.1",
"openai": "^4.2.0",
"prettier": "^3.0.2",
"ts-jest": "^29.1.1",
"typescript": "^5.1.6"
},
"dependencies": {
"js-tiktoken": "^1.0.7"
},
"prettier": {
"trailingComma": "all"
}
}
}
18 changes: 9 additions & 9 deletions src/functions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,24 +22,24 @@ interface ObjectProp {
type Prop = {
description?: string;
} & (
| ObjectProp
| {
| ObjectProp
| {
type: "string";
enum?: string[];
}
| {
| {
type: "number" | "integer";
minimum?: number;
maximum?: number;
enum?: number[];
}
| { type: "boolean" }
| { type: "null" }
| {
| { type: "boolean" }
| { type: "null" }
| {
type: "array";
items?: Prop;
}
);
);

// When OpenAI uses functions in the prompt, they format them as TypeScript definitions rather than OpenAPI JSON schemas.
// This function converts the JSON schemas into TypeScript definitions.
Expand Down Expand Up @@ -75,7 +75,7 @@ function formatObjectProperties(obj: ObjectProp, indent: number): string {
lines.push(`${name}?: ${formatType(param, indent)},`);
}
}
return lines.map(line => ' '.repeat(indent) + line).join("\n");
return lines.map((line) => " ".repeat(indent) + line).join("\n");
}

// Format a single property type
Expand Down Expand Up @@ -108,4 +108,4 @@ function formatType(param: Prop, indent: number): string {
}
return "any[]";
}
}
}
30 changes: 19 additions & 11 deletions src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,17 +14,25 @@ let encoder: Tiktoken | undefined;
* @param {Function[]} prompt.functions OpenAI function definitions
* @returns An estimate for the number of tokens the prompt will use
*/
export function promptTokensEstimate({ messages, functions }: { messages: Message[], functions?: Function[] }): number {
export function promptTokensEstimate({
messages,
functions,
}: {
messages: Message[];
functions?: Function[];
}): number {
// It appears that if functions are present, the first system message is padded with a trailing newline. This
// was inferred by trying lots of combinations of messages and functions and seeing what the token counts were.
let paddedSystem = false;
let tokens = messages.map(m => {
if (m.role === "system" && functions && !paddedSystem) {
m = { ...m, content: m.content + "\n" }
paddedSystem = true;
}
return messageTokensEstimate(m);
}).reduce((a, b) => a + b, 0);
let tokens = messages
.map((m) => {
if (m.role === "system" && functions && !paddedSystem) {
m = { ...m, content: m.content + "\n" };
paddedSystem = true;
}
return messageTokensEstimate(m);
})
.reduce((a, b) => a + b, 0);

// Each completion (vs message) seems to carry a 3-token overhead
tokens += 3;
Expand All @@ -37,7 +45,7 @@ export function promptTokensEstimate({ messages, functions }: { messages: Messag
// If there's a system message _and_ functions are present, subtract four tokens. I assume this is because
// functions typically add a system message, but reuse the first one if it's already there. This offsets
// the extra 9 tokens added by the function definitions.
if (functions && messages.find(m => m.role === "system")) {
if (functions && messages.find((m) => m.role === "system")) {
tokens -= 4;
}

Expand Down Expand Up @@ -68,7 +76,7 @@ export function messageTokensEstimate(message: Message): number {
message.content,
message.name,
message.function_call?.name,
message.function_call?.arguments
message.function_call?.arguments,
].filter((v): v is string => !!v);
let tokens = components.map(stringTokens).reduce((a, b) => a + b, 0);
tokens += 3; // Add three per message
Expand All @@ -85,7 +93,7 @@ export function messageTokensEstimate(message: Message): number {
}

/**
* Estimate the number of tokens a function definition will use. Note that using the function definition within
* Estimate the number of tokens a function definition will use. Note that using the function definition within
* a prompt will add extra tokens, so you might want to use `promptTokensEstimate` instead.
* @param funcs An array of OpenAI function definitions
* @returns An estimate for the number of tokens the function definitions will use
Expand Down
Loading

0 comments on commit 6d8b36d

Please sign in to comment.