Remove caching #197

Merged · 1 commit · Nov 21, 2024
3 changes: 2 additions & 1 deletion .env.example
@@ -2,4 +2,5 @@ OPENAI_API_KEY=""
BROWSERBASE_API_KEY=""
BRAINTRUST_API_KEY=""
ANTHROPIC_API_KEY=""
- HEADLESS=false
+ HEADLESS=false
+ ENABLE_CACHING=false
2 changes: 1 addition & 1 deletion examples/example.ts
@@ -6,7 +6,7 @@ async function example() {
env: "LOCAL",
verbose: 1,
debugDom: true,
- enableCaching: true,
+ enableCaching: false,
});

await stagehand.init();
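For reference, the example now constructs Stagehand with caching disabled. A minimal sketch of how examples/example.ts reads after this change, assuming the import path and the trailing call, which are not shown in the diff:

```typescript
// Sketch of the updated example; the import path and the call at the bottom are assumptions.
import { Stagehand } from "../lib";

async function example() {
  const stagehand = new Stagehand({
    env: "LOCAL",
    verbose: 1,
    debugDom: true,
    enableCaching: false, // caching is now opt-in
  });

  await stagehand.init();
  // ...the rest of the example is unchanged by this PR
}

example().catch(console.error);
```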
14 changes: 9 additions & 5 deletions lib/handlers/actHandler.ts
@@ -21,7 +21,7 @@ export class StagehandActHandler {
private readonly waitForSettledDom: (
domSettleTimeoutMs?: number,
) => Promise<void>;
- private readonly actionCache: ActionCache;
+ private readonly actionCache: ActionCache | undefined;
private readonly defaultModelName: AvailableModel;
private readonly startDomDebug: () => Promise<void>;
private readonly cleanupDomDebug: () => Promise<void>;
@@ -54,7 +54,7 @@ export class StagehandActHandler {
this.enableCaching = enableCaching;
this.logger = logger;
this.waitForSettledDom = waitForSettledDom;
- this.actionCache = new ActionCache(this.logger);
+ this.actionCache = enableCaching ? new ActionCache(this.logger) : undefined;
this.defaultModelName = defaultModelName;
this.startDomDebug = startDomDebug;
this.cleanupDomDebug = cleanupDomDebug;
@@ -733,6 +733,10 @@ export class StagehandActHandler {
model: AvailableModel;
domSettleTimeoutMs?: number;
}) {
+ if (!this.enableCaching) {
+   return null;
+ }
+
const cacheObj = {
url: this.stagehand.page.url(),
action,
@@ -812,7 +816,7 @@
},
});

- await this.actionCache.removeActionStep(cacheObj);
+ await this.actionCache?.removeActionStep(cacheObj);
return null;
}

@@ -914,7 +918,7 @@
},
});

- await this.actionCache.removeActionStep(cacheObj);
+ await this.actionCache?.removeActionStep(cacheObj);
return null;
}
}
@@ -1184,7 +1188,7 @@
} else {
if (this.enableCaching) {
this.llmProvider.cleanRequestCache(requestId);
- this.actionCache.deleteCacheForRequestId(requestId);
+ this.actionCache?.deleteCacheForRequestId(requestId);
}

return {
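Every change in this handler follows one pattern: the ActionCache is only constructed when enableCaching is true, the cache-lookup path returns null immediately when caching is off, and every cache mutation goes through optional chaining so it becomes a no-op when actionCache is undefined. A self-contained sketch of that pattern with hypothetical stand-in names, not the actual handler code:

```typescript
// Illustrative sketch of the optional-cache pattern used in StagehandActHandler.
// SimpleCache, HandlerSketch, and their methods are hypothetical stand-ins.
class SimpleCache {
  private store = new Map<string, string>();
  get(key: string): string | undefined {
    return this.store.get(key);
  }
  delete(key: string): void {
    this.store.delete(key);
  }
}

class HandlerSketch {
  private readonly cache: SimpleCache | undefined;

  constructor(private readonly enableCaching: boolean) {
    // Only allocate the cache when caching is enabled.
    this.cache = enableCaching ? new SimpleCache() : undefined;
  }

  tryGetFromCache(key: string): string | null {
    // Mirrors the early return added to the cache-lookup path above.
    if (!this.enableCaching) {
      return null;
    }
    return this.cache?.get(key) ?? null;
  }

  invalidate(key: string): void {
    // Optional chaining makes cache mutations no-ops when caching is off.
    this.cache?.delete(key);
  }
}
```

Typing the field as ActionCache | undefined also lets the compiler flag any remaining unguarded access, which is why the call sites above switch to optional chaining.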
4 changes: 3 additions & 1 deletion lib/index.ts
@@ -330,7 +330,9 @@
) {
this.externalLogger = logger;
this.logger = this.log.bind(this);
- this.enableCaching = enableCaching ?? false;
+ this.enableCaching =
+   enableCaching ??
+   (process.env.ENABLE_CACHING && process.env.ENABLE_CACHING === "true");
this.llmProvider =
llmProvider || new LLMProvider(this.logger, this.enableCaching);
this.env = env;
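The constructor now resolves enableCaching from two sources: an explicit constructor option takes precedence, and otherwise caching is enabled only when the ENABLE_CACHING environment variable is exactly the string "true". A small sketch of that precedence as a hypothetical standalone helper; it normalizes to a strict boolean, whereas the expression in the diff can also evaluate to undefined or an empty string when the variable is unset, both of which are falsy and behave the same downstream:

```typescript
// Hypothetical helper mirroring the precedence in the Stagehand constructor.
function resolveEnableCaching(option?: boolean): boolean {
  // An explicit enableCaching option always wins over the environment.
  if (option !== undefined) {
    return option;
  }
  // Otherwise, only the exact string "true" turns caching on.
  return process.env.ENABLE_CACHING === "true";
}

// Examples:
//   ENABLE_CACHING unset    -> resolveEnableCaching()      === false
//   ENABLE_CACHING="true"   -> resolveEnableCaching()      === true
//   ENABLE_CACHING="true"   -> resolveEnableCaching(false) === false  (option wins)
```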
4 changes: 2 additions & 2 deletions lib/llm/AnthropicClient.ts
@@ -6,15 +6,15 @@ import { LogLine } from "../types";

export class AnthropicClient implements LLMClient {
private client: Anthropic;
- private cache: LLMCache;
+ private cache: LLMCache | undefined;
public logger: (message: LogLine) => void;
private enableCaching: boolean;
private requestId: string;

constructor(
logger: (message: LogLine) => void,
enableCaching = false,
- cache: LLMCache,
+ cache: LLMCache | undefined,
requestId: string,
) {
this.client = new Anthropic({
8 changes: 6 additions & 2 deletions lib/llm/LLMProvider.ts
@@ -24,15 +24,19 @@ export class LLMProvider {

private logger: (message: LogLine) => void;
private enableCaching: boolean;
- private cache: LLMCache;
+ private cache: LLMCache | undefined;

constructor(logger: (message: LogLine) => void, enableCaching: boolean) {
this.logger = logger;
this.enableCaching = enableCaching;
- this.cache = new LLMCache(logger);
+ this.cache = enableCaching ? new LLMCache(logger) : undefined;
}

cleanRequestCache(requestId: string): void {
+ if (!this.enableCaching) {
+   return;
+ }
+
this.logger({
category: "llm_cache",
message: "cleaning up cache",
4 changes: 2 additions & 2 deletions lib/llm/OpenAIClient.ts
@@ -6,15 +6,15 @@ import { LogLine } from "../types";

export class OpenAIClient implements LLMClient {
private client: OpenAI;
- private cache: LLMCache;
+ private cache: LLMCache | undefined;
public logger: (message: LogLine) => void;
private enableCaching: boolean;
private requestId: string;

constructor(
logger: (message: LogLine) => void,
enableCaching = false,
- cache: LLMCache,
+ cache: LLMCache | undefined,
requestId: string,
) {
this.client = new OpenAI();