From 2b5d518ca5e028c1c2733fd0db4177f8ca05d3c5 Mon Sep 17 00:00:00 2001 From: ponderingdemocritus Date: Fri, 24 Jan 2025 08:08:21 +1100 Subject: [PATCH 1/4] refactor --- packages/core/src/core/orchestrator.ts | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/packages/core/src/core/orchestrator.ts b/packages/core/src/core/orchestrator.ts index 37bc8e0d..48494ab5 100644 --- a/packages/core/src/core/orchestrator.ts +++ b/packages/core/src/core/orchestrator.ts @@ -7,19 +7,24 @@ import { LogLevel, type LoggerConfig } from "../types"; import type { z } from "zod"; /** - * A single interface for all Inputs, Outputs, and even Actions if desired. + * A single interface for all Inputs, Outputs. */ export interface IOHandler { /** Unique name for this handler */ name: string; + /** "input" | "output" | (optionally "action") if you want more roles */ role: "input" | "output"; + /** For input handlers with recurring scheduling */ interval?: number; + /** The schema for the input handler */ schema: z.ZodType; + /** Next run time (timestamp in ms); for input scheduling. */ nextRun?: number; + /** The main function. For inputs, no payload is typically passed. For outputs, pass the data. */ handler: (payload?: unknown) => Promise; } From fabc9e03f100d01e052b27cc35abfbef7a627b92 Mon Sep 17 00:00:00 2001 From: ponderingdemocritus Date: Fri, 24 Jan 2025 12:23:02 +1100 Subject: [PATCH 2/4] processor cleanup and polish --- examples/example-api.ts | 238 +++++++++++++++ examples/example-twitter.ts | 28 +- package.json | 1 + packages/core/src/core/orchestrator.ts | 192 +++++++++++-- packages/core/src/core/processor.ts | 383 +++++++++---------------- 5 files changed, 562 insertions(+), 280 deletions(-) create mode 100644 examples/example-api.ts diff --git a/examples/example-api.ts b/examples/example-api.ts new file mode 100644 index 00000000..6f081f4f --- /dev/null +++ b/examples/example-api.ts @@ -0,0 +1,238 @@ +/** + * Example demonstrating a Twitter bot using the Daydreams package. 
+ * This bot can: + * - Monitor Twitter mentions and auto-reply + * - Generate autonomous thoughts and tweet them + * - Maintain conversation memory using ChromaDB + * - Process inputs through a character-based personality + */ + +import { + HandlerRole, + Orchestrator, +} from "../packages/core/src/core/orchestrator"; +import { TwitterClient } from "../packages/core/src/io/twitter"; +import { RoomManager } from "../packages/core/src/core/room-manager"; +import { ChromaVectorDB } from "../packages/core/src/core/vector-db"; +import { Processor } from "../packages/core/src/core/processor"; +import { LLMClient } from "../packages/core/src/core/llm-client"; +import { env } from "../packages/core/src/core/env"; +import { LogLevel } from "../packages/core/src/types"; +import chalk from "chalk"; +import { defaultCharacter } from "../packages/core/src/core/character"; +import { Consciousness } from "../packages/core/src/core/consciousness"; +import { z } from "zod"; +import readline from "readline"; + +async function main() { + const loglevel = LogLevel.INFO; + // Initialize core dependencies + const vectorDb = new ChromaVectorDB("twitter_agent", { + chromaUrl: "http://localhost:8000", + logLevel: loglevel, + }); + + await vectorDb.purge(); // Clear previous session data + + const roomManager = new RoomManager(vectorDb); + + const llmClient = new LLMClient({ + model: "anthropic/claude-3-5-sonnet-latest", // Using a known supported model + temperature: 0.3, + }); + + // Initialize processor with default character personality + const processor = new Processor( + vectorDb, + llmClient, + defaultCharacter, + loglevel + ); + + // Initialize core system + const orchestrator = new Orchestrator(roomManager, vectorDb, processor, { + level: loglevel, + enableColors: true, + enableTimestamp: true, + }); + + // Initialize autonomous thought generation + const consciousness = new Consciousness(llmClient, roomManager, { + intervalMs: 300000, // Think every 5 minutes + minConfidence: 0.7, + logLevel: loglevel, + }); + + orchestrator.registerIOHandler({ + name: "fetchGithubIssues", + role: HandlerRole.ACTION, + schema: z.object({ + repo: z.string(), + }), + handler: async (payload) => { + // 1. Fetch some info from GitHub + // 2. Return the fetched data so it can be processed as "new input" + // to the next step in the chain. + const { repo } = payload as { repo: string }; + const response = await fetch( + `https://api.github.com/repos/${repo}/issues` + ); + const issues = await response.json(); + // The data returned here is fed back into the Orchestrator's chain flow. + return issues; + }, + }); + + orchestrator.registerIOHandler({ + name: "universalApiCall", + role: HandlerRole.ACTION, + // The agent must fill out these fields to make a valid request + schema: z + .object({ + method: z.enum(["GET", "POST", "PUT", "PATCH", "DELETE"]), + url: z.string().url(), + headers: z.record(z.string()).optional(), + body: z.union([z.string(), z.record(z.any())]).optional(), + }) + .describe( + "Use this to fetch data from an API. It should include the method, url, headers, and body." + ), + handler: async (payload) => { + const { method, url, headers, body } = payload as { + method: string; + url: string; + headers?: Record; + body?: any; + }; + + // Make the HTTP call + const response = await fetch(url, { + method, + headers: { + "Content-Type": "application/json", + ...headers, + }, + body: body ? 
JSON.stringify(body) : undefined, + }); + + // Return JSON or text + let responseData; + try { + responseData = await response.json(); + } catch { + responseData = await response.text(); + } + + // Return the result so the agent can process it further + return { + status: response.status, + statusText: response.statusText, + data: responseData, + }; + }, + }); + + orchestrator.registerIOHandler({ + name: "user_chat", + role: HandlerRole.INPUT, + // This schema describes what a user message looks like + schema: z.object({ + content: z.string(), + userId: z.string().optional(), + }), + // For "on-demand" input handlers, the `handler()` can be a no-op. + // We'll call it manually with data, so we don't need an interval. + handler: async (payload) => { + // We simply return the payload so the Orchestrator can process it + return payload; + }, + }); + + orchestrator.registerIOHandler({ + name: "ui_chat_reply", + role: HandlerRole.OUTPUT, + schema: z.object({ + userId: z.string().optional(), + message: z.string(), + }), + handler: async (payload) => { + const { userId, message } = payload as { + userId?: string; + message: string; + }; + + // In a real app, you might push this to a WebSocket, or store it in a DB, + // or just log it to the console: + console.log(`Reply to user ${userId ?? "??"}: ${message}`); + + // No need to return anything if it's a final "output" + }, + }); + + // Set up readline interface + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }); + + // Function to prompt for user input + const promptUser = () => { + rl.question( + 'Enter your message (or "exit" to quit): ', + async (userMessage) => { + if (userMessage.toLowerCase() === "exit") { + rl.close(); + process.exit(0); + } + + // Dispatch the message + const userId = "console-user"; + const outputs: any = await orchestrator.dispatchToInput("user_chat", { + content: userMessage, + userId, + }); + + // Now `outputs` is an array of suggestions with role=output that got triggered + if (outputs && outputs.length > 0) { + for (const out of outputs) { + if (out.name === "ui_chat_reply") { + // Our "ui_chat_reply" handler data has { userId, message } + console.log(chalk.green(`AI says: ${out.data.message}`)); + } + } + } + + // Continue prompting + promptUser(); + } + ); + }; + + // Start the prompt loop + console.log(chalk.cyan("🤖 Bot is now running and monitoring Twitter...")); + console.log(chalk.cyan("You can type messages in the console.")); + console.log(chalk.cyan('Type "exit" to quit')); + promptUser(); + + // Handle graceful shutdown + process.on("SIGINT", async () => { + console.log(chalk.yellow("\n\nShutting down...")); + + // Clean up resources + await consciousness.stop(); + orchestrator.removeIOHandler("twitter_mentions"); + orchestrator.removeIOHandler("consciousness_thoughts"); + orchestrator.removeIOHandler("twitter_reply"); + orchestrator.removeIOHandler("twitter_thought"); + rl.close(); + + console.log(chalk.green("✅ Shutdown complete")); + process.exit(0); + }); +} + +// Run the example +main().catch((error) => { + console.error(chalk.red("Fatal error:"), error); + process.exit(1); +}); diff --git a/examples/example-twitter.ts b/examples/example-twitter.ts index bf59c88a..38d09e2b 100644 --- a/examples/example-twitter.ts +++ b/examples/example-twitter.ts @@ -7,7 +7,10 @@ * - Process inputs through a character-based personality */ -import { Orchestrator } from "../packages/core/src/core/orchestrator"; +import { + HandlerRole, + Orchestrator, +} from 
"../packages/core/src/core/orchestrator"; import { TwitterClient } from "../packages/core/src/io/twitter"; import { RoomManager } from "../packages/core/src/core/room-manager"; import { ChromaVectorDB } from "../packages/core/src/core/vector-db"; @@ -19,6 +22,7 @@ import chalk from "chalk"; import { defaultCharacter } from "../packages/core/src/core/character"; import { Consciousness } from "../packages/core/src/core/consciousness"; import { z } from "zod"; +import readline from "readline"; async function main() { const loglevel = LogLevel.INFO; @@ -69,10 +73,10 @@ async function main() { logLevel: loglevel, }); - // Register input handler for Twitter mentions + // Register input handler for Twitter mentions core.registerIOHandler({ name: "twitter_mentions", - role: "input", + role: HandlerRole.INPUT, handler: async () => { console.log(chalk.blue("🔍 Checking Twitter mentions...")); // Create a static mentions input handler @@ -97,7 +101,7 @@ async function main() { // Register input handler for autonomous thoughts core.registerIOHandler({ name: "consciousness_thoughts", - role: "input", + role: HandlerRole.INPUT, handler: async () => { console.log(chalk.blue("🧠 Generating thoughts...")); const thought = await consciousness.start(); @@ -120,7 +124,7 @@ async function main() { // Register output handler for posting thoughts to Twitter core.registerIOHandler({ name: "twitter_thought", - role: "output", + role: HandlerRole.OUTPUT, handler: async (data: unknown) => { const thoughtData = data as { content: string }; @@ -142,7 +146,7 @@ async function main() { // Register output handler for Twitter replies core.registerIOHandler({ name: "twitter_reply", - role: "output", + role: HandlerRole.OUTPUT, handler: async (data: unknown) => { const tweetData = data as { content: string; inReplyTo: string }; @@ -161,9 +165,16 @@ async function main() { ), }); - // Start monitoring + // Set up readline interface + const rl = readline.createInterface({ + input: process.stdin, + output: process.stdout, + }); + + // Start the prompt loop console.log(chalk.cyan("🤖 Bot is now running and monitoring Twitter...")); - console.log(chalk.cyan("Press Ctrl+C to stop")); + console.log(chalk.cyan("You can type messages in the console.")); + console.log(chalk.cyan('Type "exit" to quit')); // Handle graceful shutdown process.on("SIGINT", async () => { @@ -175,6 +186,7 @@ async function main() { core.removeIOHandler("consciousness_thoughts"); core.removeIOHandler("twitter_reply"); core.removeIOHandler("twitter_thought"); + rl.close(); console.log(chalk.green("✅ Shutdown complete")); process.exit(0); diff --git a/package.json b/package.json index 2cfb9085..bd54ac65 100644 --- a/package.json +++ b/package.json @@ -5,6 +5,7 @@ "goal": "bun run examples/example-goal.ts", "task": "bun run examples/example-basic.ts", "twitter": "bun run examples/example-twitter.ts", + "api": "bun run examples/example-api.ts", "test": "bun run packages/core", "build": "pnpm --dir packages/core run build", "release": "pnpm build && pnpm prettier && npx lerna publish --no-private --force-publish", diff --git a/packages/core/src/core/orchestrator.ts b/packages/core/src/core/orchestrator.ts index 48494ab5..7bffe5fa 100644 --- a/packages/core/src/core/orchestrator.ts +++ b/packages/core/src/core/orchestrator.ts @@ -6,6 +6,12 @@ import type { VectorDB } from "../types"; // If you rely on VectorDB from here import { LogLevel, type LoggerConfig } from "../types"; import type { z } from "zod"; +export enum HandlerRole { + INPUT = "input", + OUTPUT = 
"output", + ACTION = "action", +} + /** * A single interface for all Inputs, Outputs. */ @@ -14,7 +20,7 @@ export interface IOHandler { name: string; /** "input" | "output" | (optionally "action") if you want more roles */ - role: "input" | "output"; + role: HandlerRole; /** For input handlers with recurring scheduling */ interval?: number; @@ -160,7 +166,6 @@ export class Orchestrator { data, }); - // Optionally validate data with a schema if you had one try { const result = await handler.handler(data); return result; @@ -184,11 +189,10 @@ export class Orchestrator { private async processInputTask(handler: IOHandler): Promise { try { const result = await handler.handler(); - if (!result) return; + // Re-schedule if it’s a recurring input if (handler.interval && handler.interval > 0) { - // Create a new handler object with definite nextRun const scheduledHandler = { ...handler, nextRun: Date.now() + handler.interval, @@ -198,28 +202,12 @@ export class Orchestrator { this.removeIOHandler(handler.name); } - const room = await this.roomManager.ensureRoom(handler.name, "core"); - const items = Array.isArray(result) ? result : [result]; - - for (const item of items) { - const processed = await this.processor.process(item, room); - - if (!processed.alreadyProcessed) { - for (const output of processed.suggestedOutputs) { - await this.dispatchToOutput(output.name, output.data); - } - - await this.roomManager.addMemory( - room.id, - JSON.stringify(processed.content), - { - source: handler.name, - type: "input", - ...processed.metadata, - ...processed.enrichedContext, - } - ); + if (Array.isArray(result)) { + for (const item of result) { + await this.runAutonomousFlow(item, handler.name); } + } else { + await this.runAutonomousFlow(result, handler.name); } } catch (error) { this.logger.error( @@ -241,6 +229,160 @@ export class Orchestrator { ); } } + /** + * Dispatches data to a registered action handler and returns its result. + * + * @param name - The name of the registered action handler to dispatch to + * @param data - The data to pass to the action handler + * @returns Promise resolving to the action handler's result + * @throws Error if no handler is found with the given name or if it's not an action handler + * + * @example + * ```ts + * // Register an action handler + * orchestrator.registerIOHandler({ + * name: "sendEmail", + * role: "action", + * handler: async (data: {to: string, body: string}) => { + * // Send email logic + * return {success: true}; + * } + * }); + * + * // Dispatch to the action + * const result = await orchestrator.dispatchToAction("sendEmail", { + * to: "user@example.com", + * body: "Hello world" + * }); + * ``` + */ + public async dispatchToAction(name: string, data: T): Promise { + const handler = this.ioHandlers.get(name); + if (!handler) { + throw new Error(`No IOHandler registered with name: ${name}`); + } + if (handler.role !== "action") { + throw new Error(`Handler "${name}" is not an action handler`); + } + this.logger.debug("Orchestrator.dispatchToAction", "Executing action", { + name, + data, + }); + try { + const result = await handler.handler(data); + return result; + } catch (error) { + this.logger.error( + "Orchestrator.dispatchToAction", + "Handler threw an error", + { + name, + error, + } + ); + throw error; + } + } + + /** + * Takes some incoming piece of data, processes it through the system, + * and handles any follow-on "action" or "output" suggestions in a chain. 
+ */ + private async runAutonomousFlow(initialData: unknown, sourceName: string) { + // We keep a queue of "items" to process + const queue: Array<{ data: unknown; source: string }> = [ + { data: initialData, source: sourceName }, + ]; + + const outputs: Array<{ name: string; data: any }> = []; + + while (queue.length > 0) { + const { data, source } = queue.shift()!; + + // 1) Ensure there's a room + const room = await this.roomManager.ensureRoom(source, "core"); + + // 2) Process with your existing processor logic + const processed = await this.processor.process(data, room); + + // If the processor thinks we've already processed it, we skip + if (processed.alreadyProcessed) { + continue; + } + + // 3) Save to memory (like you do in processInputTask) + await this.roomManager.addMemory( + room.id, + JSON.stringify(processed.content), + { + source, + type: "input", + ...processed.metadata, + ...processed.enrichedContext, + } + ); + + // 4) For each suggested output, see if it’s an action or an output + for (const output of processed.suggestedOutputs) { + const handler = this.ioHandlers.get(output.name); + if (!handler) { + this.logger.warn( + "No handler found for suggested output", + output.name + ); + continue; + } + + if (handler.role === "output") { + outputs.push({ name: output.name, data: output.data }); + + // Dispatch to an output handler (e.g. send a Slack message) + await this.dispatchToOutput(output.name, output.data); + } else if (handler.role === "action") { + // Execute an action (e.g. fetch data from an API), wait for the result + const actionResult = await this.dispatchToAction( + output.name, + output.data + ); + // Then feed the result back into the queue, so it will be processed + if (actionResult) { + queue.push({ + data: actionResult, + source: output.name, // or keep the same source, your choice + }); + } + } else { + this.logger.warn( + "Suggested output has an unrecognized role", + handler.role + ); + } + } + } + } + + public async dispatchToInput(name: string, data: T): Promise { + const handler = this.ioHandlers.get(name); + if (!handler) throw new Error(`No IOHandler: ${name}`); + if (handler.role !== "input") { + throw new Error(`Handler "${name}" is not role=input`); + } + try { + const result = await handler.handler(data); + if (result) { + // Use our runAutonomousFlow chain approach + return await this.runAutonomousFlow(result, handler.name); + } + return []; + } catch (error) { + this.logger.error( + "dispatchToInput Error", + `dispatchToInput Error: ${ + error instanceof Error ? error.message : String(error) + }` + ); + } + } /** * Stops all scheduled tasks and shuts down the orchestrator. 
diff --git a/packages/core/src/core/processor.ts b/packages/core/src/core/processor.ts index 92350424..8ad2f284 100644 --- a/packages/core/src/core/processor.ts +++ b/packages/core/src/core/processor.ts @@ -5,7 +5,6 @@ import type { VectorDB } from "../types"; import type { Character, - EnrichedContent, ProcessedResult, SearchResult, SuggestedOutput, @@ -15,7 +14,7 @@ import { LogLevel } from "../types"; import { hashString, validateLLMResponseSchema } from "./utils"; import { z } from "zod"; import { zodToJsonSchema } from "zod-to-json-schema"; -import type { IOHandler } from "./orchestrator"; +import { HandlerRole, type IOHandler } from "./orchestrator"; export class Processor { private logger: Logger; @@ -42,28 +41,13 @@ export class Processor { this.logger.debug("Processor.process", "Processing content", { content, roomId: room.id, - contentType: typeof content, - isString: typeof content === "string", - contentStr: - typeof content === "string" ? content : JSON.stringify(content), }); - // Check if this content was already processed const contentId = this.generateContentId(content); - this.logger.debug("Processor.process", "Generated content ID", { - contentId, - content: typeof content === "string" ? content : JSON.stringify(content), - }); - - const alreadyProcessed = await this.hasProcessedContent(contentId, room); - - this.logger.info("Processor.process", "Already processed", { - contentId, - alreadyProcessed, - }); + const hasProcessed = await this.hasProcessedContent(contentId, room); - if (alreadyProcessed) { + if (hasProcessed) { return { content, metadata: {}, @@ -72,246 +56,145 @@ export class Processor { summary: "", topics: [], relatedMemories: [], + sentiment: "neutral", + entities: [], + intent: "unknown", + availableOutputs: Array.from(this.ioHandlers.keys()), }, suggestedOutputs: [], alreadyProcessed: true, }; } - // First, classify the content - const contentClassification = await this.classifyContent(content); - - // Second, enrich content - const enrichedContent = await this.enrichContent(content, room, new Date()); - - // Third, determine potential outputs - const suggestedOutputs = await this.determinePotentialOutputs( - content, - enrichedContent, - contentClassification - ); - - this.logger.info("Processor.process", "Suggested outputs", { - contentId, - suggestedOutputs, - }); - - await this.markContentAsProcessed(contentId, room); - - return { - content, - metadata: { - ...contentClassification.context, - contentType: contentClassification.contentType, - }, - enrichedContext: { - ...enrichedContent.context, - availableOutputs: Array.from(this.ioHandlers.keys()), - }, - suggestedOutputs, - alreadyProcessed: false, - }; - } - - private async determinePotentialOutputs( - content: any, - enrichedContent: EnrichedContent, - classification: { contentType: string; context: Record } - ): Promise[]> { - const availableOutputs = Array.from(this.ioHandlers.entries()); - - if (availableOutputs.length === 0) return []; - - const prompt = `You are an AI assistant that analyzes content and suggests appropriate outputs. - -Content to analyze: -${typeof content === "string" ? content : JSON.stringify(content, null, 2)} - -Content Classification: -${JSON.stringify(classification, null, 2)} - -Context: -${JSON.stringify(enrichedContent.context, null, 2)} - -If this is feedback from a previous output: -1. First determine if the output was successful -2. If successful, return an empty array - no new actions needed -3. 
Only suggest new outputs if the previous action failed or requires follow-up - -# Content Outputs Schemas - select the appropriate schema for the output. You can select one or more or none. -${availableOutputs - .map( - ([name, output]) => `${name}: - ${JSON.stringify(zodToJsonSchema(output.schema, name), null, 2)} - ` - ) - .join("\n\n")} - -If the output is for a message, use the personality of the character to determine if the output was successful. - -${JSON.stringify(this.character, null, 2)} - -Based on the content and context, determine which outputs should be triggered. - -For each appropriate output, provide: -1. The output name -2. The data that matches the output's schema -3. A confidence score (0-1) -4. Reasoning for the suggestion - -Only return the JSON object, no other text. -`; - - try { - return (await validateLLMResponseSchema({ - prompt, - systemPrompt: - "You are an expert system that analyzes content and suggests appropriate automated responses. You are precise and careful to ensure all data matches the required schemas.", - schema: z.array( - z.object({ - name: z.string(), - data: z.any().describe("The data that matches the output's schema"), - confidence: z.number(), - reasoning: z.string(), - }) - ), - llmClient: this.llmClient, - logger: this.logger, - })) as SuggestedOutput[]; - } catch (error) { - this.logger.error("Processor.determinePotentialOutputs", "Error", { - error, - }); - return []; - } - } - - private async classifyContent(content: any): Promise<{ - contentType: string; - context: Record; - }> { - const prompt = ` - # Content Classification Task - -## Input Content -"${typeof content === "string" ? content : JSON.stringify(content)}" - -## Classification Requirements -Please analyze the content and determine: - -1. Content Type -- Identify if this is data, a message, an event, or another type -- Consider the structure and format -- Consider if you should respond to the content - -2. Processing Requirements -- Determine what kind of processing would be most appropriate -- Consider if any special handling is needed - -3. Contextual Analysis -- Extract any relevant context that would aid in processing -- Note any patterns or special characteristics - -## Output Format -Provide a structured classification with clear reasoning for each determination. - -Only return the JSON object, no other text. -`; - - return await validateLLMResponseSchema({ - prompt, - systemPrompt: - "You are an expert content classifier with deep experience analyzing and categorizing different types of data. Your role is to carefully examine the provided content and determine its key characteristics.", - schema: z.object({ - contentType: z.string(), - requiresProcessing: z.boolean(), - context: z.object({ - topic: z.string(), - urgency: z.enum(["high", "medium", "low"]), - additionalContext: z.string(), - }), - }), - llmClient: this.llmClient, - logger: this.logger, - }); - } - - private async enrichContent( - content: T, - room: Room, - timestamp: Date - ): Promise { const contentStr = typeof content === "string" ? 
content : JSON.stringify(content); - // Get related memories if supported + // Get related memories first since we'll need them for context const relatedMemories = await this.vectorDb.findSimilarInRoom( contentStr, room.id, 3 ); - const prompt = `Analyze the following content and provide enrichment: + const prompt = `Analyze the following content and provide a complete analysis: -Content: "${contentStr}" +# New Content to process: +${contentStr} -Related Context: +# Related Context: ${relatedMemories.map((m: SearchResult) => `- ${m.content}`).join("\n")} -Return a JSON object with the following fields: -1. A brief summary (max 100 chars) -2. Key topics mentioned (max 5) -3. Sentiment analysis -4. Named entities -5. Detected intent/purpose +# Use the Character's voice and tone to analyze the content. +${JSON.stringify(this.character)} + +# Available Outputs: +${Array.from(this.ioHandlers.entries()) + .filter(([_, handler]) => handler.role === HandlerRole.OUTPUT) + .map( + ([name, output]) => + `${name}: ${JSON.stringify(zodToJsonSchema(output.schema, name))}` + ) + .join("\n")} + +#Available Actions: +${Array.from(this.ioHandlers.entries()) + .filter(([_, handler]) => handler.role === HandlerRole.ACTION) + .map( + ([name, action]) => + `${name}: ${JSON.stringify(zodToJsonSchema(action.schema, name))}` + ) + .join("\n")} + + + 1. Content classification and type + 2. Content enrichment (summary, topics, sentiment, entities, intent) + + + + 1. Suggested outputs/actions based on the available handlers based on the content and the available handlers. + 2. If the content is a message, use the personality of the character to determine if the output was successful. + 3. If possible you should include summary of the content in the output for the user to avoid more processing. + `; try { const result = await validateLLMResponseSchema({ prompt, - systemPrompt: this.character.voice.tone, + systemPrompt: + "You are an expert system that analyzes content and provides comprehensive analysis with appropriate automated responses.", schema: z.object({ - summary: z.string().max(300), - topics: z.array(z.string()).max(20), - sentiment: z.enum(["positive", "negative", "neutral"]), - entities: z.array(z.string()), - intent: z.string(), + classification: z.object({ + contentType: z.string(), + requiresProcessing: z.boolean(), + context: z.object({ + topic: z.string(), + urgency: z.enum(["high", "medium", "low"]), + additionalContext: z.string(), + }), + }), + enrichment: z.object({ + summary: z.string().max(1000), + topics: z.array(z.string()).max(20), + sentiment: z.enum(["positive", "negative", "neutral"]), + entities: z.array(z.string()), + intent: z.string().describe("The intent of the content"), + }), + suggestedOutputs: z.array( + z.object({ + name: z.string().describe("The name of the output or action"), + data: z + .any() + .describe("The data that matches the output's schema"), + confidence: z.number().describe("The confidence score (0-1)"), + reasoning: z + .string() + .describe("The reasoning for the suggestion"), + }) + ), }), llmClient: this.llmClient, logger: this.logger, }); + await this.markContentAsProcessed(contentId, room); + return { - originalContent: contentStr, - timestamp, - context: { - timeContext: this.getTimeContext(timestamp), - summary: result.summary || contentStr.slice(0, 100), - topics: Array.isArray(result.topics) ? 
result.topics : [], + content, + metadata: { + ...result.classification.context, + contentType: result.classification.contentType, + }, + enrichedContext: { + timeContext: this.getTimeContext(new Date()), + summary: result.enrichment.summary, + topics: result.enrichment.topics, relatedMemories: relatedMemories.map((m: SearchResult) => m.content), - sentiment: result.sentiment || "neutral", - entities: Array.isArray(result.entities) ? result.entities : [], - intent: result.intent || "unknown", + sentiment: result.enrichment.sentiment, + entities: result.enrichment.entities, + intent: result.enrichment.intent, + availableOutputs: Array.from(this.ioHandlers.keys()), }, + suggestedOutputs: result.suggestedOutputs as SuggestedOutput[], + alreadyProcessed: false, }; } catch (error) { - this.logger.error("Processor.enrichContent", "Enrichment failed", { - error, - }); - + this.logger.error("Processor.process", "Processing failed", { error }); return { - originalContent: contentStr, - timestamp, - context: { - timeContext: this.getTimeContext(timestamp), + content, + metadata: {}, + enrichedContext: { + timeContext: this.getTimeContext(new Date()), summary: contentStr.slice(0, 100), topics: [], - relatedMemories: relatedMemories.map((m: SearchResult) => m.content), + relatedMemories: [], sentiment: "neutral", entities: [], intent: "unknown", + availableOutputs: Array.from(this.ioHandlers.keys()), }, + suggestedOutputs: [], + alreadyProcessed: false, }; } } @@ -330,37 +213,37 @@ Return a JSON object with the following fields: // Helper method to generate a consistent ID for content private generateContentId(content: any): string { try { - // Special handling for Twitter mentions/tweets + // 1. Special handling for Twitter mentions/tweets array if (Array.isArray(content) && content[0]?.type === "tweet") { - // For Twitter content, use the newest tweet's ID as the marker + // Use the newest tweet's ID as the marker const newestTweet = content[0]; return `tweet_batch_${newestTweet.metadata.tweetId}`; } - // Single tweet handling + // 2. Single tweet handling if (content?.type === "tweet") { return `tweet_${content.metadata.tweetId}`; } - // Keep existing logic for other content types + // 3. If it's a plain string, fallback to hashing the string but also add a small random/time factor. + // This ensures repeated user messages with the same text won't collapse to the same ID. if (typeof content === "string") { - return `content_${hashString(content)}`; + // Add a short suffix: e.g. timestamp + small random + const suffix = `${Date.now()}_${Math.random() + .toString(36) + .slice(2, 6)}`; + return `content_${hashString(content)}_${suffix}`; } - // For arrays of non-tweet content + // 4. 
For arrays (non-tweets), attempt to find known IDs or hash the items if (Array.isArray(content)) { const ids = content.map((item) => { + // Check if there's an explicit .id if (item.id) return item.id; + // Check for item.metadata?.id if (item.metadata?.id) return item.metadata.id; - // Look for common ID patterns - for (const [key, value] of Object.entries(item.metadata || {})) { - if (key.toLowerCase().endsWith("id") && value) { - return value; - } - } - - // If no ID found, hash the content + // Otherwise, hash the item const relevantData = { content: item.content || item, type: item.type, @@ -368,15 +251,19 @@ Return a JSON object with the following fields: return hashString(JSON.stringify(relevantData)); }); - return `array_${ids.join("_").slice(0, 100)}`; // Limit length of combined IDs + // Join them, but also add a short suffix so different array orders don’t collide + const suffix = `${Date.now()}_${Math.random() + .toString(36) + .slice(2, 6)}`; + return `array_${ids.join("_").slice(0, 100)}_${suffix}`; } - // For single objects, try to find an ID first + // 5. For single objects, check .id first if (content.id) { return `obj_${content.id}`; } - // Special handling for consciousness-generated content + // 6. Special handling for "internal_thought" or "consciousness" if ( content.type === "internal_thought" || content.source === "consciousness" @@ -388,11 +275,12 @@ Return a JSON object with the following fields: return `thought_${hashString(JSON.stringify(thoughtData))}`; } + // 7. Then check if there's a metadata.id if (content.metadata?.id) { return `obj_${content.metadata.id}`; } - // Look for common ID patterns in metadata + // 8. Or any metadata key ending with 'id' if (content.metadata) { for (const [key, value] of Object.entries(content.metadata)) { if (key.toLowerCase().endsWith("id") && value) { @@ -401,25 +289,26 @@ Return a JSON object with the following fields: } } - // If no ID found, fall back to hashing relevant content + // 9. Finally, fallback to hashing the object, + // but add a random/time suffix so repeated content isn’t auto-deduplicated. const relevantData = { content: content.content || content, type: content.type, - // Include source if available, but exclude room IDs + // Include source if available ...(content.source && - content.source !== "consciousness" && { source: content.source }), + content.source !== "consciousness" && { + source: content.source, + }), }; - return `obj_${hashString(JSON.stringify(relevantData))}`; + const baseHash = hashString(JSON.stringify(relevantData)); + const suffix = `${Date.now()}_${Math.random().toString(36).slice(2, 6)}`; + return `obj_${baseHash}_${suffix}`; } catch (error) { - this.logger.error( - "Processor.generateContentId", - "Error generating content ID", - { - error, - content: - typeof content === "object" ? JSON.stringify(content) : content, - } - ); + this.logger.error("Processor.generateContentId", "Error generating ID", { + error, + content: + typeof content === "object" ? 
JSON.stringify(content) : content, + }); return `fallback_${Date.now()}`; } } From 874b51e62224a6d121feba0cb41b6dc314c58007 Mon Sep 17 00:00:00 2001 From: ponderingdemocritus Date: Fri, 24 Jan 2025 12:24:49 +1100 Subject: [PATCH 3/4] cleanup --- examples/example-api.ts | 8 +--- examples/example-twitter.ts | 6 +-- packages/core/src/core/orchestrator.ts | 62 ++++++++++++++------------ packages/core/src/core/processor.ts | 2 +- packages/core/src/types/index.ts | 29 ++++++++++++ 5 files changed, 67 insertions(+), 40 deletions(-) diff --git a/examples/example-api.ts b/examples/example-api.ts index 6f081f4f..57b57d55 100644 --- a/examples/example-api.ts +++ b/examples/example-api.ts @@ -7,16 +7,12 @@ * - Process inputs through a character-based personality */ -import { - HandlerRole, - Orchestrator, -} from "../packages/core/src/core/orchestrator"; -import { TwitterClient } from "../packages/core/src/io/twitter"; +import { Orchestrator } from "../packages/core/src/core/orchestrator"; +import { HandlerRole } from "../packages/core/src/types"; import { RoomManager } from "../packages/core/src/core/room-manager"; import { ChromaVectorDB } from "../packages/core/src/core/vector-db"; import { Processor } from "../packages/core/src/core/processor"; import { LLMClient } from "../packages/core/src/core/llm-client"; -import { env } from "../packages/core/src/core/env"; import { LogLevel } from "../packages/core/src/types"; import chalk from "chalk"; import { defaultCharacter } from "../packages/core/src/core/character"; diff --git a/examples/example-twitter.ts b/examples/example-twitter.ts index 38d09e2b..0078644f 100644 --- a/examples/example-twitter.ts +++ b/examples/example-twitter.ts @@ -7,10 +7,8 @@ * - Process inputs through a character-based personality */ -import { - HandlerRole, - Orchestrator, -} from "../packages/core/src/core/orchestrator"; +import { Orchestrator } from "../packages/core/src/core/orchestrator"; +import { HandlerRole } from "../packages/core/src/types"; import { TwitterClient } from "../packages/core/src/io/twitter"; import { RoomManager } from "../packages/core/src/core/room-manager"; import { ChromaVectorDB } from "../packages/core/src/core/vector-db"; diff --git a/packages/core/src/core/orchestrator.ts b/packages/core/src/core/orchestrator.ts index 7bffe5fa..e57febd7 100644 --- a/packages/core/src/core/orchestrator.ts +++ b/packages/core/src/core/orchestrator.ts @@ -5,35 +5,7 @@ import type { Processor } from "./processor"; import type { VectorDB } from "../types"; // If you rely on VectorDB from here import { LogLevel, type LoggerConfig } from "../types"; import type { z } from "zod"; - -export enum HandlerRole { - INPUT = "input", - OUTPUT = "output", - ACTION = "action", -} - -/** - * A single interface for all Inputs, Outputs. - */ -export interface IOHandler { - /** Unique name for this handler */ - name: string; - - /** "input" | "output" | (optionally "action") if you want more roles */ - role: HandlerRole; - - /** For input handlers with recurring scheduling */ - interval?: number; - - /** The schema for the input handler */ - schema: z.ZodType; - - /** Next run time (timestamp in ms); for input scheduling. */ - nextRun?: number; - - /** The main function. For inputs, no payload is typically passed. For outputs, pass the data. 
*/ - handler: (payload?: unknown) => Promise; -} +import type { IOHandler } from "../types"; /** * Orchestrator system that manages both "input" and "output" handlers @@ -361,6 +333,38 @@ export class Orchestrator { } } + /** + * Dispatches data to a registered input handler and processes the result through the autonomous flow. + * + * @param name - The name of the input handler to dispatch to + * @param data - The data to pass to the input handler + * @returns An array of output suggestions generated from processing the input + * + * @example + * ```ts + * // Register a chat input handler + * orchestrator.registerIOHandler({ + * name: "user_chat", + * role: "input", + * handler: async (message) => { + * return { + * type: "chat", + * content: message.content, + * metadata: { userId: message.userId } + * }; + * } + * }); + * + * // Dispatch a message to the chat handler + * const outputs = await orchestrator.dispatchToInput("user_chat", { + * content: "Hello AI!", + * userId: "user123" + * }); + * ``` + * + * @throws {Error} If no handler is found with the given name + * @throws {Error} If the handler's role is not "input" + */ public async dispatchToInput(name: string, data: T): Promise { const handler = this.ioHandlers.get(name); if (!handler) throw new Error(`No IOHandler: ${name}`); diff --git a/packages/core/src/core/processor.ts b/packages/core/src/core/processor.ts index 8ad2f284..6efdc0b1 100644 --- a/packages/core/src/core/processor.ts +++ b/packages/core/src/core/processor.ts @@ -14,7 +14,7 @@ import { LogLevel } from "../types"; import { hashString, validateLLMResponseSchema } from "./utils"; import { z } from "zod"; import { zodToJsonSchema } from "zod-to-json-schema"; -import { HandlerRole, type IOHandler } from "./orchestrator"; +import { HandlerRole, type IOHandler } from "../types"; export class Processor { private logger: Logger; diff --git a/packages/core/src/types/index.ts b/packages/core/src/types/index.ts index d2782b19..65bed599 100644 --- a/packages/core/src/types/index.ts +++ b/packages/core/src/types/index.ts @@ -529,3 +529,32 @@ export interface IChain { */ write(call: unknown): Promise; } + +export enum HandlerRole { + INPUT = "input", + OUTPUT = "output", + ACTION = "action", +} + +/** + * A single interface for all Inputs, Outputs. + */ +export interface IOHandler { + /** Unique name for this handler */ + name: string; + + /** "input" | "output" | (optionally "action") if you want more roles */ + role: HandlerRole; + + /** For input handlers with recurring scheduling */ + interval?: number; + + /** The schema for the input handler */ + schema: z.ZodType; + + /** Next run time (timestamp in ms); for input scheduling. */ + nextRun?: number; + + /** The main function. For inputs, no payload is typically passed. For outputs, pass the data. 
*/ + handler: (payload?: unknown) => Promise; +} From 274db30e7d431f95065aff8c5a8a0bcfd1a688a2 Mon Sep 17 00:00:00 2001 From: ponderingdemocritus Date: Fri, 24 Jan 2025 12:34:33 +1100 Subject: [PATCH 4/4] cleanup --- examples/example-api.ts | 4 ++-- examples/example-goal.ts | 2 +- examples/example-twitter.ts | 6 +++--- packages/core/src/core/chain-of-thought.ts | 9 +++++---- packages/core/src/core/chains/evm.ts | 2 +- packages/core/src/core/chains/solana.ts | 2 +- packages/core/src/core/chains/starknet.ts | 2 +- packages/core/src/core/character.ts | 2 +- packages/core/src/core/consciousness.ts | 2 +- packages/core/src/core/goal-manager.ts | 2 +- packages/core/src/core/index.ts | 7 +++++-- packages/core/src/{ => core}/io/index.ts | 0 packages/core/src/{ => core}/io/system-prompt.ts | 0 packages/core/src/{ => core}/io/twitter.ts | 4 ++-- packages/core/src/core/llm-client.ts | 2 +- packages/core/src/core/logger.ts | 4 ++-- packages/core/src/core/orchestrator.ts | 7 +++---- packages/core/src/core/processor.ts | 8 ++++---- packages/core/src/core/room-manager.ts | 4 ++-- packages/core/src/core/room.ts | 4 ++-- packages/core/src/core/step-manager.ts | 2 +- packages/core/src/{ => core}/types/index.ts | 4 ++-- packages/core/src/core/utils.ts | 2 +- packages/core/src/core/vector-db.ts | 2 +- packages/core/src/index.ts | 2 -- 25 files changed, 43 insertions(+), 42 deletions(-) rename packages/core/src/{ => core}/io/index.ts (100%) rename packages/core/src/{ => core}/io/system-prompt.ts (100%) rename packages/core/src/{ => core}/io/twitter.ts (98%) rename packages/core/src/{ => core}/types/index.ts (99%) diff --git a/examples/example-api.ts b/examples/example-api.ts index 57b57d55..efe65449 100644 --- a/examples/example-api.ts +++ b/examples/example-api.ts @@ -8,12 +8,12 @@ */ import { Orchestrator } from "../packages/core/src/core/orchestrator"; -import { HandlerRole } from "../packages/core/src/types"; +import { HandlerRole } from "../packages/core/src/core/types"; import { RoomManager } from "../packages/core/src/core/room-manager"; import { ChromaVectorDB } from "../packages/core/src/core/vector-db"; import { Processor } from "../packages/core/src/core/processor"; import { LLMClient } from "../packages/core/src/core/llm-client"; -import { LogLevel } from "../packages/core/src/types"; +import { LogLevel } from "../packages/core/src/core/types"; import chalk from "chalk"; import { defaultCharacter } from "../packages/core/src/core/character"; import { Consciousness } from "../packages/core/src/core/consciousness"; diff --git a/examples/example-goal.ts b/examples/example-goal.ts index ad040bc9..4d5f865c 100644 --- a/examples/example-goal.ts +++ b/examples/example-goal.ts @@ -14,7 +14,7 @@ import * as readline from "readline"; import chalk from "chalk"; import { ChromaVectorDB } from "../packages/core/src/core/vector-db"; -import { GoalStatus, LogLevel } from "../packages/core/src/types"; +import { GoalStatus, LogLevel } from "../packages/core/src/core/types"; import { fetchGraphQL } from "../packages/core/src/core/providers"; import { StarknetChain } from "../packages/core/src/core/chains/starknet"; import { z } from "zod"; diff --git a/examples/example-twitter.ts b/examples/example-twitter.ts index 0078644f..7cf2c80d 100644 --- a/examples/example-twitter.ts +++ b/examples/example-twitter.ts @@ -8,14 +8,14 @@ */ import { Orchestrator } from "../packages/core/src/core/orchestrator"; -import { HandlerRole } from "../packages/core/src/types"; -import { TwitterClient } from 
"../packages/core/src/io/twitter"; +import { HandlerRole } from "../packages/core/src/core/types"; +import { TwitterClient } from "../packages/core/src/core/io/twitter"; import { RoomManager } from "../packages/core/src/core/room-manager"; import { ChromaVectorDB } from "../packages/core/src/core/vector-db"; import { Processor } from "../packages/core/src/core/processor"; import { LLMClient } from "../packages/core/src/core/llm-client"; import { env } from "../packages/core/src/core/env"; -import { LogLevel } from "../packages/core/src/types"; +import { LogLevel } from "../packages/core/src/core/types"; import chalk from "chalk"; import { defaultCharacter } from "../packages/core/src/core/character"; import { Consciousness } from "../packages/core/src/core/consciousness"; diff --git a/packages/core/src/core/chain-of-thought.ts b/packages/core/src/core/chain-of-thought.ts index 238dd44a..3c5e0cd8 100644 --- a/packages/core/src/core/chain-of-thought.ts +++ b/packages/core/src/core/chain-of-thought.ts @@ -5,12 +5,14 @@ import type { Goal, HorizonType, RefinedGoal, -} from "../types"; + VectorDB, + Output, +} from "./types"; import { Logger } from "./logger"; import { EventEmitter } from "events"; import { GoalManager } from "./goal-manager"; import { StepManager, type Step, type StepType } from "./step-manager"; -import { LogLevel } from "../types"; + import { calculateImportance, determineEmotions, @@ -19,10 +21,9 @@ import { injectTags, } from "./utils"; import Ajv from "ajv"; -import type { VectorDB } from "../types"; import { zodToJsonSchema } from "zod-to-json-schema"; import { z } from "zod"; -import type { Output } from "../types"; +import { LogLevel } from "./types"; const ajv = new Ajv(); diff --git a/packages/core/src/core/chains/evm.ts b/packages/core/src/core/chains/evm.ts index c513906e..f0cd7167 100644 --- a/packages/core/src/core/chains/evm.ts +++ b/packages/core/src/core/chains/evm.ts @@ -1,5 +1,5 @@ import { ethers } from "ethers"; -import type { IChain } from "../../types"; +import type { IChain } from "../types"; /** * Configuration options for initializing an EVM chain connection diff --git a/packages/core/src/core/chains/solana.ts b/packages/core/src/core/chains/solana.ts index e8174d42..fda192b6 100644 --- a/packages/core/src/core/chains/solana.ts +++ b/packages/core/src/core/chains/solana.ts @@ -5,7 +5,7 @@ import { Transaction, sendAndConfirmTransaction, } from "@solana/web3.js"; -import type { IChain } from "../../types"; +import type { IChain } from "../types"; import bs58 from "bs58"; export interface SolanaChainConfig { diff --git a/packages/core/src/core/chains/starknet.ts b/packages/core/src/core/chains/starknet.ts index ed1f015e..e9390034 100644 --- a/packages/core/src/core/chains/starknet.ts +++ b/packages/core/src/core/chains/starknet.ts @@ -1,5 +1,5 @@ import { RpcProvider, Account, type Call, CallData } from "starknet"; -import type { IChain } from "../../types"; +import type { IChain } from "../types"; /** * Configuration options for initializing a Starknet chain connection diff --git a/packages/core/src/core/character.ts b/packages/core/src/core/character.ts index 59c8837e..d6e958cb 100644 --- a/packages/core/src/core/character.ts +++ b/packages/core/src/core/character.ts @@ -1,4 +1,4 @@ -import type { Character } from "../types"; +import type { Character } from "./types"; // Example character configuration export const defaultCharacter: Character = { diff --git a/packages/core/src/core/consciousness.ts b/packages/core/src/core/consciousness.ts index 
ed725022..ae296587 100644 --- a/packages/core/src/core/consciousness.ts +++ b/packages/core/src/core/consciousness.ts @@ -7,7 +7,7 @@ import { type Thought, type ThoughtTemplate, type ThoughtType, -} from "../types"; +} from "./types"; import { validateLLMResponseSchema } from "./utils"; import { z } from "zod"; diff --git a/packages/core/src/core/goal-manager.ts b/packages/core/src/core/goal-manager.ts index 657d7467..aa749cae 100644 --- a/packages/core/src/core/goal-manager.ts +++ b/packages/core/src/core/goal-manager.ts @@ -1,4 +1,4 @@ -import type { Goal, GoalStatus, HorizonType } from "../types"; +import type { Goal, GoalStatus, HorizonType } from "./types"; /** * Manages a collection of goals, their relationships, and their lifecycle states. * Provides methods for creating, updating, and querying goals and their hierarchies. diff --git a/packages/core/src/core/index.ts b/packages/core/src/core/index.ts index 8110fb31..d06df307 100644 --- a/packages/core/src/core/index.ts +++ b/packages/core/src/core/index.ts @@ -7,7 +7,6 @@ import { GoalManager } from "./goal-manager"; import { ChainOfThought } from "./chain-of-thought"; import { TaskScheduler } from "./task-scheduler"; import { Logger } from "./logger"; -import { LogLevel } from "../types"; import { Consciousness } from "./consciousness"; import { LLMClient } from "./llm-client"; import { StepManager } from "./step-manager"; @@ -15,6 +14,9 @@ import { defaultCharacter } from "./character"; import * as Utils from "./utils"; import * as Providers from "./providers"; import * as Chains from "./chains"; +import * as IO from "./io"; +import * as Types from "./types"; + export { Orchestrator, Consciousness, @@ -22,7 +24,6 @@ export { StepManager, TaskScheduler, Logger, - LogLevel, RoomManager, Room, ChromaVectorDB, @@ -33,4 +34,6 @@ export { defaultCharacter, Providers, Chains, + IO, + Types, }; diff --git a/packages/core/src/io/index.ts b/packages/core/src/core/io/index.ts similarity index 100% rename from packages/core/src/io/index.ts rename to packages/core/src/core/io/index.ts diff --git a/packages/core/src/io/system-prompt.ts b/packages/core/src/core/io/system-prompt.ts similarity index 100% rename from packages/core/src/io/system-prompt.ts rename to packages/core/src/core/io/system-prompt.ts diff --git a/packages/core/src/io/twitter.ts b/packages/core/src/core/io/twitter.ts similarity index 98% rename from packages/core/src/io/twitter.ts rename to packages/core/src/core/io/twitter.ts index 5937bc84..884cd052 100644 --- a/packages/core/src/io/twitter.ts +++ b/packages/core/src/core/io/twitter.ts @@ -1,8 +1,8 @@ import { Scraper, SearchMode, type Tweet } from "agent-twitter-client"; import type { JSONSchemaType } from "ajv"; -import { Logger } from "../core/logger"; +import { Logger } from "../../core/logger"; import { LogLevel } from "../types"; -import { env } from "../core/env"; +import { env } from "../../core/env"; export interface TwitterCredentials { username: string; diff --git a/packages/core/src/core/llm-client.ts b/packages/core/src/core/llm-client.ts index dc090ea3..3889e3eb 100644 --- a/packages/core/src/core/llm-client.ts +++ b/packages/core/src/core/llm-client.ts @@ -10,7 +10,7 @@ import type { LLMClientConfig, LLMResponse, StructuredAnalysis, -} from "../types"; +} from "./types"; import { generateText } from "ai"; import { openai } from "@ai-sdk/openai"; diff --git a/packages/core/src/core/logger.ts b/packages/core/src/core/logger.ts index 717a942f..c15f594e 100644 --- a/packages/core/src/core/logger.ts +++ 
b/packages/core/src/core/logger.ts @@ -1,6 +1,6 @@ -import { LogLevel } from "../types"; +import { LogLevel } from "./types"; -import type { LogEntry, LoggerConfig } from "../types"; +import type { LogEntry, LoggerConfig } from "./types"; export class Logger { private config: Required; diff --git a/packages/core/src/core/orchestrator.ts b/packages/core/src/core/orchestrator.ts index e57febd7..f505e218 100644 --- a/packages/core/src/core/orchestrator.ts +++ b/packages/core/src/core/orchestrator.ts @@ -2,10 +2,9 @@ import { Logger } from "./logger"; import { RoomManager } from "./room-manager"; import { TaskScheduler } from "./task-scheduler"; import type { Processor } from "./processor"; -import type { VectorDB } from "../types"; // If you rely on VectorDB from here -import { LogLevel, type LoggerConfig } from "../types"; -import type { z } from "zod"; -import type { IOHandler } from "../types"; +import type { VectorDB } from "./types"; +import { LogLevel, type LoggerConfig } from "./types"; +import type { IOHandler } from "./types"; /** * Orchestrator system that manages both "input" and "output" handlers diff --git a/packages/core/src/core/processor.ts b/packages/core/src/core/processor.ts index 6efdc0b1..ec039093 100644 --- a/packages/core/src/core/processor.ts +++ b/packages/core/src/core/processor.ts @@ -1,20 +1,20 @@ import { LLMClient } from "./llm-client"; import { Logger } from "./logger"; import { Room } from "./room"; -import type { VectorDB } from "../types"; +import type { VectorDB } from "./types"; import type { Character, ProcessedResult, SearchResult, SuggestedOutput, -} from "../types"; -import { LogLevel } from "../types"; +} from "./types"; +import { LogLevel } from "./types"; import { hashString, validateLLMResponseSchema } from "./utils"; import { z } from "zod"; import { zodToJsonSchema } from "zod-to-json-schema"; -import { HandlerRole, type IOHandler } from "../types"; +import { HandlerRole, type IOHandler } from "./types"; export class Processor { private logger: Logger; diff --git a/packages/core/src/core/room-manager.ts b/packages/core/src/core/room-manager.ts index 88acb27c..bf7741b3 100644 --- a/packages/core/src/core/room-manager.ts +++ b/packages/core/src/core/room-manager.ts @@ -1,8 +1,8 @@ import { Room } from "./room"; -import type { Memory, RoomMetadata } from "../types"; +import type { Memory, RoomMetadata } from "./types"; import { ChromaVectorDB } from "./vector-db"; import { Logger } from "./logger"; -import { LogLevel } from "../types"; +import { LogLevel } from "./types"; export class RoomManager { private logger: Logger; diff --git a/packages/core/src/core/room.ts b/packages/core/src/core/room.ts index c50021ca..d357b4bc 100644 --- a/packages/core/src/core/room.ts +++ b/packages/core/src/core/room.ts @@ -1,6 +1,6 @@ import { createHash } from "crypto"; -import type { RoomMetadata } from "../types"; -import type { Memory } from "../types"; +import type { RoomMetadata } from "./types"; +import type { Memory } from "./types"; /** * Represents a room/conversation context that can store memories and metadata. diff --git a/packages/core/src/core/step-manager.ts b/packages/core/src/core/step-manager.ts index 5397fcba..e8825318 100644 --- a/packages/core/src/core/step-manager.ts +++ b/packages/core/src/core/step-manager.ts @@ -1,4 +1,4 @@ -import type { Step, StepType } from "../types"; +import type { Step, StepType } from "./types"; /** * Manages a collection of steps with unique IDs. 
diff --git a/packages/core/src/types/index.ts b/packages/core/src/core/types/index.ts similarity index 99% rename from packages/core/src/types/index.ts rename to packages/core/src/core/types/index.ts index 65bed599..a55e4d31 100644 --- a/packages/core/src/types/index.ts +++ b/packages/core/src/core/types/index.ts @@ -1,6 +1,6 @@ import type { z } from "zod"; -import type { LLMClient } from "../core/llm-client"; -import type { Logger } from "../core/logger"; +import type { LLMClient } from "../../core/llm-client"; +import type { Logger } from "../../core/logger"; /** * ChainOfThoughtContext can hold any relevant data diff --git a/packages/core/src/core/utils.ts b/packages/core/src/core/utils.ts index 0175d49a..8e12e24f 100644 --- a/packages/core/src/core/utils.ts +++ b/packages/core/src/core/utils.ts @@ -1,6 +1,6 @@ import { Ajv, type JSONSchemaType } from "ajv"; import zodToJsonSchema from "zod-to-json-schema"; -import type { LLMValidationOptions } from "../types"; +import type { LLMValidationOptions } from "./types"; export const injectTags = ( tags: Record = {}, diff --git a/packages/core/src/core/vector-db.ts b/packages/core/src/core/vector-db.ts index d6158125..06e9e7b9 100644 --- a/packages/core/src/core/vector-db.ts +++ b/packages/core/src/core/vector-db.ts @@ -16,7 +16,7 @@ import { type HierarchicalCluster, type SearchResult, type VectorDB, -} from "../types"; +} from "./types"; import { isValidDateValue } from "./utils"; export class ChromaVectorDB implements VectorDB { diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index cd26f035..8d119dee 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -1,3 +1 @@ export * from "./core"; -export * from "./io"; -export * from "./types";
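
For reference, a minimal sketch of how downstream code wires up the relocated pieces after PATCH 4/4: `HandlerRole` now comes from `packages/core/src/core/types`, handlers are registered with the enum instead of string literals, and `dispatchToInput` drives the new autonomous flow. The import paths mirror `examples/example-api.ts`; the `orchestrator` instance is assumed to be constructed as in that example (RoomManager + ChromaVectorDB + Processor), and is only declared here as a placeholder.

```ts
// Sketch only — not part of the patches above. Paths follow the post-refactor
// layout used by examples/example-api.ts.
import { Orchestrator } from "../packages/core/src/core/orchestrator";
import { HandlerRole } from "../packages/core/src/core/types";
import { z } from "zod";

// Assumed to be built as in examples/example-api.ts; declared as a placeholder here.
declare const orchestrator: Orchestrator;

async function demo() {
    // Inputs, outputs, and actions all share the IOHandler shape; the role is
    // now the HandlerRole enum rather than the old "input" | "output" union.
    orchestrator.registerIOHandler({
        name: "user_chat",
        role: HandlerRole.INPUT,
        schema: z.object({
            content: z.string(),
            userId: z.string().optional(),
        }),
        // On-demand input: just return the payload for the processor chain.
        handler: async (payload) => payload,
    });

    // dispatchToInput feeds the payload through runAutonomousFlow: the processor
    // suggests outputs/actions, action results are queued back in, and (per
    // example-api.ts) the triggered output suggestions are resolved to the caller.
    const outputs: any = await orchestrator.dispatchToInput("user_chat", {
        content: "Hello",
        userId: "console-user",
    });
    console.log(outputs);
}

demo().catch(console.error);
```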