Skip to content

Commit

Permalink
fix auto destruct on windows
Browse files Browse the repository at this point in the history
  • Loading branch information
louis030195 committed Sep 27, 2024
1 parent 89f8c6c commit c000fbd
Show file tree
Hide file tree
Showing 12 changed files with 720 additions and 51 deletions.
12 changes: 7 additions & 5 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,15 +21,17 @@ license = "MIT OR Apache-2.0"
edition = "2021"

[workspace.dependencies]
# AI
candle = { package = "candle-core", version = "0.7.1" }
candle-nn = { package = "candle-nn", version = "0.7.1" }
candle-transformers = { package = "candle-transformers", version = "0.7.1" }
tokenizers = "0.20.0"
hf-hub = "0.3.0"

log = "0.4"
candle = { package = "candle-core", version = "0.7.0" }
candle-nn = { package = "candle-nn", version = "0.7.0" }
candle-transformers = { package = "candle-transformers", version = "0.7.0" }
tokenizers = "0.19.1"
tracing = "0.1.40"
tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
tokio = { version = "1.15", features = ["full", "tracing"] }
hf-hub = "0.3.0"
crossbeam = "0.8.4"
image = "0.25"
reqwest = { version = "0.11", features = ["blocking", "multipart", "json"] }
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# pipe-llama32-sync-user-conversation-to-notion

This pipe automatically summarizes your user conversations (screen text only) and sends the summaries to a Notion database. It uses AI to analyze screen content from your meetings and generate structured tables.

## Quick Setup

1. Run Ollama:
```
ollama run llama3.2:3b-instruct-q4_K_M
```

2. Set up Notion:
a. Create a new integration at https://www.notion.so/my-integrations
b. Copy the API key for later use
c. Create a new database in Notion with the following properties:
- Summary (Title)
- Key Points (Rich text)
- Action Items (Rich text)
- Pain Points (Rich text)
- Needs (Rich text)
- Sentiment (Select)
- Timestamp (Date)
d. Share the database with your integration:
- Open the database
- Click the '...' menu in the top right
- Go to 'Add connections' and select your integration
e. Copy the database ID from the URL:
- Open the database in full-page view
- The URL will look like: https://www.notion.so/your-workspace/database-id?v=...
- Copy the 'database-id' part

3. Configure the pipe:
a. Open the Screenpipe app
b. Go to the Pipes section
   c. Find or add the "pipe-llama32-sync-user-conversation-to-notion" pipe
d. Configure the following fields:
      - Polling Interval (default: 60000 ms / 1 minute)
- Notion API Key (from step 2b)
- Notion Database ID (from step 2e)
- AI API URL (default: http://localhost:11434/api/chat for Ollama)
- AI Model (default: llama3.2:3b-instruct-q4_K_M)
- Custom Summary Prompt (optional)
e. Save the configuration
f. Enable the pipe

4. Restart Screenpipe recording

That's it! The pipe will now periodically check for new screen content, summarize it using AI, and send the summaries to your Notion database.

## Customization

To customize the pipe's behavior, you can modify the `pipe.ts` file. Key areas you might want to adjust include:
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
{
"fields": [
{
"name": "pollingInterval",
"type": "number",
"default": 60000,
"description": "interval in milliseconds to poll conversation content (default: 2 minutes)"
},
{
"name": "pollingSize",
"type": "number",
"default": 100,
"description": "number of records to retrieve from Screenpipe per page"
},
{
"name": "windowName",
"type": "window",
"default": "",
"description": "specific window name to filter the conversation data, e.g., 'slack', 'gmail', etc."
},
{
"name": "aiApiUrl",
"type": "string",
"default": "http://localhost:11434/api/chat",
"description": "ai api url, can be ollama, openai, or any openai compatible api"
},
{
"name": "aiModel",
"type": "string",
"default": "llama3.2:3b-instruct-q4_K_M",
"description": "ai model to use for summarizing conversations"
},
{
"name": "notionApiKey",
"type": "string",
"default": "<fill your notion api key here>",
"description": "notion api key for authentication"
},
{
"name": "notionDatabaseId",
"type": "string",
"default": "<fill your notion database id here>",
"description": "id of the notion database to sync conversations to"
},
{
"name": "customSummaryPrompt",
"type": "string",
"default": "you are an expert in user research and product development, trained in 'the mom test' methodology. analyze this user conversation with the following guidelines:\n\n1. focus on extracting specific examples and stories, not opinions.\n2. identify problems and frustrations the user is currently facing.\n3. uncover the user's current behaviors and workflows.\n4. look for emotional responses and strong reactions.\n5. detect any mentions of money or time investments.\n6. note any alternative solutions the user has tried.\n7. capture any future plans or intended actions.\n8. identify potential product ideas or feature requests.\n9. highlight any misunderstandings or areas of confusion.\n10. detect any 'maybes' or non-committal responses that need further exploration.\n\nremember, good user research is about listening and observing, not pitching or leading the user. extract insights that can drive product decisions and improvements.",
"description": "custom prompt for the ai to analyze and summarize the conversation"
}
]
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,208 @@
// Structured summary of one user conversation, as produced by the AI in
// summarizeConversation() and written to a Notion database by addToNotion().
interface ConversationEntry {
  summary: string; // brief one-line summary; becomes the Notion page title
  keyPoints: string[]; // joined with ", " before being sent to Notion
  actionItems: string[];
  painPoints: string[];
  needs: string[];
  sentiment: string; // prompt asks for "positive"/"neutral"/"negative" — not enforced here
  timestamp: string; // ISO-8601 string, set at summarization time (not capture time)
}

// Best-effort extraction of a JSON object from a raw LLM reply.
// Strips markdown fences and surrounding chatter, then parses; on failure it
// applies a second pass of common-mistake repairs before giving up.
// Throws Error("invalid json format in llm response") if nothing parses.
function extractJsonFromLlmResponse(response: string): any {
  // Drop any ```json ... ``` fencing the model wrapped around its answer.
  let text = response.replace(/^```(?:json)?\s*|\s*```$/g, "");

  // Keep only the outermost brace-delimited region, if one is present.
  const braced = text.match(/\{[\s\S]*\}/);
  if (braced) {
    text = braced[0];
  }

  // Trim any residue before the first '{' or after the last '}'.
  text = text.replace(/^[^{]*/, "").replace(/[^}]*$/, "");

  // Collapse escaped and literal newlines so the payload is a single line.
  text = text.replace(/\\n/g, "").replace(/\n/g, "");

  try {
    return JSON.parse(text);
  } catch (error) {
    console.warn("failed to parse json:", error);
    console.warn("cleaned content:", text);

    // Second pass: repair the mistakes LLMs make most often, then retry once.
    text = text
      .replace(/,\s*}/g, "}") // trailing comma before a closing brace
      .replace(/'/g, '"') // single quotes -> double quotes
      .replace(/(\w+):/g, '"$1":') // bare keys -> quoted keys
      .replace(/:\s*'([^']*)'/g, ': "$1"'); // single-quoted values

    try {
      return JSON.parse(text);
    } catch (secondError) {
      console.warn("failed to parse json after attempted fixes:", secondError);
      throw new Error("invalid json format in llm response");
    }
  }
}

// Ask the configured AI endpoint to summarize screen-captured conversation
// data into a ConversationEntry.
// Returns null when the model judges the data is not a user conversation
// (signalled by a literal "false" reply); throws on HTTP or format errors.
async function summarizeConversation(
  conversationData: ContentItem[],
  aiApiUrl: string,
  aiModel: string,
  customSummaryPrompt: string
): Promise<ConversationEntry | null> {
  const prompt = `${customSummaryPrompt}
  analyze the following conversation:
  ${JSON.stringify(conversationData)}
  return a json object with the following structure:
  {
      "summary": "brief summary of the conversation",
      "keyPoints": ["key point 1", "key point 2", ...],
      "actionItems": ["action item 1", "action item 2", ...],
      "painPoints": ["pain point 1", "pain point 2", ...],
      "needs": ["need 1", "need 2", ...],
      "sentiment": "positive/neutral/negative"
  }
  rules:
  - do not add backticks to the json eg \`\`\`json\`\`\` is wrong
  - do not return anything but json. no comments below the json.
  - if the data does not seem to be related to user conversation, return "false"
  `;

  const response = await fetch(aiApiUrl, {
    method: "POST",
    // fix: the body is JSON; openai-compatible servers reject requests
    // without an explicit content type.
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      model: aiModel,
      messages: [{ role: "user", content: prompt }],
      stream: false,
      response_format: { type: "json_object" },
    }),
  });

  if (!response.ok) {
    console.log("ai response:", await response.text());
    throw new Error(`http error! status: ${response.status}`);
  }

  const result = await response.json();
  console.log("ai answer:", result);

  // The config allows "ollama, openai, or any openai compatible api":
  // ollama returns { message: {...} }, openai-style returns
  // { choices: [{ message: {...} }] } — accept both instead of crashing
  // with a TypeError on the latter.
  const message = result.message ?? result.choices?.[0]?.message;
  if (!message || typeof message.content !== "string") {
    throw new Error("invalid ai response format");
  }

  if (message.content === "false") {
    return null;
  }

  let content;
  try {
    content = extractJsonFromLlmResponse(message.content);
  } catch (error) {
    console.warn("failed to parse ai response:", error, result);
    throw new Error("invalid ai response format");
  }

  return {
    ...content,
    timestamp: new Date().toISOString(),
  };
}

// Create a Notion page for one conversation summary in the given database.
// List fields are flattened to comma-separated rich text; throws when the
// Notion API responds with a non-2xx status.
async function addToNotion(
  notionApiKey: string,
  databaseId: string,
  entry: ConversationEntry
): Promise<void> {
  // Helper: wrap a string list as a single Notion rich_text property.
  const asRichText = (items: string[]) => ({
    rich_text: [{ text: { content: items.join(", ") } }],
  });

  const payload = {
    parent: { database_id: databaseId },
    properties: {
      "Summary": { title: [{ text: { content: entry.summary } }] },
      "Key Points": asRichText(entry.keyPoints),
      "Action Items": asRichText(entry.actionItems),
      "Pain Points": asRichText(entry.painPoints),
      "Needs": asRichText(entry.needs),
      "Sentiment": { select: { name: entry.sentiment } },
      "Timestamp": { date: { start: entry.timestamp } },
    },
  };

  const response = await fetch(`https://api.notion.com/v1/pages`, {
    method: "POST",
    headers: {
      Authorization: `Bearer ${notionApiKey}`,
      "Notion-Version": "2022-06-28",
      "Content-Type": "application/json",
    },
    body: JSON.stringify(payload),
  });

  if (!response.ok) {
    throw new Error(`failed to add to notion: ${response.statusText}`);
  }

  console.log("successfully added to notion");
}

// Main loop: every pollingInterval ms, query recent OCR screen content from
// screenpipe, summarize it with the configured AI model, and push the result
// to Notion. Runs forever; per-iteration errors are logged and swallowed so
// one bad cycle never kills the pipe.
async function syncConversationPipeline(): Promise<void> {
  console.log("starting conversation sync pipeline");

  const config = await pipe.loadConfig();
  console.log("loaded config:", JSON.stringify(config, null, 2));

  const {
    pollingInterval,
    windowName,
    aiApiUrl,
    aiModel,
    notionApiKey,
    notionDatabaseId,
    customSummaryPrompt,
  } = config;

  while (true) {
    try {
      const now = new Date();
      const fiveMinutesAgo = new Date(now.getTime() - 5 * 60 * 1000);

      // NOTE(review): the lookback window is fixed at 5 minutes; if
      // pollingInterval is set longer than that, content captured in
      // between polls is never queried — confirm this is intended.
      const conversationData = await pipe.queryScreenpipe({
        start_time: fiveMinutesAgo.toISOString(),
        end_time: now.toISOString(),
        window_name: windowName,
        content_type: "ocr",
        limit: 1000,
      });

      if (
        conversationData &&
        conversationData.data &&
        conversationData.data.length > 0
      ) {
        const summary = await summarizeConversation(
          conversationData.data,
          aiApiUrl,
          aiModel,
          customSummaryPrompt
        );
        if (summary) {
          console.log("conversation summary:", summary);
          await addToNotion(notionApiKey, notionDatabaseId, summary);
          console.log("added to notion");
        } else {
          // fix: the original used `continue` here, which skipped the
          // sleep below and immediately re-queried + re-hit the AI
          // endpoint in a no-delay loop whenever no summary was produced.
          console.log("no summary found");
        }
      }
    } catch (error) {
      console.warn("error in conversation sync pipeline:", error);
    }
    // Always delay between iterations, regardless of the outcome above.
    console.log("sleeping for", pollingInterval, "ms");
    await new Promise((resolve) => setTimeout(resolve, pollingInterval));
  }
}

syncConversationPipeline();
Loading

0 comments on commit c000fbd

Please sign in to comment.