
Commit

removed model name, added replace/append options
manimohans committed Jun 22, 2024
1 parent 07c466f commit 49fa8de
Showing 4 changed files with 53 additions and 44 deletions.
4 changes: 4 additions & 0 deletions README.md
@@ -3,6 +3,10 @@
 This plugin integrates a local Large Language Model (LLM) service with Obsidian for summarizing and transforming text.
 
 ### Features
+v1.0.8
+* Removed model name specification - it doesn't matter if you're using LMStudio.
+* You can now choose whether to replace or append to the selected text.
+
 v1.0.7
 * Generate text button is updated to more meaningful text
 * Command palette can now be accessed to use all the functionalities that were present before.
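
For context on the first new bullet: as the main.ts diff below shows, the request body now carries an empty model string, and LM Studio simply responds with whichever model is currently loaded. A minimal standalone sketch of that request shape follows - the endpoint and JSON fields are taken from the diff, while the helper name and the explicit method/headers are illustrative assumptions, not part of the plugin:

// Illustrative sketch only - mirrors the non-streaming request built in main.ts.
async function chatCompletion(
  serverAddress: string,
  serverPort: string,
  prompt: string,
  selectedText: string
): Promise<string> {
  const response = await fetch(
    `http://${serverAddress}:${serverPort}/v1/chat/completions`,
    {
      method: "POST", // assumed; the diff only shows the request body fields
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify({
        model: "", // left empty as of v1.0.8 - the loaded LM Studio model is used
        messages: [
          { role: "system", content: "You are my text editor AI agent" },
          { role: "user", content: prompt + ": " + selectedText },
        ],
        stream: false,
      }),
    }
  );
  const data = await response.json();
  return data.choices[0].message.content;
}

The streaming path in the diff hits the same endpoint with stream enabled and writes the response into the note word by word as chunks arrive.
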
87 changes: 46 additions & 41 deletions main.ts
@@ -17,17 +17,17 @@ import {
 interface OLocalLLMSettings {
 serverAddress: string;
 serverPort: string;
-llmModel: string;
 stream: boolean;
 customPrompt: string;
+outputMode: string;
 }
 
 const DEFAULT_SETTINGS: OLocalLLMSettings = {
 serverAddress: "localhost",
 serverPort: "1234",
-llmModel: "TheBloke/Mistral-7B-Instruct-v0.2-GGUF",
 stream: false,
 customPrompt: "create a todo list from the following text:",
+outputMode: "replace"
 };
 
 export default class OLocalLLMPlugin extends Plugin {
@@ -46,9 +46,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 "Summarize the following text (maintain verbs and pronoun forms, also retain the markdowns):",
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 },
@@ -64,9 +64,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 "Make the following sound professional (maintain verbs and pronoun forms, also retain the markdowns):",
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 },
@@ -82,9 +82,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 "Generate action items based on the following text (use or numbers based on context):",
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 },
@@ -101,9 +101,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 this.settings.customPrompt,
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 },
@@ -119,9 +119,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 "Generate response based on the following text. This is your prompt:",
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 },
@@ -141,9 +141,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 "Summarize the following text (maintain verbs and pronoun forms, also retain the markdowns):",
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 })
@@ -160,9 +160,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 "Make the following sound professional (maintain verbs and pronoun forms, also retain the markdowns):",
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 })
@@ -179,9 +179,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 "Generate response based on the following text. This is your prompt:",
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 })
@@ -198,9 +198,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 "Generate action items based on the following text (use or numbers based on context):",
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 })
@@ -220,9 +220,9 @@ export default class OLocalLLMPlugin extends Plugin {
 selectedText,
 this.settings.serverAddress,
 this.settings.serverPort,
-this.settings.llmModel,
 this.settings.customPrompt,
-this.settings.stream
+this.settings.stream,
+this.settings.outputMode
 );
 }
 })
@@ -316,19 +316,6 @@ class OLLMSettingTab extends PluginSettingTab {
 })
 );
 
-new Setting(containerEl)
-.setName("LLM model")
-.setDesc("currently works with LM Studio - find model name there")
-.addText((text) =>
-text
-.setPlaceholder("Model name")
-.setValue(this.plugin.settings.llmModel) // Assuming there's a serverPort property in settings
-.onChange(async (value) => {
-this.plugin.settings.llmModel = value;
-await this.plugin.saveSettings();
-})
-);
-
 new Setting(containerEl)
 .setName("Custom prompt")
 .setDesc("create your own prompt - for your specific niche needs")
@@ -357,16 +344,31 @@ class OLLMSettingTab extends PluginSettingTab {
 await this.plugin.saveSettings();
 })
 );
+
+new Setting(containerEl)
+.setName("Output Mode")
+.setDesc("Choose how to handle generated text")
+.addDropdown((dropdown) =>
+dropdown
+.addOption("replace", "Replace selected text")
+.addOption("append", "Append after selected text")
+.setValue(this.plugin.settings.outputMode)
+.onChange(async (value) => {
+this.plugin.settings.outputMode = value;
+await this.plugin.saveSettings();
+})
+);
 }
 }
 
 async function processText(
 selectedText: string,
 serverAddress: string,
 serverPort: string,
-modelName: string,
 prompt: string,
-stream: boolean
+stream: boolean,
+outputMode: string
+
 ) {
 new Notice("Generating response. This takes a few seconds..");
 const statusBarItemEl = document.querySelector(
@@ -379,7 +381,7 @@ async function processText(
 }
 
 const body = {
-model: modelName,
+model: "",
 messages: [
 { role: "system", content: "You are my text editor AI agent" },
 { role: "user", content: prompt + ": " + selectedText },
@@ -390,6 +392,9 @@
 };
 
 try {
+if (outputMode === "append") {
+modifySelectedText(selectedText);
+}
 if (stream) {
 const response = await fetch(
 `http://${serverAddress}:${serverPort}/v1/chat/completions`,
@@ -435,7 +440,7 @@ async function processText(
 if (data.choices[0].delta.content) {
 let word =
 data.choices[0].delta.content;
-replaceSelectedText(word);
+modifySelectedText(word);
 }
 }
 } catch (error) {
@@ -465,7 +470,7 @@ async function processText(
 const summarizedText = data.choices[0].message.content;
 console.log(summarizedText);
 new Notice("Text generated. Voila!");
-replaceSelectedText(summarizedText);
+modifySelectedText(summarizedText);
 } else {
 throw new Error(
 "Error summarizing text (requestUrl): " + response.text
@@ -485,7 +490,7 @@
 }
 }
 
-function replaceSelectedText(text: any) {
+function modifySelectedText(text: any) {
 let view = this.app.workspace.getActiveViewOfType(MarkdownView);
 if (!view) {
 new Notice("No active view");
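
The diff above is truncated before the body of modifySelectedText (formerly replaceSelectedText), but the call sites explain the new behaviour: the function inserts text at the current selection, so in append mode processText() first re-inserts the original selection and the generated text then lands after it, while replace mode overwrites the selection directly. A hedged sketch of that mechanism using the Obsidian editor API - the helper name and signature are illustrative, not the committed code:

import { App, MarkdownView, Notice } from "obsidian";

// Illustrative helper, not the committed implementation.
// Editor.replaceSelection() swaps the current selection for `text` and leaves
// the cursor right after the inserted text, so the first call consumes the
// selection and every later call (for example, streamed words) appends at the
// cursor position.
function insertAtSelection(app: App, text: string) {
  const view = app.workspace.getActiveViewOfType(MarkdownView);
  if (!view) {
    new Notice("No active view");
    return;
  }
  view.editor.replaceSelection(text);
}

Under that reading, the new if (outputMode === "append") { modifySelectedText(selectedText); } guard in processText() is what turns the same insert primitive into either replace or append.
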
2 changes: 1 addition & 1 deletion manifest.json
@@ -1,7 +1,7 @@
 {
 "id": "local-llm-helper",
 "name": "Local LLM Helper",
-"version": "1.0.7",
+"version": "1.0.8",
 "minAppVersion": "1.5.12",
 "description": "Use your own secure local LLM server to work with your text!",
 "author": "Mani Mohan",
4 changes: 2 additions & 2 deletions updatetags.sh
@@ -1,3 +1,3 @@
 #make sure to edit manifest.json - the version numbers must match!
-git tag -a 1.0.7 -m "1.0.7"
-git push origin 1.0.7
+git tag -a 1.0.8 -m "1.0.8"
+git push origin 1.0.8
