Commit

Merge pull request #17 from ashu565/final-pr
init commits
dresslife-shbh authored Sep 12, 2024
2 parents eaef0c2 + 19492ab commit 163c30a
Showing 7 changed files with 39 additions and 32 deletions.
Empty file added .gitignore
2 changes: 1 addition & 1 deletion Golem/composables/conversation.ts
@@ -325,7 +325,7 @@ export const useConversations = () => {
     const waiting = () => new Promise((resolve, reject) => {
         setTimeout(() => {
             resolve('ok')
-        }, 500)
+        }, 1000)
     })

     const upsertAssistantMessage = async (messageResponse: types.Message, finalUpdate?: boolean) => {
2 changes: 1 addition & 1 deletion LLM App/BaseCommand.ts
@@ -60,7 +60,7 @@ import {
                 },
                 value: "create_new_chat",
                 style: "primary",
-                url: "https://safe.dev.rocket.chat/api/apps/public/8d4acc61-d871-46e2-94b5-db161448483c/prompt-editor/chat"
+                url: "http://localhost:3000/api/apps/public/8d4acc61-d871-46e2-94b5-db161448483c/prompt-editor/chat"
             },
         }

35 changes: 20 additions & 15 deletions LLM App/LLMPromptApp.ts
@@ -161,25 +161,30 @@ export class ConversateEndpoint extends ApiEndpoint {
         let oldMsgData : IMessageLLM[] = [
             ...conversationData.messages,
             {
-                sentBy: "user",
-                message
+                role: "user",
+                content: message
             }
         ]

-        const reply = await conversateWithLLM(http, oldMsgData, messageId, read)
+        const [currentReply, history] = await conversateWithLLM(http, oldMsgData, messageId, read)

-        const newMsgData = [
-            {
-                sentBy: "user",
-                message
-            },
-            {
-                sentBy: "assistant",
-                message: reply
-            }
-        ]
+        console.log(currentReply);
+        console.log(history);

-        await addNewMessageToConversation(read, persis, request.user.id, conversationId, newMsgData)
+        if(currentReply.includes("%ended%")){
+            const newMsgData = [
+                {
+                    role: "user",
+                    content: message
+                },
+                {
+                    role: "assistant",
+                    content: history
+                }
+            ]
+
+            await addNewMessageToConversation(read, persis, request.user.id, conversationId, newMsgData)
+        }

         return {
             status: 200,
@@ -190,7 +195,7 @@ export class ConversateEndpoint extends ApiEndpoint {
             },
             content: {
                 sentBy: "assistant",
-                message: reply,
+                message: currentReply,
                 conversationId: conversationId,
                 fromMsg : message
             },
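For readers following the change, here is a minimal, framework-free sketch of the new endpoint flow. The names handleConversateRequest, conversateWithLLMStub, and persistMessages are placeholders, not the app's actual identifiers, and the Rocket.Chat Apps service objects (http, read, persis) are omitted for brevity: the handler appends the new user message in the role/content shape, forwards everything to the LLM helper, returns only the freshly streamed chunk to the caller, and persists the full exchange once the reply carries the %ended% marker.

type IMessageLLM = { role: string; content: string };

// Stand-in for conversateWithLLM(); the real helper streams from the local model server.
async function conversateWithLLMStub(messages: IMessageLLM[], msgId: string): Promise<[string, string]> {
    return ["Hello %ended%", "Hello"]; // [currentReply, history]
}

// Stand-in for addNewMessageToConversation().
async function persistMessages(conversationId: string, messages: IMessageLLM[]): Promise<void> {
    console.log(`persisting ${messages.length} messages to conversation ${conversationId}`);
}

async function handleConversateRequest(conversationId: string, msgId: string, prior: IMessageLLM[], message: string) {
    // Append the user's new message using the role/content shape introduced by this commit.
    const oldMsgData: IMessageLLM[] = [...prior, { role: "user", content: message }];

    // The helper now returns a pair: the freshly streamed chunk and the accumulated history.
    const [currentReply, history] = await conversateWithLLMStub(oldMsgData, msgId);

    // Persist the full exchange only once the stream has finished (marked by "%ended%").
    if (currentReply.includes("%ended%")) {
        await persistMessages(conversationId, [
            { role: "user", content: message },
            { role: "assistant", content: history },
        ]);
    }

    // Every poll returns just the fresh chunk to the caller.
    return { status: 200, content: { sentBy: "assistant", message: currentReply, conversationId, fromMsg: message } };
}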
2 changes: 1 addition & 1 deletion LLM App/bundle.ts

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions LLM App/db/schemas/Conversation.ts
@@ -4,6 +4,6 @@ export type IConversation = {
 }

 export type IMessageLLM = {
-    message : string;
-    sentBy : string;
+    content : string;
+    role : string;
 }
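The rename brings the stored message shape in line with the OpenAI-style chat payload that helpers.ts posts to /v1/chat/completions, so persisted turns can be forwarded without remapping. A small illustrative sketch (the message texts are made up; the model name and stream flag mirror the startChat() payload below):

type IMessageLLM = { content: string; role: string };

// Stored conversation turns can now be sent to the model server as-is.
const messages: IMessageLLM[] = [
    { role: "user", content: "Hi there" },
    { role: "assistant", content: "Hello! How can I help?" },
];

// Request body in the same shape startChat() builds in helpers.ts.
const postData = JSON.stringify({
    model: "./dist/gemma-2b-it-q4f16_0-MLC",
    stream: true,
    messages,
});

console.log(postData);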
26 changes: 14 additions & 12 deletions LLM App/helpers.ts
@@ -171,7 +171,8 @@ export const checkOrCreateUser = async(read: IRead, persistence: IPersistence, u
 const processingData: any = {
     "test": {
         startedStreaming: false,
-        chunks: []
+        chunks: [],
+        history: ""
     },
 }

@@ -184,15 +185,15 @@ const processingData: any = {
 const startChat = async(read, http, messages, msgId) => {

     const postData = JSON.stringify({
-        model: "./dist/Llama-2-7b-chat-hf-q4f16_1-MLC/",
+        model: "./dist/gemma-2b-it-q4f16_0-MLC",
         stream: true,
         messages
     });


     const options = {
-        hostname: "llama3-8b",
-        port: 80,
+        hostname: "localhost",
+        port: 8000,
         path: "/v1/chat/completions",
         method: "POST",
         headers: {
@@ -218,6 +219,7 @@ const startChat = async(read, http, messages, msgId) => {
                 const parsedChunk = JSON.parse(jsonStr);
                 const content = parsedChunk.choices[0].delta.content || "";
                 // await exportData(http, content)
+                console.log('content', content);
                 processingData[msgId].chunks.push(content);

             } catch (e) {
@@ -246,21 +248,22 @@ req.end();

 }

-const splitChunks = (msgId) => {
+const splitChunks = (msgId): Array<string> => {
     const current = processingData[msgId].chunks.join(" ")
+    processingData[msgId].history += current
     processingData[msgId].chunks = []

-    return current
+    return [current, processingData[msgId].history]
 }

-export const conversateWithLLM = async (http: IHttp, messages: Array<any>, msgId: string, read: IRead): Promise<string> => {
+export const conversateWithLLM = async (http: IHttp, messages: Array<any>, msgId: string, read: IRead): Promise<Array<string>> => {

     try {
-        return "Hey wssup %ended%"
         if(!processingData[msgId]) {
             processingData[msgId] = {
                 startedStreaming: false,
-                chunks: []
+                chunks: [],
+                history: ""
             }
         }

@@ -271,18 +274,17 @@ export const conversateWithLLM = async (http: IHttp, messages: Array<any>, msgId
         }
         processingData[msgId].startedStreaming = true;
         startChat(read, http, messages, msgId)
-        return ""
+        return splitChunks(msgId)
     }

     if(processingData[msgId].startedStreaming) {
         return splitChunks(msgId)
     }
     } catch(error) {
-        return "Hey wssup %ended%"
+        processingData[msgId].chunks.push("%ended%")
+        processingData[msgId].startedStreaming = false;
+        return splitChunks(msgId)
     }

-    return ""
+    return splitChunks(msgId)
 }
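Taken together, helpers.ts now keeps a per-message buffer in processingData: the first call to conversateWithLLM registers the msgId and kicks off startChat, and every call drains whatever has been buffered through splitChunks, which also folds the drained text into history; on error the catch block pushes %ended% so the front end knows the stream is over. A self-contained sketch of that contract with a hypothetical polling client follows (startChatStub and pollUntilEnded are illustrative stand-ins, and the real startChat streams tokens asynchronously from the model server rather than filling the buffer synchronously):

// Simplified model of the chunk buffer in helpers.ts.
type StreamState = { startedStreaming: boolean; chunks: string[]; history: string };
const processingData: Record<string, StreamState> = {};

// Hypothetical stand-in for startChat(): pushes streamed tokens, then the end marker.
function startChatStub(msgId: string): void {
    processingData[msgId].chunks.push("Hello", "from", "the", "model", "%ended%");
}

// Mirrors splitChunks(): drain the buffer, fold it into history, return both.
function splitChunks(msgId: string): [string, string] {
    const current = processingData[msgId].chunks.join(" ");
    processingData[msgId].history += current;
    processingData[msgId].chunks = [];
    return [current, processingData[msgId].history];
}

// Mirrors conversateWithLLM(): the first poll starts the stream, later polls drain it.
async function conversateWithLLM(msgId: string): Promise<[string, string]> {
    if (!processingData[msgId]) {
        processingData[msgId] = { startedStreaming: false, chunks: [], history: "" };
    }
    if (!processingData[msgId].startedStreaming) {
        processingData[msgId].startedStreaming = true;
        startChatStub(msgId); // fire-and-forget in the real helper
    }
    return splitChunks(msgId);
}

// Hypothetical polling client: keep asking for new chunks until "%ended%" shows up.
async function pollUntilEnded(msgId: string): Promise<void> {
    for (;;) {
        const [currentReply, history] = await conversateWithLLM(msgId);
        if (currentReply) console.log("chunk:", currentReply);
        if (currentReply.includes("%ended%")) {
            console.log("full reply:", history.replace("%ended%", "").trim());
            return;
        }
        await new Promise((resolve) => setTimeout(resolve, 1000)); // same pacing as the composable's waiting()
    }
}

pollUntilEnded("demo-msg");

Under that contract, the one-second waiting() delay in Golem/composables/conversation.ts presumably paces the client's polls between chunks.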
