Skip to content

Commit

Permalink
[Example] Add simple-chat-js example
Browse files Browse the repository at this point in the history
  • Loading branch information
Neet-Nestor committed May 30, 2024
1 parent 44edca1 commit c7950a4
Show file tree
Hide file tree
Showing 16 changed files with 350 additions and 35 deletions.
10 changes: 9 additions & 1 deletion .eslintrc.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -6,5 +6,13 @@ module.exports = {
rules: {
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/no-empty-function": "off"
}
},
overrides: [
{
"files": ["examples/**/*.js"],
"rules": {
"no-undef": "off"
}
}
]
};
2 changes: 1 addition & 1 deletion .lintstagedrc.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
{
"./**/*.{js,ts,jsx,tsx,json,css}": ["eslint --fix", "prettier --write"]
"./**/*.{js,ts,jsx,tsx,json}": ["eslint --fix", "prettier --write"]
}
12 changes: 8 additions & 4 deletions examples/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,16 +3,20 @@
This page contains a curated list of examples, tutorials, and blogs about WebLLM use cases.
Please send a pull request if you find things that belong here.

## Tutorial Examples
## Example Projects

Note that all examples below run in-browser and use WebGPU as a backend.

#### Basic Chat Completion
#### Project List
- [get-started](get-started): minimum get started example with chat completion.
- [get-started-web-worker](get-started-web-worker): same as get-started, but using web worker.
- [multi-round-chat](multi-round-chat): while APIs are functional, we internally optimize so that multi round chat usage can reuse KV cache
- [simple-chat](simple-chat): a minimum and complete chat bot app.
- [simple-chat-js](simple-chat-js): a minimum and complete chat bot app in vanilla JavaScript.

[![Open on JSFiddle](https://img.shields.io/badge/open-JSFiddle-blue?logo=jsfiddle&logoColor=white)](https://jsfiddle.net/neetnestor/4nmgvsa2/)
[![Open on Codepen](https://img.shields.io/badge/open-codepen-black?logo=codepen)](https://codepen.io/neetnestor/pen/vYwgZaG)
- [simple-chat-ts](simple-chat-ts): a minimum and complete chat bot app.
- [next-simple-chat](next-simple-chat): a minimum and complete chat bot app with [Next.js](https://nextjs.org/).
- [multi-round-chat](multi-round-chat): while APIs are functional, we internally optimize so that multi round chat usage can reuse KV cache

#### Advanced OpenAI API Capabilities
These examples demonstrate various capabilities via WebLLM's OpenAI-like API.
Expand Down
104 changes: 104 additions & 0 deletions examples/simple-chat-js/index.css
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
body,
html {
font-family: Arial, sans-serif;
padding: 10px 20px;
}

.download-container {
display: flex;
justify-content: space-between;
margin-bottom: 20px;
}

#download-status {
border: solid 1px black;
box-shadow: 0 10px 15px -3px rgba(0, 0, 0, .1), 0 4px 6px -2px rgba(0, 0, 0, .05);
padding: 10px;
}

.chat-container {
height: 400px;
width: 100%;
border: 2px solid black;
display: flex;
flex-direction: column;
}

.chat-box {
overflow-y: scroll;
background-color: #c3c3c3;
border: 1px solid #ccc;
padding: 5px;
flex: 1 1;
}

.chat-stats {
background-color: #D3ECEB;
flex: 0 0;
padding: 10px;
font-size: 0.75rem;
}

.message-container {
width: 100%;
display: flex;
}

.message {
padding: 10px;
margin: 10px 0;
border-radius: 10px;
width: fit-content;
}

.message-container.user {
justify-content: end;
}

.message-container.assistant {
justify-content: start;
}

.message-container.user .message {
background: #007bff;
color: #fff;
}

.message-container.assistant .message {
background: #f1f0f0;
color: #333;
}

.chat-input-container {
min-height: 40px;
flex: 0 0;
display: flex;
}

#user-input {
width: 70%;
padding: 10px;
border: 1px solid #ccc;
}

button {
width: 25%;
padding: 10px;
border: none;
background-color: #007bff;
color: white;
cursor: pointer;
}

button:disabled {
background-color: lightgray;
cursor: not-allowed;
}

button:hover:not(:disabled) {
background-color: #0056b3;
}

.hidden {
display: none;
}
38 changes: 38 additions & 0 deletions examples/simple-chat-js/index.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
<!DOCTYPE html>
<html>

<head>
<title>Simple Chatbot</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<meta charset="UTF-8" />
<link rel="stylesheet" href="./index.css" />
</head>

<body>
<p>
Step 1: Initialize WebLLM and Download Model
</p>
<div class="download-container">
<select id="model-selection"></select>
<button id="download">
Download
</button>
</div>
<p id="download-status" class="hidden"></p>

<p>
Step 2: Chat
</p>
<div class="chat-container">
<div id="chat-box" class="chat-box"></div>
<div id="chat-stats" class="chat-stats hidden"></div>
<div class="chat-input-container">
<input type="text" id="user-input" placeholder="Type a message..." />
<button id="send" disabled>Send</button>
</div>
</div>

<script src="./index.js" type="module"></script>
</body>

</html>
142 changes: 142 additions & 0 deletions examples/simple-chat-js/index.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,142 @@
import * as webllm from "https://esm.run/@mlc-ai/web-llm";

/*************** WebLLM logic ***************/
const messages = [
{
content: "You are a helpful AI agent helping users.",
role: "system",
},
];

const availableModels = webllm.prebuiltAppConfig.model_list.map(
(m) => m.model_id,
);
let selectedModel = "TinyLlama-1.1B-Chat-v0.4-q4f32_1-1k";

// Callback function for initializing progress
function updateEngineInitProgressCallback(report) {
console.log("initialize", report.progress);
document.getElementById("download-status").textContent = report.text;
}

// Create engine instance
const engine = new webllm.MLCEngine();
engine.setInitProgressCallback(updateEngineInitProgressCallback);

async function initializeWebLLMEngine() {
document.getElementById("download-status").classList.remove("hidden");
selectedModel = document.getElementById("model-selection").value;
const config = {
temperature: 1.0,
top_p: 1,
};
await engine.reload(selectedModel, config);
}

async function streamingGenerating(messages, onUpdate, onFinish, onError) {
try {
let curMessage = "";
const completion = await engine.chat.completions.create({
stream: true,
messages,
});
for await (const chunk of completion) {
const curDelta = chunk.choices[0].delta.content;
if (curDelta) {
curMessage += curDelta;
}
onUpdate(curMessage);
}
const finalMessage = await engine.getMessage();
onFinish(finalMessage);
} catch (err) {
onError(err);
}
}

/*************** UI logic ***************/
function onMessageSend() {
const input = document.getElementById("user-input").value.trim();
const message = {
content: input,
role: "user",
};
if (input.length === 0) {
return;
}
document.getElementById("send").disabled = true;

messages.push(message);
appendMessage(message);

document.getElementById("user-input").value = "";
document
.getElementById("user-input")
.setAttribute("placeholder", "Generating...");

const aiMessage = {
content: "typing...",
role: "assistant",
};
appendMessage(aiMessage);

const onFinishGenerating = (finalMessage) => {
updateLastMessage(finalMessage);
document.getElementById("send").disabled = false;
engine.runtimeStatsText().then((statsText) => {
document.getElementById("chat-stats").classList.remove("hidden");
document.getElementById("chat-stats").textContent = statsText;
});
};

streamingGenerating(
messages,
updateLastMessage,
onFinishGenerating,
console.error,
);
}

function appendMessage(message) {
const chatBox = document.getElementById("chat-box");
const container = document.createElement("div");
container.classList.add("message-container");
const newMessage = document.createElement("div");
newMessage.classList.add("message");
newMessage.textContent = message.content;

if (message.role === "user") {
container.classList.add("user");
} else {
container.classList.add("assistant");
}

container.appendChild(newMessage);
chatBox.appendChild(container);
chatBox.scrollTop = chatBox.scrollHeight; // Scroll to the latest message
}

function updateLastMessage(content) {
const messageDoms = document
.getElementById("chat-box")
.querySelectorAll(".message");
const lastMessageDom = messageDoms[messageDoms.length - 1];
lastMessageDom.textContent = content;
}

/*************** UI binding ***************/
availableModels.forEach((modelId) => {
const option = document.createElement("option");
option.value = modelId;
option.textContent = modelId;
document.getElementById("model-selection").appendChild(option);
});
document.getElementById("model-selection").value = selectedModel;
document.getElementById("download").addEventListener("click", function () {
initializeWebLLMEngine().then(() => {
document.getElementById("send").disabled = false;
});
});
document.getElementById("send").addEventListener("click", function () {
onMessageSend();
});
File renamed without changes.
File renamed without changes.
File renamed without changes.
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { prebuiltAppConfig } from "@mlc-ai/web-llm";

export default {
"model_list": prebuiltAppConfig.model_list,
"use_web_worker": true
}
model_list: prebuiltAppConfig.model_list,
use_web_worker: true,
};
File renamed without changes
File renamed without changes
File renamed without changes.
File renamed without changes.
Loading

0 comments on commit c7950a4

Please sign in to comment.