Skip to content

Commit 9f78626

Browse files
committed
Initial commit for the AI feature
1 parent 4f32492 commit 9f78626

File tree

8 files changed

+549
-0
lines changed

8 files changed

+549
-0
lines changed

src/llms/cloudflare-ai.ts

+59
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
import type { LLMProviders, Session } from "~llms";
2+
3+
export default class CloudflareAI implements LLMProviders {
4+
5+
constructor(
6+
private readonly accountId: string,
7+
private readonly apiToken: String,
8+
private readonly model = '@cf/facebook/bart-large-cnn' // text summarization model
9+
) { }
10+
11+
async validate(): Promise<void> {
12+
const res = await fetch(`https://api.cloudflare.com/client/v4/accounts/${this.accountId}/ai/models/search?per_page=1`, {
13+
headers: {
14+
Authorization: `Bearer ${this.apiToken}`
15+
}
16+
})
17+
const json = await res.json()
18+
if (!json.success) throw new Error('Cloudflare API 验证失败')
19+
}
20+
21+
async prompt(chat: string): Promise<string> {
22+
const res = await fetch(`https://api.cloudflare.com/client/v4/accounts/${this.accountId}/ai/run/${this.model}`, {
23+
headers: {
24+
Authorization: `Bearer ${this.apiToken}`
25+
},
26+
body: JSON.stringify({ prompt: chat })
27+
})
28+
const json = await res.json()
29+
return json.response
30+
}
31+
32+
async *promptStream(chat: string): AsyncGenerator<string> {
33+
const res = await fetch(`https://api.cloudflare.com/client/v4/accounts/${this.accountId}/ai/run/${this.model}`, {
34+
headers: {
35+
Authorization: `Bearer ${this.apiToken}`
36+
},
37+
body: JSON.stringify({ prompt: chat, stream: true })
38+
})
39+
if (!res.body) throw new Error('Cloudflare AI response body is not readable')
40+
const reader = res.body.getReader()
41+
const decoder = new TextDecoder('utf-8', { ignoreBOM: true })
42+
while (true) {
43+
const { done, value } = await reader.read()
44+
if (done) break
45+
const { response } = JSON.parse(decoder.decode(value, { stream: true }))
46+
yield response
47+
}
48+
}
49+
50+
async asSession(): Promise<Session<LLMProviders>> {
51+
console.warn('Cloudflare AI session is not supported')
52+
return {
53+
...this,
54+
[Symbol.dispose]: () => { }
55+
}
56+
}
57+
58+
59+
}

src/llms/gemini-nano.ts

+89
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
import type { LLMProviders, Session } from "~llms"
2+
3+
export default class GeminiNano implements LLMProviders {
4+
5+
async validate(): Promise<void> {
6+
if (!window.ai) throw new Error('你的浏览器没有启用 AI 功能')
7+
if (!window.ai.languageModel &&
8+
!window.ai.assistant &&
9+
!window.ai.summarizer
10+
) throw new Error('你的浏览器没有启用 AI 功能')
11+
}
12+
13+
async prompt(chat: string): Promise<string> {
14+
using session = await this.asSession()
15+
return session.prompt(chat)
16+
}
17+
18+
async *promptStream(chat: string): AsyncGenerator<string> {
19+
using session = await this.asSession()
20+
return session.promptStream(chat)
21+
}
22+
23+
async asSession(): Promise<Session<LLMProviders>> {
24+
25+
if (window.ai.assistant || window.ai.languageModel) {
26+
const assistant = window.ai.assistant ?? window.ai.languageModel
27+
const capabilities = await assistant.capabilities()
28+
if (capabilities.available === 'readily') {
29+
return new GeminiAssistant(await assistant.create())
30+
} else {
31+
console.warn('AI Assistant 当前不可用: ', capabilities)
32+
}
33+
}
34+
35+
if (window.ai.summarizer) {
36+
const summarizer = window.ai.summarizer
37+
const capabilities = await summarizer.capabilities()
38+
if (capabilities.available === 'readily') {
39+
return new GeminiSummarizer(await summarizer.create())
40+
} else {
41+
console.warn('AI Summarizer 当前不可用: ', capabilities)
42+
}
43+
}
44+
45+
throw new Error('你的浏览器 AI 功能当前不可用')
46+
}
47+
}
48+
49+
class GeminiAssistant implements Session<LLMProviders> {
50+
51+
constructor(private readonly assistant: AIAssistant) { }
52+
53+
prompt(chat: string): Promise<string> {
54+
return this.assistant.prompt(chat)
55+
}
56+
57+
async *promptStream(chat: string): AsyncGenerator<string> {
58+
const stream = this.assistant.promptStreaming(chat)
59+
for await (const chunk of stream) {
60+
yield chunk
61+
}
62+
}
63+
64+
[Symbol.dispose](): void {
65+
this.assistant.destroy()
66+
}
67+
}
68+
69+
70+
class GeminiSummarizer implements Session<LLMProviders> {
71+
72+
constructor(private readonly summarizer: AISummarizer) { }
73+
74+
prompt(chat: string): Promise<string> {
75+
return this.summarizer.summarize(chat)
76+
}
77+
78+
async *promptStream(chat: string): AsyncGenerator<string> {
79+
const stream = this.summarizer.summarizeStreaming(chat)
80+
for await (const chunk of stream) {
81+
yield chunk
82+
}
83+
}
84+
85+
[Symbol.dispose](): void {
86+
this.summarizer.destroy()
87+
}
88+
89+
}

src/llms/index.ts

+27
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
import cloudflare from './cloudflare-ai'
2+
import nano from './gemini-nano'
3+
import worker from './remote-worker'
4+
5+
/** Contract every LLM backend implements. */
export interface LLMProviders {
    /** Throws when the backend is unreachable or misconfigured. */
    validate(): Promise<void>
    /** One-shot prompt: resolves with the complete response text. */
    prompt(chat: string): Promise<string>
    /** Streaming prompt: yields response fragments as they arrive. */
    promptStream(chat: string): AsyncGenerator<string>
    /** Opens a disposable session bound to this provider. */
    asSession(): Promise<Session<LLMProviders>>
}

// A session is a disposable handle exposing everything except setup/validation.
export type Session<T> = Disposable & Omit<T, 'asSession' | 'validate'>

// Registry of available provider constructors, keyed by short name.
const llms = {
    cloudflare,
    nano,
    worker
}

// Union of registry keys, e.g. 'cloudflare' | 'nano' | 'worker'.
export type LLMTypes = keyof typeof llms
21+
22+
export async function createLLMProvider(type: LLMTypes, ...args: any[]): Promise<LLMProviders> {
23+
const LLM = llms[type].bind(this, ...args)
24+
return new LLM()
25+
}
26+
27+
export default createLLMProvider

src/llms/remote-worker.ts

+54
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,54 @@
1+
import type { LLMProviders, Session } from "~llms";
2+
3+
4+
// for my worker, so limited usage
5+
export default class RemoteWorker implements LLMProviders {
6+
7+
async validate(): Promise<void> {
8+
const res = await fetch('https://llm.ericlamm.xyz/status')
9+
const json = await res.json()
10+
if (json.status !== 'working') {
11+
throw new Error('Remote worker is not working')
12+
}
13+
}
14+
15+
async prompt(chat: string): Promise<string> {
16+
const res = await fetch('https://llm.ericlamm.xyz/', {
17+
method: 'POST',
18+
headers: {
19+
'Content-Type': 'application/json'
20+
},
21+
body: JSON.stringify({ prompt: chat })
22+
})
23+
const json = await res.json()
24+
return json.response
25+
}
26+
27+
async *promptStream(chat: string): AsyncGenerator<string> {
28+
const res = await fetch('https://llm.ericlamm.xyz/', {
29+
method: 'POST',
30+
headers: {
31+
'Content-Type': 'application/json'
32+
},
33+
body: JSON.stringify({ prompt: chat, stream: true })
34+
})
35+
if (!res.body) throw new Error('Remote worker response body is not readable')
36+
const reader = res.body.getReader()
37+
const decoder = new TextDecoder('utf-8', { ignoreBOM: true })
38+
while (true) {
39+
const { done, value } = await reader.read()
40+
if (done) break
41+
const { response } = JSON.parse(decoder.decode(value, { stream: true }))
42+
yield response
43+
}
44+
}
45+
46+
async asSession(): Promise<Session<LLMProviders>> {
47+
console.warn('Remote worker session is not supported')
48+
return {
49+
...this,
50+
[Symbol.dispose]: () => { }
51+
}
52+
}
53+
54+
}

0 commit comments

Comments
 (0)