
Commit 48d015a — convert ai response with markdown convert

Parent: 86efdab
13 files changed, +120 −64 lines

package.json (+1)

@@ -36,6 +36,7 @@
     "dexie-react-hooks": "^1.1.7",
     "hash-wasm": "^4.11.0",
     "hls.js": "^1.5.8",
+    "markdown-to-jsx": "^7.5.0",
    "media-chrome": "^2.2.5",
     "mpegts.js": "^1.7.3",
     "n-danmaku": "^2.2.1",

pnpm-lock.yaml (+15 −20)

Generated lockfile; diff not rendered.

src/components/ChatBubble.tsx (+1 −1)

@@ -27,7 +27,7 @@ function ChatBubble(props: ChatBubbleProps): JSX.Element {
         {messages.map((message, index) => (
             <div key={index} className="max-w-full grid">
                 <div data-testid={`${name}-bubble-chat-${index}`} className="px-3.5 py-2 bg-gray-100 rounded justify-start items-center gap-3 inline-flex">
-                    <h5 className={`text-gray-900 text-sm font-normal leading-snug ${loading ? 'animate-pulse' : ''}`}>{message.text}</h5>
+                    <div className={`text-gray-900 text-sm font-normal leading-snug list-inside ${loading ? 'animate-pulse' : ''}`}>{message.text}</div>
                </div>
                 {message.time && (
                     <div data-testid={`${name}-bubble-time-${index}`} className="justify-end items-center inline-flex mb-2.5">
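
The `<h5>` → `<div>` swap (plus `list-inside`) makes room for block-level Markdown output inside the bubble: an `<h5>` may only contain phrasing content, so lists or headings emitted by the model need a `<div>` wrapper. The Markdown conversion itself presumably happens in one of the changed files whose diff is not shown in this excerpt, using the newly added markdown-to-jsx dependency. The sketch below only illustrates how that library is typically wired up; the helper name `renderAIResponse` is hypothetical and not part of this commit.

```tsx
// Hypothetical helper, for illustration only (not the commit's actual code):
// markdown-to-jsx turns the raw AI response string into JSX, so lists and
// headings emitted by the model render as real <ul>/<ol>/<h*> elements.
import Markdown from 'markdown-to-jsx'
import type { ReactNode } from 'react'

export function renderAIResponse(text: string): ReactNode {
    return <Markdown options={{ forceBlock: true }}>{text}</Markdown>
}
```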

src/hooks/input.ts → src/hooks/form.ts (renamed, +2 −2)

@@ -1,9 +1,9 @@
 import { useCallback, useRef } from "react";
 
-export function useFileInput(onFileChange: (files: FileList) => Promise<void>, onError?: (e: Error | any) => void, deps: any[] = []){
+export function useFileInput(onFileChange: (files: FileList) => Promise<void>, onError?: (e: Error | any) => void, deps: any[] = []) {
 
     const inputRef = useRef<HTMLInputElement>()
-    const selectFiles = useCallback(function(): Promise<void> {
+    const selectFiles = useCallback(function (): Promise<void> {
         return new Promise((resolve, reject) => {
             const listener = async (e: Event) => {
                 try {

src/llms/cloudflare-ai.ts (+2)

@@ -19,6 +19,8 @@ export default class CloudFlareAI implements LLMProviders {
         this.model = settings.model || CloudFlareAI.DEFAULT_MODEL
     }
 
+    cumulative: boolean = true
+
     async validate(): Promise<void> {
         const success = await validateAIToken(this.accountId, this.apiToken, this.model)
         if (typeof success === 'boolean' && !success) throw new Error('Cloudflare API 验证失败')

src/llms/gemini-nano.ts (+26 −11)

@@ -2,6 +2,8 @@ import type { LLMProviders, Session } from "~llms"
 
 export default class GeminiNano implements LLMProviders {
 
+    cumulative: boolean = false
+
     async validate(): Promise<void> {
         if (!window.ai) throw new Error('你的浏览器没有启用 AI 功能')
         if (!window.ai.languageModel &&
@@ -13,43 +15,52 @@ export default class GeminiNano implements LLMProviders {
     async prompt(chat: string): Promise<string> {
         const session = await this.asSession()
         try {
+            console.debug('[gemini nano] prompting: ', chat)
             return session.prompt(chat)
         } finally {
+            console.debug('[gemini nano] done')
             session[Symbol.dispose]()
         }
     }
 
     async *promptStream(chat: string): AsyncGenerator<string> {
         const session = await this.asSession()
         try {
-            return session.promptStream(chat)
+            console.debug('[gemini nano] prompting stream: ', chat)
+            const res = session.promptStream(chat)
+            for await (const chunk of res) {
+                yield chunk
+            }
         } finally {
+            console.debug('[gemini nano] done')
             session[Symbol.dispose]()
         }
     }
 
     async asSession(): Promise<Session<LLMProviders>> {
 
-        if (window.ai.assistant || window.ai.languageModel) {
-            const assistant = window.ai.assistant ?? window.ai.languageModel
-            const capabilities = await assistant.capabilities()
-            if (capabilities.available === 'readily') {
-                return new GeminiAssistant(await assistant.create())
-            } else {
-                console.warn('AI Assistant 当前不可用: ', capabilities)
-            }
-        }
-
         if (window.ai.summarizer) {
             const summarizer = window.ai.summarizer
             const capabilities = await summarizer.capabilities()
             if (capabilities.available === 'readily') {
+                console.debug('using gemini summarizer')
                 return new GeminiSummarizer(await summarizer.create())
             } else {
                 console.warn('AI Summarizer 当前不可用: ', capabilities)
             }
         }
 
+        if (window.ai.assistant || window.ai.languageModel) {
+            const assistant = window.ai.assistant ?? window.ai.languageModel
+            const capabilities = await assistant.capabilities()
+            if (capabilities.available === 'readily') {
+                console.debug('using gemini assistant')
+                return new GeminiAssistant(await assistant.create())
+            } else {
+                console.warn('AI Assistant 当前不可用: ', capabilities)
+            }
+        }
+
         throw new Error('你的浏览器 AI 功能当前不可用')
     }
 }
@@ -59,10 +70,12 @@ class GeminiAssistant implements Session<LLMProviders> {
     constructor(private readonly assistant: AIAssistant) { }
 
     prompt(chat: string): Promise<string> {
+        console.debug('[assistant] prompting: ', chat)
         return this.assistant.prompt(chat)
     }
 
     async *promptStream(chat: string): AsyncGenerator<string> {
+        console.debug('[assistant] prompting stream: ', chat)
         const stream = this.assistant.promptStreaming(chat)
         for await (const chunk of stream) {
             yield chunk
@@ -80,10 +93,12 @@ class GeminiSummarizer implements Session<LLMProviders> {
     constructor(private readonly summarizer: AISummarizer) { }
 
     prompt(chat: string): Promise<string> {
+        console.debug('[summarizer] summarizing: ', chat)
         return this.summarizer.summarize(chat)
     }
 
     async *promptStream(chat: string): AsyncGenerator<string> {
+        console.debug('[summarizer] summarizing stream: ', chat)
         const stream = this.summarizer.summarizeStreaming(chat)
         for await (const chunk of stream) {
             yield chunk
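
Two things stand out in this file. First, asSession now prefers the Summarizer over the Assistant/languageModel. Second, promptStream previously did `return session.promptStream(chat)` inside an `async *` function; that hands the inner generator back as the generator's return value rather than yielding its items, so callers iterating the stream received no chunks, and the `finally` block disposed the session before anything was read. The rewritten loop delegates chunk by chunk and only disposes once the stream is drained. A minimal, self-contained sketch of the difference (the `inner`/`broken`/`fixed` names are illustrative, not from the repo):

```ts
// Illustrative sketch — names are hypothetical, not from the commit.
async function* inner(): AsyncGenerator<string> {
    yield 'Hello, '
    yield 'world'
}

// Bug pattern: the inner generator becomes the *return value* of `broken`,
// nothing is ever yielded, and cleanup runs before any chunk is consumed.
async function* broken(): AsyncGenerator<string> {
    try {
        return inner()
    } finally {
        console.debug('disposed before any chunk was read')
    }
}

// Fix: delegate the chunks (equivalently `yield* inner()`), so the `finally`
// cleanup only runs after the caller has drained the stream.
async function* fixed(): AsyncGenerator<string> {
    try {
        for await (const chunk of inner()) yield chunk
    } finally {
        console.debug('disposed after the stream was drained')
    }
}

// for await (const c of broken()) console.log(c)  // logs nothing
// for await (const c of fixed()) console.log(c)   // logs both chunks
```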

src/llms/index.ts (+2 −1)

@@ -5,13 +5,14 @@ import nano from './gemini-nano'
 import worker from './remote-worker'
 
 export interface LLMProviders {
+    cumulative: boolean
     validate(): Promise<void>
     prompt(chat: string): Promise<string>
     promptStream(chat: string): AsyncGenerator<string>
     asSession(): Promise<Session<LLMProviders>>
 }
 
-export type Session<T> = Disposable & Omit<T, 'asSession' | 'validate'>
+export type Session<T> = Disposable & Omit<T, 'asSession' | 'validate' | 'cumulative'>
 
 const llms = {
     cloudflare,
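
Every provider now declares a `cumulative` flag (true for CloudFlareAI and RemoteWorker, false for GeminiNano), and the flag is stripped from the Session type along with asSession/validate. The consumer of the flag is not part of the diffs shown here, so its exact semantics are an open question; one plausible reading is that it tells the UI whether streamed chunks are deltas to accumulate or already contain the full text so far. A hedged sketch of that reading — an assumption only, with `renderStream` and `onText` as made-up names:

```ts
// Assumption-only sketch of how a consumer might interpret `cumulative`.
import type { LLMProviders } from '~llms'

async function renderStream(
    llm: LLMProviders,
    chat: string,
    onText: (text: string) => void,
): Promise<void> {
    let text = ''
    for await (const chunk of llm.promptStream(chat)) {
        // Cumulative providers are assumed to emit deltas that we append;
        // non-cumulative ones emit the whole response so far, which replaces it.
        text = llm.cumulative ? text + chunk : chunk
        onText(text)
    }
}
```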

src/llms/remote-worker.ts (+3 −1)

@@ -11,6 +11,8 @@ export default class RemoteWorker implements LLMProviders {
         this.model = settings.model || undefined
     }
 
+    cumulative: boolean = true
+
     async validate(): Promise<void> {
         const res = await fetch('https://llm.ericlamm.xyz/status')
         const json = await res.json()
@@ -55,5 +57,5 @@ export default class RemoteWorker implements LLMProviders {
             [Symbol.dispose]: () => { }
         }
     }
-
+
 }

src/options/fragments/llm.tsx (+33 −18)

@@ -1,5 +1,5 @@
 import { Button, Input, Tooltip, Typography } from "@material-tailwind/react"
-import { Fragment, useState, type ChangeEvent } from "react"
+import { Fragment, useState, type ChangeEvent, type ReactNode } from "react"
 import { toast } from "sonner/dist"
 import type { StateProxy } from "~hooks/binding"
 import type { LLMTypes } from "~llms"
@@ -29,6 +29,29 @@ export const description = [
     '技术提供默认为公共的服务器,质量可能不稳定,建议设置为 Cloudflare 作为技术提供来源。'
 ]
 
+
+function Hints({ children }: { children: ReactNode }): JSX.Element {
+    return (
+        <Typography
+            className="flex items-center gap-1 font-normal dark:text-gray-200 col-span-2"
+        >
+            <svg
+                xmlns="http://www.w3.org/2000/svg"
+                viewBox="0 0 24 24"
+                fill="currentColor"
+                className="-mt-px h-6 w-6"
+            >
+                <path
+                    fillRule="evenodd"
+                    d="M2.25 12c0-5.385 4.365-9.75 9.75-9.75s9.75 4.365 9.75 9.75-4.365 9.75-9.75 9.75S2.25 17.385 2.25 12zm8.706-1.442c1.146-.573 2.437.463 2.126 1.706l-.709 2.836.042-.02a.75.75 0 01.67 1.34l-.04.022c-1.147.573-2.438-.463-2.127-1.706l.71-2.836-.042.02a.75.75 0 11-.671-1.34l.041-.022zM12 9a.75.75 0 100-1.5.75.75 0 000 1.5z"
+                    clipRule="evenodd"
+                />
+            </svg>
+            {children}
+        </Typography>
+    )
+}
+
 function LLMSettings({ state, useHandler }: StateProxy<SettingSchema>): JSX.Element {
 
     const [validating, setValidating] = useState(false)
@@ -63,24 +86,10 @@ function LLMSettings({ state, useHandler }: StateProxy<SettingSchema>): JSX.Elem
             />
             {state.provider === 'cloudflare' && (
                 <Fragment>
-                    <Typography
-                        className="flex items-center gap-1 font-normal dark:text-gray-200 col-span-2"
-                    >
-                        <svg
-                            xmlns="http://www.w3.org/2000/svg"
-                            viewBox="0 0 24 24"
-                            fill="currentColor"
-                            className="-mt-px h-6 w-6"
-                        >
-                            <path
-                                fillRule="evenodd"
-                                d="M2.25 12c0-5.385 4.365-9.75 9.75-9.75s9.75 4.365 9.75 9.75-4.365 9.75-9.75 9.75S2.25 17.385 2.25 12zm8.706-1.442c1.146-.573 2.437.463 2.126 1.706l-.709 2.836.042-.02a.75.75 0 01.67 1.34l-.04.022c-1.147.573-2.438-.463-2.127-1.706l.71-2.836-.042.02a.75.75 0 11-.671-1.34l.041-.022zM12 9a.75.75 0 100-1.5.75.75 0 000 1.5z"
-                                clipRule="evenodd"
-                            />
-                        </svg>
+                    <Hints>
                         <Typography className="underline" as="a" href="https://linux.do/t/topic/34037" target="_blank">点击此处</Typography>
                         查看如何获得 Cloudflare API Token 和 Account ID
-                    </Typography>
+                    </Hints>
                     <Input
                         data-testid="cf-account-id"
                         crossOrigin="anonymous"
@@ -113,9 +122,15 @@ function LLMSettings({ state, useHandler }: StateProxy<SettingSchema>): JSX.Elem
                             { label: '@cf/qwen/qwen1.5-1.8b-chat', value: '@cf/qwen/qwen1.5-1.8b-chat' },
                             { label: '@hf/google/gemma-7b-it', value: '@hf/google/gemma-7b-it' },
                             { label: '@hf/nousresearch/hermes-2-pro-mistral-7b', value: '@hf/nousresearch/hermes-2-pro-mistral-7b' }
-                        ]}
+                        ]}
                     />
                 )}
+                {state.provider === 'nano' && (
+                    <Hints>
+                        <Typography className="underline" as="a" href="https://juejin.cn/post/7401036139384143910" target="_blank">点击此处</Typography>
+                        查看如何启用 Chrome 浏览器内置 AI
+                    </Hints>
+                )}
                 <div className="col-span-2">
                     <Button disabled={validating} onClick={onValidate} color="blue" size="lg" className="group flex items-center justify-center gap-3 text-[1rem] hover:shadow-lg">
                         验证是否可用

src/options/index.tsx (+1 −1)

@@ -17,7 +17,7 @@ import { toast } from 'sonner/dist';
 import PromiseHandler from '~components/PromiseHandler';
 import Tutorial, { type TutorialRefProps, type TutorialStep } from '~components/Tutorial';
 import GenericContext from '~contexts/GenericContext';
-import { useFileInput } from '~hooks/input';
+import { useFileInput } from '~hooks/form';
 import { useStorageWatch } from '~hooks/storage';
 import { getMV2Settings, removeAllMV2Settings } from '~migrations';
 import injectToaster from '~toaster';

src/style.css (+7 −3)

@@ -4,9 +4,13 @@
 @tailwind components;
 @tailwind utilities;
 
-
-* {
-    font-family: Microsoft JhengHei;
+@layer base {
+    ul, ol, menu {
+        list-style: revert;
+    }
+    * {
+        font-family: Microsoft JhengHei;
+    }
 }
 
 
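
Tailwind's preflight base styles set `list-style: none` on `ul`, `ol` and `menu`, so lists produced by the Markdown-rendered AI responses would otherwise display without bullets or numbers; reverting them inside `@layer base` is presumably the motivation here, and keeps the rule overridable by utility classes.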
