Skip to content

Commit

Permalink
feat: support gpt3.5-turbo-16k and gpt4
Browse files Browse the repository at this point in the history
  • Loading branch information
zhangferry committed Jun 15, 2023
1 parent cb2d221 commit 32a706c
Show file tree
Hide file tree
Showing 9 changed files with 117 additions and 87 deletions.
Binary file modified packages/SummarAI-chromium.zip
Binary file not shown.
36 changes: 25 additions & 11 deletions packages/chromium/options.js
Original file line number Diff line number Diff line change
Expand Up @@ -23938,6 +23938,28 @@ https://www.viki.com
// Reads the extension's version string straight out of the packaged manifest.
function getExtensionVersion() {
  const manifest = import_webextension_polyfill2.default.runtime.getManifest();
  return manifest.version;
}
// Catalog of OpenAI models the extension can drive, paired with each
// model's context-window size (maxTokens) as [name, limit] tuples.
var availableModels = [
  ["gpt-3.5-turbo", 4096],
  ["gpt-3.5-turbo-16k", 16384],
  ["gpt-4", 8192],
  ["gpt-4-32k", 32768],
  ["text-davinci-003", 4097]
].map(([name, maxTokens]) => ({ name, maxTokens }));

// node_modules/preact/jsx-runtime/dist/jsxRuntime.module.js
init_preact_module();
Expand Down Expand Up @@ -24065,15 +24087,7 @@ https://www.viki.com
const [config] = await Promise.all([getProviderConfigs()]);
return { config };
});
const models = [
"gpt-3.5-turbo",
"gpt-3.5-turbo-0301",
"text-davinci-003"
// 'text-curie-001',
// 'text-babbage-001',
// 'text-ada-001',
// 'text-chat-davinci-002-20221122',
];
const models = availableModels.map((model) => model.name);
if (query.isLoading) {
return /* @__PURE__ */ o3(spinner_default2, {});
}
Expand All @@ -24087,13 +24101,13 @@ https://www.viki.com
// src/options/components/Header.tsx
function Header() {
return /* @__PURE__ */ o3(k, { children: /* @__PURE__ */ o3("nav", { className: "glarity--flex glarity--flex-row glarity--justify-between glarity--items-center glarity--mt-5 glarity--px-2", children: [
/* @__PURE__ */ o3("div", { className: "glarity--flex glarity--flex-row glarity--items-center glarity--gap-2", children: /* @__PURE__ */ o3("a", { href: "https://glarity.app/", target: "_blank", rel: "noreferrer", children: [
/* @__PURE__ */ o3("div", { className: "glarity--flex glarity--flex-row glarity--items-center glarity--gap-2", children: /* @__PURE__ */ o3("a", { href: "https://github.com/zhangferry/SummarAI", target: "_blank", rel: "noreferrer", children: [
/* @__PURE__ */ o3(
"img",
{
src: logo_default,
className: "glarity--w-10 glarity--h-10 glarity--rounded-lg",
style: { "vertical-align": "middle" }
style: { verticalAlign: "middle" }
}
),
/* @__PURE__ */ o3("span", { className: "font-semibold", children: [
Expand Down
67 changes: 46 additions & 21 deletions packages/chromium/popup.js
Original file line number Diff line number Diff line change
Expand Up @@ -28226,13 +28226,46 @@

// src/config/index.ts
var import_webextension_polyfill = __toESM(require_browser_polyfill());
// Loads the active provider choice plus the stored GPT-3 settings from
// extension local storage. Falls back to ChatGPT when no provider was saved.
async function getProviderConfigs() {
  const storage = import_webextension_polyfill.default.storage.local;
  const { provider = "chatgpt" /* ChatGPT */ } = await storage.get("provider");
  // GPT-3 settings are kept under a namespaced key regardless of which
  // provider is currently active.
  const gpt3Key = `provider:${"gpt3" /* GPT3 */}`;
  const stored = await storage.get(gpt3Key);
  return {
    provider,
    configs: { ["gpt3" /* GPT3 */]: stored[gpt3Key] }
  };
}
var BASE_URL = "https://chat.openai.com";

// src/utils/utils.ts
var import_webextension_polyfill2 = __toESM(require_browser_polyfill());
var isSafari = /^((?!chrome|android).)*safari/i.test(navigator.userAgent);
var isFirefox = navigator.userAgent.indexOf("Firefox") != -1;
var isIOS = /iPad|iPhone|iPod/.test(navigator.userAgent);
// Catalog of selectable OpenAI models; maxTokens is each model's
// context-window size and is used by requestSummary to truncate the
// article text before building the prompt.
var availableModels = [
{
name: "gpt-3.5-turbo",
maxTokens: 4096
},
{
name: "gpt-3.5-turbo-16k",
maxTokens: 16384
},
{
name: "gpt-4",
maxTokens: 8192
},
{
name: "gpt-4-32k",
maxTokens: 32768
},
{
// Legacy completions model; note its limit is 4097, not 4096.
name: "text-davinci-003",
maxTokens: 4097
}
];
async function* streamAsyncIterable(stream) {
const reader = stream.getReader();
try {
Expand Down Expand Up @@ -28542,20 +28575,19 @@ ${replylanguagePrompt("zh-CN")}
// src/popup/popup.ts
var import_parser = __toESM(require_mercury());
document.addEventListener("DOMContentLoaded", () => {
const tokenLimit = 4096;
const defaultTokenLimit = 4096;
async function fetchData(response, promptType) {
const loadingElement = document.getElementById("loading");
loadingElement.style.display = "block";
const result = await import_parser.default.parse(response.url, { contentType: "text" });
console.log(`extract content: ${result.content}`);
const question = truncateText(result.content, tokenLimit);
try {
const promptTemplate = promptType === 0 /* Summary */ ? summerDefaultPrompt : zettelkastenPrompt;
const combinedPrompt = articlePrompt({
content: question,
content: result.content,
prompt: promptTemplate
});
await getContentBasedOnType(combinedPrompt, displayAnswer);
await requestSummary(combinedPrompt, displayAnswer);
} catch (error) {
displayError(error.message);
} finally {
Expand Down Expand Up @@ -28587,32 +28619,26 @@ ${replylanguagePrompt("zh-CN")}
}
return truncatedText;
}
async function getContentBasedOnType(prompt, callback) {
async function requestSummary(content, callback) {
const controller = new AbortController();
let allValue = await import_webextension_polyfill4.default.storage.local.get(null);
console.log(`allvalue: ${JSON.stringify(allValue)}`);
const providerKey = "provider";
let providerValue = await import_webextension_polyfill4.default.storage.local.get(providerKey);
providerValue = providerValue[providerKey];
const configKey = `${providerKey}:` + providerValue;
let providerConfig = await import_webextension_polyfill4.default.storage.local.get(configKey);
providerConfig = providerConfig[configKey];
console.log(JSON.stringify(providerConfig));
const providerConfigs = await getProviderConfigs();
console.log(`providerConfigs: ${JSON.stringify(providerConfigs)}`);
let prompt;
let provider;
if (`${providerValue}` == "gpt3") {
const apiKey = providerConfig["apiKey"];
if (providerConfigs.provider == "gpt3" /* GPT3 */) {
const { apiKey, model } = providerConfigs.configs["gpt3" /* GPT3 */];
if (!apiKey) {
throw new Error(`You should config API Key first`);
}
var model = "gpt-3.5-turbo";
if (providerConfig["model"]) {
model = providerConfig["model"];
}
const currentModel = availableModels.find((theModel) => theModel.name === model);
prompt = truncateText(content, currentModel.maxTokens);
provider = new OpenAIProvider(apiKey, model);
} else {
prompt = truncateText(content, defaultTokenLimit);
const token = await getChatGPTAccessToken();
provider = new ChatGPTProvider(token);
}
console.log(`prompt content: ${prompt}`);
const { cleanup } = await provider.generateAnswer({
prompt,
signal: controller.signal,
Expand Down Expand Up @@ -28648,7 +28674,6 @@ ${replylanguagePrompt("zh-CN")}
await import_webextension_polyfill4.default.scripting.executeScript({ target: { tabId: tabs[0].id }, files: ["content.js"] });
const results = await import_webextension_polyfill4.default.tabs.sendMessage(tabs[0].id, { action: "getTextContent" });
const response = results && results.textContent ? results.textContent : "";
console.log(JSON.stringify(response));
await fetchData(response, type);
}
function setupEventListeners() {
Expand Down
12 changes: 2 additions & 10 deletions src/options/ProviderSelect.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import useSWR from 'swr'
import { getProviderConfigs, ProviderConfigs, ProviderType, saveProviderConfigs } from '@/config'
import { Select as Aselect } from 'antd'
const { Option } = Aselect
import { isSafari } from '@/utils/utils'
import { isSafari, availableModels } from '@/utils/utils'

interface ConfigProps {
config: ProviderConfigs
Expand Down Expand Up @@ -137,15 +137,7 @@ function ProviderSelect() {
return { config }
})

const models = [
'gpt-3.5-turbo',
'gpt-3.5-turbo-0301',
'text-davinci-003',
// 'text-curie-001',
// 'text-babbage-001',
// 'text-ada-001',
// 'text-chat-davinci-002-20221122',
]
const models = availableModels.map(model => model.name)

if (query.isLoading) {
return <Spinner />
Expand Down
5 changes: 0 additions & 5 deletions src/options/components/CustomizePrompt.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,11 +12,6 @@ import {
customizePrompt,
customizePromptSearch,
customizePrompt1,
customizePromptClickbait,
customizePromptPage,
customizePromptComment,
customizePromptCommentAmazon,
customizePromptCommentYoutube,
} from '@/utils/prompt'

interface Props {
Expand Down
11 changes: 1 addition & 10 deletions src/options/components/EnableGlarity.tsx
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
import React from 'react'
import { useCallback, useEffect, useState } from 'preact/hooks'
import { SearchEngine } from '@/content-script/search-engine-configs'
import { Text, Card, Button, Spacer, useToasts, Checkbox } from '@geist-ui/core'
import { updateUserConfig } from '@/config'
import { changeToast, isIOS } from '@/utils/utils'
Expand All @@ -9,11 +8,10 @@ interface Props {
enableSites: string[]
setEnableSites: (site: string[]) => void
allSites: string[]
supportSites: Record<string, SearchEngine>
}

function EnableGlarity(props: Props) {
const { enableSites, setEnableSites, allSites, supportSites } = props
const { enableSites, setEnableSites, allSites } = props
const { setToast } = useToasts()
const [allSelect, setAllSelect] = useState(true)

Expand Down Expand Up @@ -60,13 +58,6 @@ function EnableGlarity(props: Props) {
onChange={onChangeSites}
className="glarity--support__sites"
>
{Object.entries(supportSites).map(([k, v]) => {
return (
<Checkbox key={k} value={v.siteValue} className="glarity--support__sites--item">
{v.siteName}
</Checkbox>
)
})}
</Checkbox.Group>
</Card.Content>
<Card.Footer>
Expand Down
4 changes: 2 additions & 2 deletions src/options/components/Header.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ function Header() {
<>
<nav className="glarity--flex glarity--flex-row glarity--justify-between glarity--items-center glarity--mt-5 glarity--px-2">
<div className="glarity--flex glarity--flex-row glarity--items-center glarity--gap-2">
<a href="https://glarity.app/" target="_blank" rel="noreferrer">
<a href="https://github.com/zhangferry/SummarAI" target="_blank" rel="noreferrer">
<img
src={logo}
className="glarity--w-10 glarity--h-10 glarity--rounded-lg"
style={{ 'vertical-align': 'middle' }}
style={{ verticalAlign: 'middle' }}
/>
<span className="font-semibold">
{AppName} (v
Expand Down
45 changes: 17 additions & 28 deletions src/popup/popup.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,14 +4,16 @@ import { Answer, Provider } from './types'
import { OpenAIProvider } from './OpenAIProvider'
import { articlePrompt, summerDefaultPrompt, zettelkastenPrompt } from './prompt'
import Parser from "@postlight/parser"
import { availableModels } from '@/utils/utils'
import { ProviderType, getProviderConfigs } from '@/config'

// Selects which prompt template fetchData applies to the extracted article.
enum PromptType {
// Plain article summary (uses summerDefaultPrompt).
Summary,
// Zettelkasten-style note extraction (uses zettelkastenPrompt).
Zettelkasten
}

document.addEventListener("DOMContentLoaded", () => {
const tokenLimit = 4096 // for gpt-3.5-turbo
const defaultTokenLimit = 4096 // for gpt-3.5-turbo

async function fetchData(response, promptType: PromptType) {

Expand All @@ -20,17 +22,14 @@ document.addEventListener("DOMContentLoaded", () => {

const result = await Parser.parse(response.url, { contentType: "text" });
console.log(`extract content: ${result.content}`)
const question = truncateText(result.content, tokenLimit)

try {

const promptTemplate = promptType === PromptType.Summary ? summerDefaultPrompt : zettelkastenPrompt

const combinedPrompt = articlePrompt({
content: question,
content: result.content,
prompt: promptTemplate})

await getContentBasedOnType(combinedPrompt, displayAnswer)
await requestSummary(combinedPrompt, displayAnswer)
} catch (error) {
displayError(error.message)
} finally {
Expand Down Expand Up @@ -74,40 +73,31 @@ document.addEventListener("DOMContentLoaded", () => {
truncatedText += char
tokenCount += charTokenCount
}

return truncatedText
}

async function getContentBasedOnType(prompt: string, callback) {

async function requestSummary(content: string, callback) {
const controller = new AbortController()

let allValue = await Browser.storage.local.get(null)
console.log(`allvalue: ${JSON.stringify(allValue)}`)

const providerKey = "provider"
let providerValue = await Browser.storage.local.get(providerKey)
providerValue = providerValue[providerKey]
const configKey = `${providerKey}:` + providerValue
let providerConfig = await Browser.storage.local.get(configKey)
providerConfig = providerConfig[configKey]
console.log(JSON.stringify(providerConfig))

const providerConfigs = await getProviderConfigs()
console.log(`providerConfigs: ${JSON.stringify(providerConfigs)}`)

let prompt: string
let provider: Provider
if (`${providerValue}` == "gpt3") {
const apiKey = providerConfig["apiKey"]
if (providerConfigs.provider == ProviderType.GPT3) {
const { apiKey, model } = providerConfigs.configs[ProviderType.GPT3]
if (!apiKey) {
throw new Error(`You should config API Key first`)
}
var model = "gpt-3.5-turbo" // default model
if (providerConfig["model"]) {
model = providerConfig["model"]
}
const currentModel = availableModels.find(theModel => theModel.name === model);
prompt = truncateText(content, currentModel.maxTokens)
provider = new OpenAIProvider(apiKey, model)
} else {
prompt = truncateText(content, defaultTokenLimit)
const token = await getChatGPTAccessToken()
provider = new ChatGPTProvider(token)
}
console.log(`prompt content: ${prompt}`)
const { cleanup } = await provider.generateAnswer({
prompt: prompt,
signal: controller.signal,
Expand All @@ -118,7 +108,6 @@ document.addEventListener("DOMContentLoaded", () => {
callback(event.data)
}
})

cleanup?.()
}

Expand Down Expand Up @@ -149,7 +138,7 @@ document.addEventListener("DOMContentLoaded", () => {
await Browser.scripting.executeScript({target: {tabId: tabs[0].id}, files: ['content.js']})
const results = await Browser.tabs.sendMessage(tabs[0].id, {action: "getTextContent"})
const response = results && results.textContent ? results.textContent : ""
console.log(JSON.stringify(response))
// console.log(JSON.stringify(response))
await fetchData(response, type)
}

Expand Down
Loading

0 comments on commit 32a706c

Please sign in to comment.