Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix/openai error #34

Merged
merged 2 commits into from
Apr 29, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions src/app/api/chat/openai/route.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,12 @@ import OpenAI, { ClientOptions } from 'openai';

import { OPENAI_API_KEY, OPENAI_END_POINT } from '@/constants/openai';
import { ErrorTypeEnum } from '@/types/api';
import { ChatStreamPayload } from '@/types/openai/chat';

import { createErrorResponse } from './createErrorResponse';

export const POST = async (req: Request) => {
const payload = await req.json();
const data = (await req.json()) as ChatStreamPayload;
const apiKey = (req.headers.get(OPENAI_API_KEY) as string) || process.env.OPENAI_API_KEY;
const baseURL = (req.headers.get(OPENAI_END_POINT) as string) || process.env.OPENAI_PROXY_URL;

Expand All @@ -21,14 +22,14 @@ export const POST = async (req: Request) => {

const openai = new OpenAI(config);

const { model, messages } = payload;
const { model, messages } = data;

try {
const completion = await openai.chat.completions.create({
messages,
model,
stream: true,
});
} as OpenAI.ChatCompletionCreateParamsStreaming);

const stream = OpenAIStream(completion);

Expand Down
15 changes: 15 additions & 0 deletions src/features/ChatItem/Actions/System.tsx
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
import { ActionIconGroup, useChatListActionsBar } from '@lobehub/ui';
import { memo } from 'react';

import type { RenderAction } from '@/features/ChatItem/type';

/**
 * Action bar rendered for messages with the `system` role.
 * Exposes only a single "delete" entry, tucked into the dropdown menu;
 * the inline item list is deliberately empty.
 */
const SystemActionsBar: RenderAction = (props) => {
  const { onActionClick } = props;
  const bar = useChatListActionsBar({ delete: '删除' });

  return (
    <ActionIconGroup
      dropdownMenu={[bar.del]}
      items={[]}
      onActionClick={onActionClick}
      type="ghost"
    />
  );
};

export default memo(SystemActionsBar);
2 changes: 2 additions & 0 deletions src/features/ChatItem/Actions/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -8,11 +8,13 @@ import { useSessionStore } from '@/store/session';
import { LLMRoleType } from '@/types/llm';

import AssistantActionsBar from './Assistant';
import SystemActionBar from './System';
import UserActionsBar from './User';

export const renderActions: Record<LLMRoleType, RenderAction> = {
assistant: AssistantActionsBar,
user: UserActionsBar,
system: SystemActionBar,
};

export const useActionsClick = (): OnActionsClick => {
Expand Down
3 changes: 2 additions & 1 deletion src/features/Settings/model/openai.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import React, { useEffect } from 'react';
import { OPENAI_MODEL_LIST } from '@/constants/openai';
import { chatCompletion } from '@/services/chat';
import { configSelectors, useConfigStore } from '@/store/config';
import { ChatMessage } from '@/types/chat';

interface ConfigProps {
className?: string;
Expand Down Expand Up @@ -83,7 +84,7 @@ const Config = (props: ConfigProps) => {
{
content: 'Hi',
role: 'user',
},
} as ChatMessage,
],
model: 'gpt-3.5-turbo',
})
Expand Down
2 changes: 1 addition & 1 deletion src/layout/StoreHydration.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ const StoreHydration = () => {
}, []);

useEffect(() => {
router.prefetch('/home');
router.prefetch('/chat');
}, [router]);
return null;
};
Expand Down
15 changes: 12 additions & 3 deletions src/services/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ import { speakCharacter } from '@/features/messages/speakCharacter';
import { configSelectors, useConfigStore } from '@/store/config';
import { sessionSelectors, useSessionStore } from '@/store/session';
import { useViewerStore } from '@/store/viewer';
import { ChatMessage } from '@/types/chat';
import { ChatStreamPayload } from '@/types/openai/chat';

const createHeader = (header?: any) => {
const config = configSelectors.currentOpenAIConfig(useConfigStore.getState());
Expand All @@ -14,18 +16,25 @@ const createHeader = (header?: any) => {
};
};

export const chatCompletion = async (payload: any) => {
interface ChatCompletionPayload extends Partial<Omit<ChatStreamPayload, 'messages'>> {
messages: ChatMessage[];
}

export const chatCompletion = async (payload: ChatCompletionPayload) => {
const config = configSelectors.currentOpenAIConfig(useConfigStore.getState());
const { messages } = payload;

const postMessages = messages.map((m) => ({ content: m.content, role: m.role }));

const res = await fetch('/api/chat/openai', {
return await fetch('/api/chat/openai', {
body: JSON.stringify({
model: config?.model,
...payload,
messages: postMessages,
}),
headers: createHeader(),
method: 'POST',
});
return res;
};

export const handleSpeakAi = async (message: string) => {
Expand Down
2 changes: 1 addition & 1 deletion src/store/session/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ export const createSessonStore: StateCreator<SessionStore, [['zustand/devtools',
{
content: currentAgent.systemRole,
role: 'system',
},
} as ChatMessage,
...messages,
],
});
Expand Down
2 changes: 1 addition & 1 deletion src/types/llm.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
export type LLMRoleType = 'user' | 'assistant';
export type LLMRoleType = 'user' | 'assistant' | 'system';

export interface LLMMessage {
content: string;
Expand Down
80 changes: 80 additions & 0 deletions src/types/openai/chat.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import { LLMRoleType } from '@/types/llm';
import { OpenAIFunctionCall } from '@/types/openai/functionCall';

// A plain-text segment of a multimodal user message.
interface UserMessageContentPartText {
  text: string;
  type: 'text';
}

// An image segment of a multimodal user message, referenced by URL.
interface UserMessageContentPartImage {
  image_url: {
    // Resolution hint for the provider; 'auto' lets it decide.
    detail?: 'auto' | 'low' | 'high';
    url: string;
  };
  type: 'image_url';
}

// Union of the content-part shapes allowed inside a user message.
export type UserMessageContentPart = UserMessageContentPartText | UserMessageContentPartImage;

// A single message in an OpenAI-style chat conversation.
export interface OpenAIChatMessage {
  /**
   * @title Content
   * @description Message content: either plain text or a list of
   * multimodal content parts (text / image).
   */
  content: string | UserMessageContentPart[];
  // Function invocation emitted by the model, when present.
  function_call?: OpenAIFunctionCall;

  // Optional author name attached to the message.
  name?: string;
  /**
   * Role
   * @description Role of the message sender (user / assistant / system).
   */
  role: LLMRoleType;
}

/**
 * @title Chat Stream Payload
 * @description Request body sent to the chat-completions endpoint.
 */
export interface ChatStreamPayload {
  /**
   * @title Penalty applied to generated text to reduce token repetition
   * @default 0
   */
  frequency_penalty?: number;
  /**
   * @title Maximum length of the generated text
   */
  max_tokens?: number;
  /**
   * @title List of chat messages forming the conversation context
   */
  messages: OpenAIChatMessage[];
  /**
   * @title Model name
   */
  model: string;
  /**
   * @title Number of completions to return
   */
  n?: number;
  /**
   * List of enabled plugins
   */
  plugins?: string[];
  /**
   * @title Penalty applied to generated text to reduce topic drift
   * @default 0
   */
  presence_penalty?: number;

  /**
   * @title Whether to enable streaming responses
   * @default true
   */
  stream?: boolean;
  /**
   * @title Sampling temperature controlling creativity and diversity of output
   * @default 0.5
   */
  temperature: number;
}
10 changes: 10 additions & 0 deletions src/types/openai/functionCall.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
// A function invocation requested by the model: the target function's
// name plus its arguments serialized as a JSON string.
export interface OpenAIFunctionCall {
  arguments: string;
  name: string;
}

// A single tool-call entry in an OpenAI API response; the only tool
// type modeled here is 'function'.
export interface OpenAIToolCall {
  function: OpenAIFunctionCall;
  id: string;
  type: 'function';
}
Loading