Commit 52ea0215 authored by 熊洋洋's avatar 熊洋洋

improve: extract a reusable OpenAI streaming client and use it in AIWrapper

parent bd6b7f89
import getConfig from 'next/config';
import { backendApi } from '.';
import { getModel } from '@/utils/gpt';
import { fetchEventSource, EventStreamContentType } from '@microsoft/fetch-event-source';
/** Describes a database object to render: either a whole schema or a single table. */
export interface View {
// Which kind of object this view targets.
type: 'schema' | 'table';
// Object name; may be omitted (e.g. for the default schema).
name?: string;
}
const model = getModel('gpt35');
/**
 * Format a value as a fenced ```json markdown block for display.
 *
 * Non-string values are serialized with a one-space indent. An empty
 * serialization ('{}') falls back to the value's own toString(). Input
 * that is already a ```json fence is returned unchanged.
 */
export function prettyObject(msg: any) {
const original = msg;
const text = typeof msg === 'string' ? msg : JSON.stringify(msg, null, ' ');
if (text === '{}') {
return original.toString();
}
return text.startsWith('```json') ? text : `\`\`\`json\n${text}\n\`\`\``;
}
// One chat turn in OpenAI chat-completions format.
type message = {
// Speaker role, e.g. 'user' / 'assistant' / 'system'.
role: string;
// The turn's text content.
content: string;
};
/**
 * Thin client for the project's OpenAI chat-completions proxy endpoint.
 */
export default class OpenAI {
  /**
   * Send a chat request, optionally streaming the reply via SSE.
   *
   * @param messages conversation history to send
   * @param onFinish called once with the final response text
   * @param onUpdate called on each streamed delta with (fullTextSoFar, delta)
   * @param onError  called when the request or stream fails
   * @param stream   when true (default) use server-sent events; otherwise a plain POST
   */
  static async request(
    messages: message[],
    onFinish: (responseText: string) => any,
    onUpdate: (responseText: string, delta: string) => any,
    onError: (e: Error) => any,
    stream: boolean = true
  ) {
    const requestPayload = {
      messages: messages,
      model: model.name,
      temperature: model.temperature,
      frequency_penalty: model.frequency_penalty,
      presence_penalty: model.presence_penalty,
      stream,
    };
    const controller = new AbortController();
    const chatPayload = {
      method: 'POST',
      body: JSON.stringify(requestPayload),
      signal: controller.signal,
      headers: {
        'cache-control': 'no-cache',
        'Content-Type': 'application/json',
        'x-requested-with': 'XMLHttpRequest',
        Accept: 'text/event-stream',
      },
    };
    // Abort the request if nothing comes back within 120 seconds.
    const requestTimeoutId = setTimeout(() => controller.abort(), 1000 * 120);
    const chatPath = '/openai/v1/chat/completions';
    if (stream) {
      let responseText = '';
      let finished = false;
      // Idempotent terminator: onFinish must fire exactly once, whether the
      // stream closes normally, errors early, or is aborted by the timeout.
      const finish = () => {
        if (!finished) {
          onFinish(responseText);
          finished = true;
        }
      };
      controller.signal.onabort = finish;
      fetchEventSource(chatPath, {
        ...chatPayload,
        async onopen(res) {
          clearTimeout(requestTimeoutId);
          const contentType = res.headers.get('content-type');
          console.log('[OpenAI] request response content type: ', contentType);
          // Server answered with a plain-text body instead of an event stream:
          // treat it as the complete response.
          if (contentType?.startsWith('text/plain')) {
            responseText = await res.clone().text();
            return finish();
          }
          // Error response, or wrong content type. (!res.ok already covers all
          // non-2xx statuses, so no separate status check is needed.)
          if (!res.ok || !contentType?.startsWith(EventStreamContentType)) {
            const responseTexts = [responseText];
            let extraInfo = await res.clone().text();
            try {
              // Prefer a pretty-printed JSON error body when the server sent one.
              const resJson = await res.clone().json();
              extraInfo = prettyObject(resJson);
            } catch {}
            if (extraInfo) {
              responseTexts.push(extraInfo);
            }
            responseText = responseTexts.join('\n\n');
            return finish();
          }
        },
        onmessage(msg) {
          if (msg.data === '[DONE]' || finished) {
            return finish();
          }
          const text = msg.data;
          try {
            const json = JSON.parse(text);
            // Role-only / empty chunks have no content; skip them.
            const delta = json.choices[0].delta.content;
            if (delta) {
              responseText += delta;
              onUpdate?.(responseText, delta);
            }
          } catch (e) {
            console.error('[Request] parse error', text, msg);
          }
        },
        onclose() {
          finish();
        },
        onerror(e) {
          onError?.(e);
          // Re-throw so fetchEventSource stops retrying.
          throw e;
        },
        // Keep streaming even when the tab is in the background.
        openWhenHidden: true,
      });
    } else {
      // Non-streaming: single POST, whole reply in one JSON body. Route
      // failures through onError instead of leaving the rejection unhandled.
      try {
        const res = await fetch(chatPath, chatPayload);
        const resJson = await res.json();
        const message = resJson.choices?.at(0)?.message?.content ?? '';
        onFinish(message);
      } catch (e) {
        onError?.(e as Error);
      } finally {
        // Always clear the watchdog, even when fetch/json throws.
        clearTimeout(requestTimeoutId);
      }
    }
  }
}
......@@ -25,6 +25,7 @@ import React, {
} from 'react';
import { IconRecord, IconRobot, IconSend, IconSwap, IconVoice } from '@arco-design/web-react/icon';
import { getModel } from '@/utils/gpt';
import OpenAI from '@/client/api/openAI';
// import useSpeechToText from 'react-hook-speech-to-text';
let useSpeechToText: any;
......@@ -100,6 +101,17 @@ export function AIWrapper({
// return clearContext;
}, [currentAssistantMessage, loading]);
// Debounced (50ms) smooth-scroll to the bottom of the chat container.
// Memoized with an empty dep list so the same debounced instance survives
// re-renders (otherwise each render would reset the debounce timer).
// NOTE(review): debounce() inside useCallback hides the real deps from the
// exhaustive-deps lint rule — assumes scrollContainer is a stable ref.
const toView = useCallback(
debounce(() => {
scrollContainer &&
scrollContainer?.current?.scrollIntoView({
behavior: 'smooth',
block: 'end',
});
}, 50),
[]
);
const handleButtonClick = useCallback(
(message?: string, callBack?: (m: string) => void) => {
const inputRef = input.current?.dom;
......@@ -107,108 +119,40 @@ export function AIWrapper({
if (!inputValue) {
return;
}
setLoading(true);
// @ts-ignore
inputRef.value = '';
setMessageList([
const initMessageList = [
...messageList,
{
role: 'user',
content: inputValue,
},
]);
const toView = debounce(() => {
scrollContainer &&
scrollContainer?.current?.scrollIntoView({
behavior: 'smooth',
block: 'end',
});
}, 50);
];
setMessageList(initMessageList);
// @ts-ignore
inputRef.value = '';
setLoading(true);
toView();
fetch('/openai/v1/chat/completions', {
headers: {
'cache-control': 'no-cache',
'Content-Type': 'application/json',
Accept: 'text/event-stream',
},
body: JSON.stringify({
messages: [
...messageList,
{
role: 'user',
content: inputValue,
},
],
model: model.name,
temperature: model.temperature,
frequency_penalty: model.frequency_penalty,
presence_penalty: model.presence_penalty,
stream: true,
}),
method: 'POST',
}).then(async response => {
if (!response.ok) {
// throw new Error(response.statusText);
Notification.error({
title: 'No Response',
content: undefined,
});
// setLoading(false);
}
// response.text().then((text) => {
// setCurrentAssistantMessage(text);
// });
const data = response.body;
if (!data) {
Notification.error({
title: 'No data',
content: undefined,
});
// setLoading(false);
}
const reader = data.getReader();
const decoder = new TextDecoder('utf-8');
let done = false;
let currentAssistantMessageStr = currentAssistantMessage;
while (!done) {
const content = await reader.read();
const { done: readerDone, value } = content;
if (value) {
const char = decoder.decode(value);
if (char === '\n' && currentAssistantMessageStr.endsWith('\n')) {
continue;
}
const codeBlocks = char.trim().split(`
`);
for (let i = 0; i < codeBlocks.length - 1; i++) {
currentAssistantMessageStr += get(
JSON.parse(codeBlocks[i].replace(/^data:/g, '').trim()),
'choices[0].delta.content',
''
);
setCurrentAssistantMessage(currentAssistantMessageStr);
}
// if (char) {
// currentAssistantMessageStr += char;
// setCurrentAssistantMessage(currentAssistantMessageStr);
// }
}
done = readerDone;
}
OpenAI.request(
initMessageList,
currentAssistantMessageStr => {
setTinking(true);
setTimeout(() => {
setLoading(false);
setTinking(false);
setTimeout(toView, 100);
}, 2000);
setCurrentAssistantMessage(currentAssistantMessageStr);
callBack && callBack(currentAssistantMessageStr);
doneFx && doneFx(currentAssistantMessageStr);
console.log(currentAssistantMessageStr, 'currentAssistantMessageStr');
setTimeout(toView, 100);
},
setCurrentAssistantMessage,
() => {
Notification.error({
title: 'No Response',
content: undefined,
});
}
);
},
[
loading,
......
......@@ -17,6 +17,9 @@ dependencies:
'@digitalocean/do-markdownit':
specifier: ^1.9.0
version: 1.9.0
'@microsoft/fetch-event-source':
specifier: ^2.0.1
version: 2.0.1
'@monaco-editor/react':
specifier: ^4.4.5
version: 4.5.1(monaco-editor@0.39.0)(react-dom@18.2.0)(react@18.2.0)
......@@ -1908,6 +1911,10 @@ packages:
'@jridgewell/sourcemap-codec': 1.4.14
dev: false
/@microsoft/fetch-event-source@2.0.1:
resolution: {integrity: sha512-W6CLUJ2eBMw3Rec70qrsEW0jOm/3twwJv21mrmj2yORiaVmVYGS4sSS5yUwvQc1ZlDLYGPnClVWmUUMagKNsfA==}
dev: false
/@monaco-editor/loader@1.3.3(monaco-editor@0.39.0):
resolution: {integrity: sha512-6KKF4CTzcJiS8BJwtxtfyYt9shBiEv32ateQ9T4UVogwn4HM/uPo9iJd2Dmbkpz8CM6Y0PDUpjnZzCwC+eYo2Q==}
peerDependencies:
......
const gpt35 = {
name: 'gpt-3.5-turbo-16k-0613',
name: 'gpt-3.5-turbo',
temperature: 0,
frequency_penalty: 0.0,
presence_penalty: 0.0,
......
Markdown is supported
0%
or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment