From 7a26dc381eb8cd3fd7ef14b72e7cece845d7c411 Mon Sep 17 00:00:00 2001
From: Vinlic
Date: Tue, 19 Mar 2024 01:45:04 +0800
Subject: [PATCH] Support safety interception notices and fix multi-turn
 conversation errors
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 src/api/controllers/chat.ts | 43 +++++++++++++++++++++++++++++++-----------
 1 file changed, 31 insertions(+), 12 deletions(-)

diff --git a/src/api/controllers/chat.ts b/src/api/controllers/chat.ts
index 8529ba5..7bc352a 100644
--- a/src/api/controllers/chat.ts
+++ b/src/api/controllers/chat.ts
@@ -181,6 +181,9 @@ async function createCompletion(messages: any[], refreshToken: string, assistant
         responseType: 'stream'
     });
 
+    if(result.headers['content-type'].indexOf('text/event-stream') == -1)
+        throw new APIException(EX.API_REQUEST_FAILED, `Stream response Content-Type invalid: ${result.headers['content-type']}`);
+
     const streamStartTime = util.timestamp();
     // 接收流为输出文本
     const answer = await receiveStream(result.data);
@@ -247,6 +250,24 @@ async function createCompletionStream(messages: any[], refreshToken: string, ass
         responseType: 'stream'
     });
 
+    if(result.headers['content-type'].indexOf('text/event-stream') == -1) {
+        logger.error(`Invalid response Content-Type:`, result.headers['content-type']);
+        const transStream = new PassThrough();
+        transStream.end(`data: ${JSON.stringify({
+            id: '',
+            model: MODEL_NAME,
+            object: 'chat.completion.chunk',
+            choices: [
+                {
+                    index: 0, delta: { role: 'assistant', content: '服务暂时不可用,第三方响应错误' }, finish_reason: 'stop'
+                }
+            ],
+            usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 2 },
+            created: util.unixTimestamp()
+        })}\n\n`);
+        return transStream;
+    }
+
     const streamStartTime = util.timestamp();
     // 创建转换流将消息格式转换为gpt兼容格式
     return createTransStream(result.data, (convId: string) => {
@@ -304,18 +325,16 @@ function extractRefFileUrls(messages: any[]) {
  * @param messages 参考gpt系列消息格式,多轮对话请完整提供上下文
  */
 function messagesPrepare(messages: any[], refs: any[]) {
-    const headPrompt = '使用”你“这个角色回复”我“这个角色,以第一人称对话,不要携带”我:“以及"你:"\n';
     const content = messages.reduce((content, message) => {
         if (_.isArray(message.content)) {
             return message.content.reduce((_content, v) => {
                 if (!_.isObject(v) || v['type'] != 'text')
                     return _content;
                 return _content + (v['text'] || '');
-            }, content);
+            }, content) + '\n';
         }
-        const role = message.role.replace('user', '我').replace('assistant', '你') || '我';
-        return content += `${role}:${message.content}\n你:`;
-    }, '');
+        return content += `${message.role.replace('system', '<|system|>').replace('assistant', '<|assistant|>').replace('user', '<|user|>')}\n${message.content}\n`;
+    }, '') + '<|assistant|>\n';
     const fileRefs = refs.filter(ref => !ref.width && !ref.height);
     const imageRefs = refs.filter(ref => ref.width || ref.height).map(ref => {
         ref.image_url = ref.file_url;
@@ -325,7 +344,7 @@ function messagesPrepare(messages: any[], refs: any[]) {
         {
             role: 'user',
             content: [
-                { type: 'text', text: headPrompt + content },
+                { type: 'text', text: content.replace(/\!\[.+\]\(.+\)/g, '') },
                 ...(fileRefs.length == 0 ? [] : [{
                     type: 'file',
                     file: fileRefs
@@ -492,7 +511,7 @@ async function receiveStream(stream: any): Promise<any> {
                             return innerStr + searchText;
                         }
                         else if(type == 'image' && _.isArray(image) && status == 'finish') {
-                            const imageText = image.reduce((imgs, v) => imgs + `![图像](${v.image_url || ''})`, '') + '\n';
+                            const imageText = image.reduce((imgs, v) => imgs + (/^(http|https):\/\//.test(v.image_url) ? `![图像](${v.image_url || ''})` : ''), '') + '\n';
                             textOffset += imageText.length;
                             toolCall = true;
                             return innerStr + imageText;
@@ -537,8 +556,6 @@ function createTransStream(stream: any, endCallback?: Function) {
     let content = '';
     let toolCall = false;
     let textOffset = 0;
-    let sourceTagCheck = false;
-    let outputTemp = '';
     !transStream.closed && transStream.write(`data: ${JSON.stringify({
         id: '',
         model: MODEL_NAME,
@@ -555,7 +572,7 @@ function createTransStream(stream: any, endCallback?: Function) {
             const result = _.attempt(() => JSON.parse(event.data));
             if (_.isError(result))
                 throw new Error(`Stream response invalid: ${event.data}`);
-            if(result.status != 'finish') {
+            if(result.status != 'finish' && result.status != 'intervene') {
                 const text = result.parts.reduce((str, part) => {
                     const { status, content, meta_data } = part;
                     if(!_.isArray(content))
@@ -577,7 +594,7 @@ function createTransStream(stream: any, endCallback?: Function) {
                             return innerStr + searchText;
                         }
                         else if(type == 'image' && _.isArray(image) && status == 'finish') {
-                            const imageText = image.reduce((imgs, v) => imgs + `![图像](${v.image_url || ''})`, '') + '\n';
+                            const imageText = image.reduce((imgs, v) => imgs + (/^(http|https):\/\//.test(v.image_url) ? `![图像](${v.image_url || ''})` : ''), '') + '\n';
                             textOffset += imageText.length;
                             toolCall = true;
                             return innerStr + imageText;
@@ -608,7 +625,9 @@ function createTransStream(stream: any, endCallback?: Function) {
                 object: 'chat.completion.chunk',
                 choices: [
                     {
-                        index: 0, delta: {}, finish_reason: 'stop'
+                        index: 0,
+                        delta: result.status == 'intervene' && result.last_error && result.last_error.intervene_text ? { content: `\n\n${result.last_error.intervene_text}` } : {},
+                        finish_reason: 'stop'
                     }
                 ],
                 usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 2 },
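
Note on the guard this patch adds in both completion paths: before the upstream reply is treated as a server-sent-event stream, its Content-Type is checked; the non-streaming path throws an APIException, while the streaming path returns a single OpenAI-compatible chunk so clients still receive a well-formed response instead of a hung connection. Below is a minimal, self-contained TypeScript sketch of that fallback pattern. It mirrors names used in the diff (PassThrough, MODEL_NAME), but the MODEL_NAME value and the axios-style lowercase header lookup are assumptions for illustration, not part of the patch.

    import { PassThrough } from 'stream';

    const MODEL_NAME = 'glm';  // placeholder model identifier, assumed for this sketch

    // True when the upstream reply really is an SSE stream.
    function isEventStream(headers: Record<string, any>): boolean {
        return String(headers['content-type'] || '').indexOf('text/event-stream') != -1;
    }

    // Fallback used when it is not: a single chat.completion.chunk that carries the
    // error text and immediately finishes, mirroring the transStream.end(...) call above.
    function fallbackChunkStream(message: string): PassThrough {
        const transStream = new PassThrough();
        transStream.end(`data: ${JSON.stringify({
            id: '',
            model: MODEL_NAME,
            object: 'chat.completion.chunk',
            choices: [
                { index: 0, delta: { role: 'assistant', content: message }, finish_reason: 'stop' }
            ],
            usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 2 },
            created: Math.floor(Date.now() / 1000)
        })}\n\n`);
        return transStream;
    }

    // Usage inside a streaming handler (result assumed to come from an axios request):
    // if (!isEventStream(result.headers))
    //     return fallbackChunkStream('服务暂时不可用,第三方响应错误');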