修复GLM流式输出内容问题 (Fix GLM streaming output content issue)

This commit is contained in:
aixianling
2023-06-01 17:12:10 +08:00
parent 311e91a833
commit 32592f3d6d
2 changed files with 10 additions and 33 deletions

View File

@@ -164,7 +164,7 @@ export class ChatGLM extends BaseModel {
const prompt = history.pop()
const url = ChatGLM.base + "/model/v1/open/engines/sse/chatGLM/chatGLM"
return await axios.post(url, JSON.stringify({
history, prompt, temperature: 1, top_p: 0.6, requestTaskNo: this.taskId
history, prompt, temperature: 0.2, requestTaskNo: this.taskId
}), {
headers: this.headers,
}).then(res => res?.body?.getReader());
@@ -193,11 +193,9 @@ export class ChatGLM extends BaseModel {
return;
}
const decode = new TextDecoder().decode(value)
const contents = decode.split("event:finish")[0].split("\n")
if (contents.length > 0) {
console.log(contents)
chat.msg = contents.filter(e => e.startsWith("data:") && e.trim() != "data:")?.map(e => e.replace(/data:/, '')).join("\n") || ""
}
const dialogue = decode.split("event:").at(-1)
const msg = dialogue.split("\n").filter(e => e.startsWith("data:"))?.map(e => e.replace("data:", '')).join("\n")
if (msg?.length > 0) chat.msg = msg
return this.streamOutput(reader, chat)
})
}