Fix GLM streaming output content issue

aixianling
2023-06-01 17:12:10 +08:00
parent 311e91a833
commit 32592f3d6d
2 changed files with 10 additions and 33 deletions


@@ -1,5 +1,5 @@
<template>
<section class="chatContent" @scroll="onScroll">
<section class="chatContent">
<div class="chat-wrapper" v-for="item in list" :key="item.id">
<div class="chat-friend" v-if="item.uid !== 'me'">
<div class="chat-text" v-if="item.chatType == 0">
@@ -41,32 +41,14 @@ export default {
props: {
list: {default: () => []}
},
data() {
return {
demo: "要针对一个CSS滚动到元素的最底部可以使用以下JS代码\n" +
"\n" +
"``` javascript\n" +
"var element = document.getElementById('yourElementId');\n" +
"scrollToBottom(element);\n" +
"\n" +
"function scrollToBottom(element) {\n" +
" element.scrollTop = element.scrollHeight - element.clientHeight;\n" +
"}\n" +
"```\n" +
"\n" +
"这个代码首先获取一个具有给定ID的元素然后调用名为`scrollToBottom`的函数,该函数将元素滚动到底部。在`scrollToBottom`函数中,`scrollTop`属性设置为`element.scrollHeight - element.clientHeight`的结果,这将把元素滚动到它的最底部。"
watch: {
list: {
deep: true, handler() {
this.scrollBottom()
}
}
},
methods: {
onScroll() {
const scrollDom = this.$el;
const scrollTop = scrollDom.scrollTop;
const offsetHeight = scrollDom.offsetHeight;
const scrollHeight = scrollDom.scrollHeight;
// When scrolled to the bottom, set isAutoScroll to true
this.isAutoScroll = scrollTop + offsetHeight === scrollHeight;
scrollDom.scrollTop = scrollHeight - scrollDom.clientHeight
},
scrollBottom() {
this.$el.scrollTop = this.$el.scrollHeight - this.$el.clientHeight
},
@@ -75,9 +57,6 @@ export default {
this.$message.success("Copied")
}
}
},
created() {
}
}
</script>
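
Taken together, the component-side change removes the manual `@scroll` handler and the hard-coded `demo` answer, replacing them with a deep watcher on `list` that snaps the view to the bottom whenever a message changes. A minimal sketch of that pattern in isolation (a Vue 2 options-style component; the `$nextTick` wrapper is an assumption added here so the DOM has rendered before measuring, whereas the commit calls `scrollBottom()` synchronously):

```javascript
// Minimal sketch: auto-scroll a chat container when its message list changes.
// Assumes the component's root element ($el) is the scrollable container,
// as in the committed component.
export default {
  props: {
    list: {default: () => []}
  },
  watch: {
    list: {
      deep: true, // fire on in-place mutations, e.g. streaming into chat.msg
      handler() {
        // Assumption, not in the commit: defer until the DOM has updated.
        this.$nextTick(() => this.scrollBottom())
      }
    }
  },
  methods: {
    scrollBottom() {
      this.$el.scrollTop = this.$el.scrollHeight - this.$el.clientHeight
    }
  }
}
```

The deep watcher matters here because streaming mutates `chat.msg` in place rather than pushing new array items, and a shallow watcher would never fire.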


@@ -164,7 +164,7 @@ export class ChatGLM extends BaseModel {
const prompt = history.pop()
const url = ChatGLM.base + "/model/v1/open/engines/sse/chatGLM/chatGLM"
return await axios.post(url, JSON.stringify({
history, prompt, temperature: 1, top_p: 0.6, requestTaskNo: this.taskId
history, prompt, temperature: 0.2, requestTaskNo: this.taskId
}), {
headers: this.headers,
}).then(res => res?.body?.getReader());
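
On the request side, the commit only tightens sampling: `temperature` drops from 1 to 0.2 and `top_p` is removed, trading variety for more deterministic replies. Note that the code expects the response to expose `body.getReader()`, which stock axios does not provide, so the project is presumably running a fetch-style adapter. A sketch of the equivalent call with plain `fetch`, whose `Response` has that method natively (the function name is illustrative):

```javascript
// Sketch: POST the prompt and obtain a streaming reader over the SSE body.
// `headers` and `taskId` stand in for this.headers and this.taskId above.
async function requestStream(history, headers, taskId) {
  const prompt = history.pop()
  const url = ChatGLM.base + "/model/v1/open/engines/sse/chatGLM/chatGLM"
  const res = await fetch(url, {
    method: "POST",
    headers,
    body: JSON.stringify({history, prompt, temperature: 0.2, requestTaskNo: taskId})
  })
  return res.body.getReader() // ReadableStreamDefaultReader over raw bytes
}
```
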
@@ -193,11 +193,9 @@ export class ChatGLM extends BaseModel {
return;
}
const decode = new TextDecoder().decode(value)
const contents = decode.split("event:finish")[0].split("\n")
if (contents.length > 0) {
console.log(contents)
chat.msg = contents.filter(e => e.startsWith("data:") && e.trim() != "data:")?.map(e => e.replace(/data:/, '')).join("\n") || ""
}
const dialogue = decode.split("event:").at(-1)
const msg = dialogue.split("\n").filter(e => e.startsWith("data:"))?.map(e => e.replace("data:", '')).join("\n")
if (msg?.length > 0) chat.msg = msg
return this.streamOutput(reader, chat)
})
}
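
The parsing change is the actual fix. The old code split each chunk on `event:finish`, kept everything before it, and joined every `data:` line it found, which could concatenate data from several partial events into one garbled message. The new code keeps only the last `event:` block in the chunk, extracts its `data:` lines, and overwrites `chat.msg` only when the result is non-empty, so empty keep-alive frames no longer wipe the message. The same extraction as a standalone function (the function name and the sample event names are illustrative):

```javascript
// Sketch: pull the message carried by the most recent SSE event in a chunk.
// Returns null for empty frames so the caller can keep the previous chat.msg.
function extractLatestMessage(chunk) {
  const dialogue = chunk.split("event:").at(-1)   // last event block only
  const msg = dialogue
    .split("\n")
    .filter(line => line.startsWith("data:"))     // keep SSE data lines
    .map(line => line.replace("data:", ""))       // strip the field name
    .join("\n")
  return msg.length > 0 ? msg : null
}

// Example: a chunk carrying two events; only the newer text survives.
const chunk = "event:add\ndata:Hel\n\nevent:add\ndata:Hello, world\n"
console.log(extractLatestMessage(chunk))          // "Hello, world"
```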