接入数字人和调整界面

This commit is contained in:
aixianling
2023-11-09 15:21:33 +08:00
parent 8eb0db8401
commit ad77470063
18 changed files with 290 additions and 115 deletions

View File

@@ -1,5 +1,6 @@
<template>
<chat class="fill" :config="setting" :showSettings="showSettings"
<meta-human/>
<chat class="fill mar-l8" :config="setting" :showSettings="showSettings"
@setting="showSettings=!showSettings"/>
<settings v-show="showSettings" v-model="setting"/>
</template>
@@ -7,16 +8,17 @@
<script>
import Chat from "./components/chat";
import Settings from "./components/settings";
import {ChatGPT} from "./utils/models";
import {Alpaca, ChatGLM, ChatGPT} from "./utils/models";
import MetaHuman from "./components/metaHuman.vue";
export default {
name: 'App',
components: {Settings, Chat},
components: {MetaHuman, Settings, Chat},
data() {
return {
showSettings: false,
setting: {
model: new ChatGPT(),
model: new ChatGLM(),
stream: true
},
}

View File

@@ -3,7 +3,6 @@
<div class="chat-wrapper" v-for="item in list" :key="item.id">
<div class="chat-friend" v-if="item.uid !== 'me'">
<div class="chat-text" v-if="item.chatType == 0">
<icon-copy @click="copy(item.msg, '已复制')"/>
<v-md-preview :text="optimizeMessage(item.msg)"/>
</div>
<div class="chat-img" v-if="item.chatType == 1">
@@ -13,17 +12,18 @@
<div class="info-time">
<img :src="item.avatar" alt=""/>
<span>{{ item.name }}</span>
<span>{{ item.time }}</span>
<span class="time">{{ item.time }}</span>
<icon-copy @click="copy(item.msg, '已复制')"/>
</div>
</div>
<div class="chat-me" v-else>
<div class="chat-text" v-if="item.chatType == 0">
<icon-copy @click="copy(item.msg, '已复制')"/>
<span v-text="item.msg"/>
</div>
<div class="info-time">
<div class="info-time right">
<icon-copy @click="copy(item.msg, '已复制')"/>
<span>{{ item.name }}</span>
<span>{{ item.time }}</span>
<span class="time">{{ item.time }}</span>
<img :src="item.avatar" alt=""/>
</div>
</div>
@@ -110,32 +110,6 @@ export default {
border-radius: 4px;
}
}
.info-time {
margin: 10px 0;
color: #fff;
font-size: 14px;
display: flex;
justify-content: flex-start;
img {
width: 30px;
height: 30px;
border-radius: 50%;
vertical-align: middle;
margin-right: 10px;
}
span {
line-height: 30px;
}
span:last-child {
color: rgb(101, 104, 115);
margin-left: 10px;
vertical-align: middle;
}
}
}
.chat-me {
@@ -165,29 +139,33 @@ export default {
border-radius: 4px;
}
}
}
.info-time {
margin: 10px 0;
color: #fff;
font-size: 14px;
display: flex;
.info-time {
margin: 10px 0;
color: #fff;
font-size: 14px;
display: flex;
justify-content: flex-start;
align-items: center;
gap: 8px;
&.right {
justify-content: flex-end;
}
img {
width: 30px;
height: 30px;
border-radius: 50%;
vertical-align: middle;
margin-left: 10px;
}
img {
width: 30px;
height: 30px;
border-radius: 50%;
vertical-align: middle;
}
span {
line-height: 30px;
}
span {
line-height: 30px;
span:first-child {
&.time {
color: rgb(101, 104, 115);
margin-right: 10px;
vertical-align: middle;
}
}

View File

@@ -0,0 +1,77 @@
<script>
import * as PIXI from "pixi.js";
import {load} from "../utils/tools.js";
import {Live2DModel} from "pixi-live2d-display";

// pixi-live2d-display requires PIXI to be reachable on the window object.
window.PIXI = PIXI;

/**
 * Renders a draggable Live2D "digital human" inside a transparent PIXI canvas.
 * Waits for the global Live2D runtimes (loaded via external <script> tags)
 * before instantiating the model.
 */
export default {
  name: "metaHuman",
  data() {
    return {
      ins: null // PIXI.Application instance, created in mounted()
    }
  },
  methods: {
    initLive2d() {
      // NOTE(review): the expression below is evaluated once, before load()
      // runs, so retries inside load() can never observe the runtimes
      // appearing later — pass a getter (() => window.Live2D && ...) if the
      // Cubism scripts load asynchronously. TODO confirm load order.
      load(window.Live2D && window.Live2DCubismCore)
        .then(() => Live2DModel.from("/live2d/ots14_1203/ots14_1203.model.json"))
        .then(model => {
          this.ins.stage.addChild(model)
          // Flip the model upright: rotation + skew together mirror it back.
          model.rotation = Math.PI;
          model.skew.x = Math.PI;
          model.x = 400
          model.y = 200
          model.scale.set(0.16, 0.16);
          // Play the tap motion when the body hit area is clicked.
          model.on('hit', (hitAreas) => {
            if (hitAreas.includes('body')) {
              model.motion('tap_body');
            }
          });
          this.draggable(model)
        }).catch(err => console.log(err));
    },
    // Makes the model follow the pointer while a button is held down.
    draggable(model) {
      model.buttonMode = true;
      model.on("pointerdown", (e) => {
        model.dragging = true;
        // Remember the grab offset so the model doesn't jump to the cursor.
        model._pointerX = e.data.global.x - model.x;
        model._pointerY = e.data.global.y - model.y;
      });
      model.on("pointermove", (e) => {
        if (model.dragging) {
          model.position.x = e.data.global.x - model._pointerX;
          model.position.y = e.data.global.y - model._pointerY;
        }
      });
      model.on("pointerupoutside", () => (model.dragging = false));
      model.on("pointerup", () => (model.dragging = false));
    }
  },
  mounted() {
    this.$nextTick(() => {
      this.ins = new PIXI.Application({
        autoStart: true,
        resizeTo: this.$el,
        backgroundAlpha: 0, // transparent canvas over the page background
      })
      this.$el.appendChild(this.ins.view)
      this.initLive2d()
    })
  },
  beforeUnmount() {
    // Fix: tear down the PIXI application (renderer, ticker, canvas and
    // textures) so repeated mount/unmount cycles don't leak WebGL contexts.
    if (this.ins) {
      this.ins.destroy(true, {children: true, texture: true, baseTexture: true})
      this.ins = null
    }
  }
}
</script>
<template>
  <section class="metaHuman">
  </section>
</template>
<style scoped lang="scss">
.metaHuman {
  width: 375px;
}
</style>

View File

@@ -73,7 +73,7 @@ export default {
},
getModelAccount(c = 0) {
const ai = this.settings.model
if (ai.apiKey) {
if (ai.apiKey && !this.isLocal) {
this.loadingAccount = true
ai.getAccount().then(v => this.settings.account = v).finally(() => this.loadingAccount = false)
} else if (c < 5) setTimeout(() => this.getModelAccount(++c), 1000)

View File

@@ -18,5 +18,8 @@ export default {
<style lang="scss" scoped>
.iconCopy {
height: 22px;
width: 22px;
flex-shrink: 0;
}
</style>

View File

@@ -7,10 +7,10 @@ import 'element-plus/dist/index.css'
import Hljs from "highlight.js"
import {createApp} from 'vue'
import App from './App.vue'
import axios from "./utils/axios";
import http from "./utils/http.js";
const app = createApp(App)
app.config.globalProperties.$http = axios
app.config.globalProperties.$http = http
VMdPreview.use(githubTheme, {Hljs})
app.use(ElementPlus)
app.use(VMdPreview);

View File

@@ -1,7 +1,10 @@
import qs from "query-string"
/**
 * Thin fetch wrapper: `config.params` is serialized onto the URL as a query
 * string; every other config field passes straight through to fetch().
 */
const ins = {
  post: (url, body, config = {}) => {
    const {params: query} = config
    return fetch(qs.stringifyUrl({url, query}), {...config, method: "POST", body})
  },
  // Fix: `config` now defaults to {} so a bare ins.get(url) no longer throws
  // on `config.params` (post already had this default).
  get: (url, config = {}) => fetch(qs.stringifyUrl({url, query: config.params}), {...config, method: "GET"}),
}
export default ins

View File

@@ -1,6 +1,6 @@
import {dayjs} from "element-plus";
import {nanoid} from "nanoid";
import axios from "./axios";
import http from "./http.js";
import {JSEncrypt} from "jsencrypt";
class BaseModel {
@@ -37,7 +37,7 @@ export class ChatGPT extends BaseModel {
async chat(history) {
const messages = history.map(e => ({role: e.role, content: e.msg}))
return await axios.post(ChatGPT.base + "/v1/chat/completions", JSON.stringify({messages, model: this.id}), {
return await http.post(ChatGPT.base + "/v1/chat/completions", JSON.stringify({messages, model: this.id}), {
headers: {
Authorization: 'Bearer ' + this.apiKey, "Content-Type": "application/json", Accept: "application/json",
},
@@ -46,7 +46,7 @@ export class ChatGPT extends BaseModel {
async chatStream(history) {
const messages = history.map(e => ({role: e.role, content: e.msg}))
return await axios.post(ChatGPT.base + "/v1/chat/completions", JSON.stringify({
return await http.post(ChatGPT.base + "/v1/chat/completions", JSON.stringify({
messages,
model: this.id,
stream: true
@@ -59,12 +59,12 @@ export class ChatGPT extends BaseModel {
async getAccount() {
const {headers} = this
const usages = await axios.get(ChatGPT.base + "/v1/dashboard/billing/subscription", {headers}).then(res => res.json());
const usages = await http.get(ChatGPT.base + "/v1/dashboard/billing/subscription", {headers}).then(res => res.json());
const endDate = usages.access_until
if (endDate) {
const startDate = new Date(endDate - 90 * 24 * 60 * 60);
const formattedDate = time => dayjs(time).format("YYYY-MM-DD")
return await axios.get(`${ChatGPT.base}/v1/dashboard/billing/usage?start_date=${formattedDate(startDate * 1000)}&end_date=${formattedDate(endDate * 1000)}`, {headers}).then(res => res.json()).then(res => {
return await http.get(`${ChatGPT.base}/v1/dashboard/billing/usage?start_date=${formattedDate(startDate * 1000)}&end_date=${formattedDate(endDate * 1000)}`, {headers}).then(res => res.json()).then(res => {
usages.total_usage = res.total_usage
const names = usages.account_name.split(" ")
return {
@@ -133,7 +133,7 @@ export class ChatGLM extends BaseModel {
async getToken() {
if (this.apiKey) return await this.apiKey
const encrypted = ChatGLM.encrypt(ChatGLM.publicKey)
return await axios.post(ChatGLM.base + "/passApiToken/createApiToken", JSON.stringify({
return await http.post(ChatGLM.base + "/passApiToken/createApiToken", JSON.stringify({
apiKey: "4e3ceff669c143dfa09e763663aa72cd",
encrypted
}), {
@@ -145,7 +145,7 @@ export class ChatGLM extends BaseModel {
const history = messages.map(e => e.msg)
history.pop()
const prompt = history.pop()
return await axios.post(ChatGLM.base + "/model/v1/open/engines/chatGLM/chatGLM", JSON.stringify({
return await http.post(ChatGLM.base + "/model/v1/open/engines/chatGLM/chatGLM", JSON.stringify({
history, prompt, temperature: 1, top_p: 0.6, requestTaskNo: this.taskId
}), {headers: this.headers}).then(res => res.json()).then(data => {
if (data?.data.taskStatus == 'PROCESSING') {
@@ -157,7 +157,7 @@ export class ChatGLM extends BaseModel {
}
async getChatResult(taskOrderNo) {
return await axios.get(ChatGLM.base + `/request-task/query-request-task-result/${taskOrderNo}`, {headers: this.headers}).then(res => res.json()).then(data => {
return await http.get(ChatGLM.base + `/request-task/query-request-task-result/${taskOrderNo}`, {headers: this.headers}).then(res => res.json()).then(data => {
if (data?.data.taskStatus == 'PROCESSING') {
return this.getChatResult(data.data.taskOrderNo)
} else {
@@ -171,7 +171,7 @@ export class ChatGLM extends BaseModel {
history.pop()
const prompt = history.pop()
const url = ChatGLM.base + "/model/v1/open/engines/sse/chatGLM/chatGLM"
return await axios.post(url, JSON.stringify({
return await http.post(url, JSON.stringify({
history, prompt, temperature: 0.2, requestTaskNo: this.taskId
}), {
headers: this.headers,
@@ -186,7 +186,7 @@ export class ChatGLM extends BaseModel {
}
async getAccount() {
const usages = await axios.get("https://open.bigmodel.ai/api/paas/account/query-customer-account-report", {headers: this.headers}).then(res => res.json());
const usages = await http.get("https://open.bigmodel.ai/api/paas/account/query-customer-account-report", {headers: this.headers}).then(res => res.json());
if (usages.code == 200) {
const {data} = usages
return {
@@ -216,7 +216,7 @@ export class ChatGLM extends BaseModel {
* 集成私有的Alpaca
*/
export class Alpaca extends BaseModel {
static base = "https://testai.cunwuyun.cn"
static base = "https://alpaca7b.aceykubbo.workers.dev"
static avatar = "https://cdn.cunwuyun.cn/img/logo.svg"
static name = "Alpaca"
static id = "alpaca-7b-plus"
@@ -230,7 +230,7 @@ export class Alpaca extends BaseModel {
async chat(history) {
const messages = history.map(e => ({role: e.role, content: e.msg}))
return await axios.post(Alpaca.base + "/v1/chat/completions", JSON.stringify({messages, model: this.id}), {
return await http.post(Alpaca.base + "/v1/chat/completions", JSON.stringify({messages, model: this.id}), {
headers: {
Authorization: 'Bearer ' + this.apiKey, "Content-Type": "application/json", Accept: "application/json",
},
@@ -238,51 +238,57 @@ export class Alpaca extends BaseModel {
}
async chatStream(history) {
const messages = history.map(e => ({role: e.role, content: e.msg}))
return await axios.post(Alpaca.base + "/v1/chat/completions", JSON.stringify({
messages,
model: this.id,
stream: true
}), {
headers: {
Authorization: 'Bearer ' + this.apiKey, "Content-Type": "application/json", Accept: "application/json",
},
}).then(res => res?.body?.getReader());
const prompt = history.map(e => `\n\n### ${e.role}:${e.msg}`).join("")
return await http.post(Alpaca.base + "/completion", JSON.stringify({
prompt,
batch_size: 128,
temperature: 0.2,
top_k: 40,
top_p: 0.9,
n_keep: -1,
n_predict: 2048,
stop: ["### user:\n\n"], // when detect this, stop completion
exclude: [], // no show in the completion
threads: 8,
as_loop: true, // use this to request the completion token by token
interactive: true, // enable the detection of a stop word
})).then(res => res?.text());
}
streamOutput(reader, chat) {
return reader.read().then(({done, value}) => {
if (done) {
return;
}
if (!chat.reminder) {
chat.reminder = ""
}
let decode = new TextDecoder().decode(value)
decode = chat.reminder + decode
let decodedArray = decode.split("data: ");
let longstr = "";
decodedArray.forEach(decoded => {
decoded = decoded.trim();
try {
if (longstr != "") {
decoded = longstr + decoded;
longstr = "";
}
} catch (e) {
longstr = decoded;
decoded = "";
}
if (!!decoded && decoded !== "[DONE]") {
const choices = JSON.parse(decoded).choices
if (choices?.length > 0) {
const response = choices[0].delta.content || "";
chat.msg += response
}
}
})
return this.streamOutput(reader, chat)
})
console.log(reader)
// return reader.read().then(({done, value}) => {
// if (done) {
// return;
// }
// if (!chat.reminder) {
// chat.reminder = ""
// }
// let decode = new TextDecoder().decode(value)
// decode = chat.reminder + decode
// let decodedArray = decode.split("data: ");
// let longstr = "";
// decodedArray.forEach(decoded => {
// decoded = decoded.trim();
// try {
// if (longstr != "") {
// decoded = longstr + decoded;
// longstr = "";
// }
// } catch (e) {
// longstr = decoded;
// decoded = "";
// }
// if (!!decoded && decoded !== "[DONE]") {
// const choices = JSON.parse(decoded).choices
// if (choices?.length > 0) {
// const response = choices[0].delta.content || "";
// chat.msg += response
// }
// }
// })
// return this.streamOutput(reader, chat)
// })
}
}

View File

@@ -29,3 +29,17 @@ export function copyToClipboard(content) {
* @returns {any}
*/
export const scopy = (any = null) => JSON.parse(JSON.stringify(any))
/**
 * Wait until `ctx` is available, polling up to 10 times.
 * @param {*|Function} ctx - value to wait for; pass a getter function to have
 *   it re-evaluated on every retry (a plain falsy value can never become
 *   truthy, so a getter is the useful form for "wait for a global to load").
 * @param {{c: number, delay: number}} params - retry counter and delay in ms.
 * @returns {Promise<*>} resolves with the value; rejects after 10 attempts.
 */
export const load = (ctx, params = {c: 0, delay: 500}) => {
  // Re-evaluate getter-style contexts on each attempt.
  const value = typeof ctx === "function" ? ctx() : ctx
  if (value) {
    return Promise.resolve(value)
  } else if (params.c < 10) {
    // Fixes two bugs: the retry must pass `ctx` onward (the original passed
    // the params object, which is always truthy and falsely resolved), and
    // the delay belongs to setTimeout's second argument, not resolve().
    return new Promise(resolve =>
      setTimeout(() => resolve(load(ctx, {...params, c: params.c + 1})), params.delay))
  } else return Promise.reject("无法加载内容")
}
/**
 * Inject a <script> tag for `url` into the document body.
 * @param {string} url - script source URL.
 * @returns {Promise<HTMLScriptElement>} resolves once the script has loaded,
 *   rejects if it fails to load. Callers that ignore the return value keep
 *   the previous fire-and-forget behavior unchanged.
 */
export const addJs = url => new Promise((resolve, reject) => {
  const script = document.createElement("script")
  script.src = url
  script.onload = () => resolve(script)
  script.onerror = () => reject(new Error(`failed to load script: ${url}`))
  document.body.appendChild(script)
})

13
src/utils/worker.js Normal file
View File

@@ -0,0 +1,13 @@
// Cloudflare Worker entry: reverse-proxies every incoming request to another
// origin, preserving path, query, method, headers and body.
export default {
// The upstream host is read from the request's "Proxy-Authenticate" header,
// falling back to testai.cunwuyun.cn when absent.
// NOTE(review): Proxy-Authenticate is normally a *response* header; using it
// on the request to carry the target host is unconventional — confirm the
// client actually sets it (a custom X- header would be clearer). Also note
// the header itself is forwarded to the upstream along with all others.
async fetch(request, env) {
const url = new URL(request.url);
const pt = request.headers.get("Proxy-Authenticate")
// Rewrite only the host; scheme, path and query string are preserved.
url.host = pt || "testai.cunwuyun.cn"
// Forward the original method/headers/body and follow upstream redirects.
return fetch(url, {
headers: request.headers,
method: request.method,
body: request.body,
redirect: 'follow'
});
}
}