import {dayjs} from "element-plus"; import {nanoid} from "nanoid"; import http from "./http.js"; import {JSEncrypt} from "jsencrypt"; class BaseModel { constructor(props) { for (const k in props) { this[k] = props[k]; } this.headers = { "Content-Type": "application/json", Accept: "application/json,text/event-stream", } } setApiKey(key) { this.apiKey = key this.headers.Authorization = `Bearer ${key}` } } /** * ChatGPT gpt-3.5-turbo的api. */ export class ChatGPT extends BaseModel { static base = "https://chatwithai.pages.dev" static avatar = 'https://cdn.cunwuyun.cn/chat/chatGPT.png' static name = "ChatGPT" static id = "gpt-3.5-turbo" static desc = "ChatGPT-3.5所基于的模型" constructor(params) { const {avatar, name, desc, id} = ChatGPT super({avatar, name, desc, id, ...params}) this.setApiKey("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") } async chat(history) { const messages = history.map(e => ({role: e.role, content: e.msg})) return await http.post(ChatGPT.base + "/v1/chat/completions", JSON.stringify({messages, model: this.id}), { headers: { Authorization: 'Bearer ' + this.apiKey, "Content-Type": "application/json", Accept: "application/json", }, }).then(res => res.json()).then(data => data?.choices?.[0]?.message?.content || "key无效或网络波动,请重新尝试"); } async chatStream(history) { const messages = history.map(e => ({role: e.role, content: e.msg})) return await http.post(ChatGPT.base + "/v1/chat/completions", JSON.stringify({ messages, model: this.id, stream: true }), { headers: { Authorization: 'Bearer ' + this.apiKey, "Content-Type": "application/json", Accept: "application/json", }, }).then(res => res?.body?.getReader()); } async getAccount() { const {headers} = this const usages = await http.get(ChatGPT.base + "/v1/dashboard/billing/subscription", {headers}).then(res => res.json()); const endDate = usages.access_until if (endDate) { const startDate = new Date(endDate - 90 * 24 * 60 * 60); const formattedDate = time => dayjs(time).format("YYYY-MM-DD") return await http.get(`${ChatGPT.base}/v1/dashboard/billing/usage?start_date=${formattedDate(startDate * 1000)}&end_date=${formattedDate(endDate * 1000)}`, {headers}).then(res => res.json()).then(res => { usages.total_usage = res.total_usage const names = usages.account_name.split(" ") return { ...usages, username: names.at(-1) + names[0], usage: (usages.total_usage / 100)?.toFixed(2), total: usages.hard_limit_usd?.toFixed(2) } }); } else return Promise.reject("没有权限或者网络异常,请重新尝试!") } streamOutput(reader, chat) { return reader.read().then(({done, value}) => { if (done) { return; } if (!chat.reminder) { chat.reminder = "" } let decode = new TextDecoder().decode(value) decode = chat.reminder + decode let decodedArray = decode.split("data: "); let longstr = ""; decodedArray.forEach(decoded => { decoded = decoded.trim(); try { if (longstr != "") { decoded = longstr + decoded; longstr = ""; } } catch (e) { longstr = decoded; decoded = ""; } if (!!decoded && decoded !== "[DONE]") { const choices = JSON.parse(decoded).choices if (choices?.length > 0) { const response = choices[0].delta.content || ""; chat.msg += response } } }) return this.streamOutput(reader, chat) }) } } /** * ChatGLM 基于chatGLM-130B的api */ export class ChatGLM extends BaseModel { static base = "https://maas.aminer.cn/api/paas" static avatar = "https://cdn.cunwuyun.cn/chat/chatglm.svg" static name = "ChatGLM" static id = "chatglm-130b" static desc = "ChatGLM-130B所基于的模型" static publicKey = 
"MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAMZXxmDh2Rs1lh3Ymud1eVBjds/9SfjczHJFpNe9+0FsUffILVMTBcTqmdPZxjC6M1Ad2EHaHMWXZuc0fIc4Lh8CAwEAAQ==" constructor(params) { const {avatar, name, desc, id} = ChatGLM super({avatar, name, desc, id, taskId: nanoid(), ...params}) this.getToken().then(e => this.setApiKey(e)) } async getToken() { if (this.apiKey) return await this.apiKey const encrypted = ChatGLM.encrypt(ChatGLM.publicKey) return await http.post(ChatGLM.base + "/passApiToken/createApiToken", JSON.stringify({ apiKey: "4e3ceff669c143dfa09e763663aa72cd", encrypted }), { headers: this.headers, }).then(res => res.json()).then(data => data?.data); } async chat(messages) { const history = messages.map(e => e.msg) history.pop() const prompt = history.pop() return await http.post(ChatGLM.base + "/model/v1/open/engines/chatGLM/chatGLM", JSON.stringify({ history, prompt, temperature: 1, top_p: 0.6, requestTaskNo: this.taskId }), {headers: this.headers}).then(res => res.json()).then(data => { if (data?.data.taskStatus == 'PROCESSING') { return this.getChatResult(data.data.taskOrderNo) } else { return data?.data?.outputText || "key无效或网络波动,请重新尝试" } }); } async getChatResult(taskOrderNo) { return await http.get(ChatGLM.base + `/request-task/query-request-task-result/${taskOrderNo}`, {headers: this.headers}).then(res => res.json()).then(data => { if (data?.data.taskStatus == 'PROCESSING') { return this.getChatResult(data.data.taskOrderNo) } else { return data?.data?.outputText || "key无效或网络波动,请重新尝试" } }) } async chatStream(messages) { const history = messages.map(e => e.msg) history.pop() const prompt = history.pop() const url = ChatGLM.base + "/model/v1/open/engines/sse/chatGLM/chatGLM" return await http.post(url, JSON.stringify({ history, prompt, temperature: 0.2, requestTaskNo: this.taskId }), { headers: this.headers, }).then(res => res?.body?.getReader()); } static encrypt(publicKey, timestamp) { timestamp = Date.now().toFixed(0) const encryptor = new JSEncrypt() encryptor.setPublicKey(publicKey) return encryptor.encrypt(timestamp); } async getAccount() { const usages = await http.get("https://open.bigmodel.ai/api/paas/account/query-customer-account-report", {headers: this.headers}).then(res => res.json()); if (usages.code == 200) { const {data} = usages return { ...data, username: "Kubbo", usage: data.totalSpendAmount?.toFixed(4), total: data.rechargeAmount?.toFixed(4) } } else return Promise.reject("没有权限或者网络异常,请重新尝试!") } streamOutput(reader, chat, cb) { return reader.read().then(({done, value}) => { if (done) { return; } const decode = new TextDecoder().decode(value) const dialogue = decode.split("event:").at(-1) const msg = dialogue.split("\n").filter(e => e.startsWith("data:"))?.map(e => e.replace("data:", '')).join("\n") if (msg?.length > 0) { chat.msg = msg if (typeof cb == "function") { cb(msg) } } return this.streamOutput(reader, chat, cb) }) } } /** * 集成私有的Alpaca */ export class Alpaca extends BaseModel { static base = "https://alpaca7b.aceykubbo.workers.dev" static avatar = "https://cdn.cunwuyun.cn/img/logo.svg" static name = "Alpaca" static id = "alpaca-7b-plus" static desc = "llama所基于的中文权重本地模型" constructor(params) { const {avatar, name, desc, id} = Alpaca super({avatar, name, desc, id, ...params}) this.setApiKey("alpaca-7b-plus") } async chat(history) { const messages = history.map(e => ({role: e.role, content: e.msg})) return await http.post(Alpaca.base + "/v1/chat/completions", JSON.stringify({messages, model: this.id}), { headers: { Authorization: 'Bearer ' + this.apiKey, "Content-Type": 
"application/json", Accept: "application/json", }, }).then(res => res.json()).then(data => data?.choices?.[0]?.message?.content || "key无效或网络波动,请重新尝试"); } async chatStream(history) { const prompt = history.map(e => `\n\n### ${e.role}:${e.msg}`).join("") return await http.post(Alpaca.base + "/completion", JSON.stringify({ prompt, batch_size: 128, temperature: 0.2, top_k: 40, top_p: 0.9, n_keep: -1, n_predict: 2048, stop: ["### user:\n\n"], // when detect this, stop completion exclude: [], // no show in the completion threads: 8, as_loop: true, // use this to request the completion token by token interactive: true, // enable the detection of a stop word })).then(res => res?.text()); } streamOutput(reader, chat) { console.log(reader) // return reader.read().then(({done, value}) => { // if (done) { // return; // } // if (!chat.reminder) { // chat.reminder = "" // } // let decode = new TextDecoder().decode(value) // decode = chat.reminder + decode // let decodedArray = decode.split("data: "); // let longstr = ""; // decodedArray.forEach(decoded => { // decoded = decoded.trim(); // try { // if (longstr != "") { // decoded = longstr + decoded; // longstr = ""; // } // } catch (e) { // longstr = decoded; // decoded = ""; // } // if (!!decoded && decoded !== "[DONE]") { // const choices = JSON.parse(decoded).choices // if (choices?.length > 0) { // const response = choices[0].delta.content || ""; // chat.msg += response // } // } // }) // return this.streamOutput(reader, chat) // }) } } /** * 集成免费的New Bing搜索,TODO */ class NewBing extends BaseModel { static base = "https://maas.aminer.cn/api/paas" static avatar = "https://cdn.cunwuyun.cn/chat/newBing.png" static name = "NewBing" static id = "gpt-4" static desc = "GPT-4所基于的互联网模型" }