Commit first
@@ -11,6 +11,7 @@
   "dependencies": {
     "@kangc/v-md-editor": "^2.3.15",
     "element-plus": "^2.3.4",
+    "query-string": "^8.1.0",
     "sass": "^1.62.1",
     "sass-loader": "^13.2.2",
     "vue": "^3.2.47"
@@ -1,5 +1,7 @@
+import qs from "query-string"
+
 const ins = {
-    post: (url, body, config) => fetch(url, {...config, method: "POST", body}),
-    get: (url, config) => fetch(url, {...config, method: "GET"}),
+    post: (url, body, config) => fetch(qs.stringifyUrl({url, query: config.params}), {...config, method: "POST", body}),
+    get: (url, config) => fetch(qs.stringifyUrl({url, query: config.params}), {...config, method: "GET"}),
 }
 export default ins
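The wrapper above emulates axios's params option on top of fetch: qs.stringifyUrl({url, query}) appends config.params to the URL as a query string before the request is issued. A minimal usage sketch, assuming an import path and an example URL that are not shown in the commit:

// Hypothetical usage of the fetch wrapper above; the path "./request" and the
// example URL are assumptions, not taken from this commit.
import ins from "./request"

// qs.stringifyUrl({url, query: {page: 1}}) turns ".../list" into ".../list?page=1",
// so this performs GET https://example.com/api/list?page=1 and parses the JSON body.
ins.get("https://example.com/api/list", {params: {page: 1}})
    .then(res => res.json())
    .then(data => console.log(data))

Note that both helpers read config.params unconditionally, so callers are expected to pass a config object; a config without params is fine, since qs.stringifyUrl tolerates an undefined query.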
@@ -17,17 +17,16 @@ export class ChatGPT extends BaseModel {
         super({
             avatar: AI_AVATAR, name: 'ChatGPT', id: "gpt-3.5-turbo", desc: "ChatGPT-3.5所基于的模型",
         });
-        this.apiKey = OPEN_AI_KEY
-        this.headers = {
-            "Content-Type": "application/json",
-            Accept: "application/json",
-            Authorization: `Bearer ${this.apiKey}`
-        }
+        this.setApiKey(OPEN_AI_KEY)
     }
 
     setApiKey(key) {
         this.apiKey = key
-        this.headers["Authorization"] = `Bearer ${this.apiKey}`
+        this.headers = {
+            "Content-Type": "application/json",
+            Accept: "application/json",
+            Authorization: `Bearer ${key}`
+        }
     }
 
     async chat(history) {
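With this refactor, header construction lives only in setApiKey, so changing the key rebuilds the Authorization header in one place. The body of chat(history) is outside this hunk; purely as a hedged illustration, this is what a call against OpenAI's public chat completions endpoint typically looks like with such headers (the endpoint and payload shape come from OpenAI's documented API, not from this commit):

// Hypothetical sketch, not the commit's chat() implementation.
// headers: the object built by setApiKey(); history: assumed to be an array
// of {role, content} messages as used by the OpenAI chat API.
async function completeChat(headers, history) {
    const res = await fetch("https://api.openai.com/v1/chat/completions", {
        method: "POST",
        headers,
        body: JSON.stringify({model: "gpt-3.5-turbo", messages: history}),
    })
    const data = await res.json()
    return data.choices?.[0]?.message?.content
}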
@@ -69,17 +68,39 @@ export class ChatGPT extends BaseModel {
     }
 }
 
 /**
  * ChatGLM 基于chatGLM-130B的api
  */
 export class ChatGLM extends BaseModel {
-    static base = "https://chatglm.cn/chatglm/backend-api"
+    static base = "https://maas.aminer.cn/api/paas"
 
     constructor() {
         super({
-            avatar: "https://cdn.cunwuyun.cn/chat/chatglm.svg", name: 'ChatGLM', id: "chatglm-6b", desc: "ChatGLM-6B所基于的模型"
+            avatar: "https://cdn.cunwuyun.cn/chat/chatglm.svg",
+            name: 'ChatGLM', id: "chatglm-130b", desc: "ChatGLM-130B所基于的模型"
         });
+        this.getToken().then(this.setApiKey)
     }
 
+    setApiKey(key) {
+        this.apiKey = key
+        this.headers = {
+            "Content-Type": "application/json",
+            Accept: "application/json",
+            Authorization: `Bearer ${key}`
+        }
+    }
+
+    getToken() {
+        const encrypted = "MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAMZXxmDh2Rs1lh3Ymud1eVBjds/9SfjczHJFpNe9+0FsUffILVMTBcTqmdPZxjC6M1Ad2EHaHMWXZuc0fIc4Lh8CAwEAAQ=="
+        return axios.post(ChatGLM.base + "/passApiToken/createApiToken", null, {
+            headers: this.headers,
+            params: {apiKey:"4e3ceff669c143dfa09e763663aa72cd",encrypted}
+        }).then(res => res.json()).then(data => data?.token || "key无效或网络波动,请重新尝试");
+    }
+
     async chat(history, callback) {
-        const context = await axios.post(ChatGPT.base + "/v1/stream_context").then(res => res.json());
+        const context = await axios.post(ChatGLM.base + "/v1/stream_context").then(res => res.json());
         return await axios.get(ChatGPT.base + "/v1/stream", {params: context.result})
     }
 }
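Taken together, the ChatGLM changes imply a three-step sequence: request a bearer token from /passApiToken/createApiToken, store it via setApiKey, then create a stream context and read from /v1/stream. A hedged sketch of that flow, written against the fetch wrapper shown earlier (variable names are illustrative; the sketch uses ChatGLM.base for the stream call, whereas the unchanged context line in the diff still points at ChatGPT.base):

// Illustrative only; run inside an async function. glm is assumed to be a
// ChatGLM instance, and ins is the fetch wrapper from above, which the model
// file appears to import as axios (its responses are read with res.json()).
const token = await glm.getToken()
glm.setApiKey(token)                      // explicit call keeps `this` bound inside setApiKey

const context = await ins.post(ChatGLM.base + "/v1/stream_context", null, {headers: glm.headers})
    .then(res => res.json())
const streamResponse = await ins.get(ChatGLM.base + "/v1/stream", {headers: glm.headers, params: context.result})
// streamResponse.body can then be read incrementally and fed to the callback parameter of chat()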