From 682c32a7226d4416d7432ec894a0b6a0a93b9a9a Mon Sep 17 00:00:00 2001 From: TBXark Date: Sat, 7 Oct 2023 15:07:59 +0800 Subject: [PATCH 01/11] =?UTF-8?q?feat:=20=E6=B7=BB=E5=8A=A0=20cloudflare?= =?UTF-8?q?=20workers=20ai=20=E6=94=AF=E6=8C=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 5 +- dist/buildinfo.json | 2 +- dist/index.js | 722 ++++++++++++++++++++------ dist/timestamp | 2 +- doc/CONFIG.md | 2 + src/chat.js | 148 +++++- src/command.js | 4 +- src/env.js | 12 + src/message.js | 11 +- src/openai.js | 134 +---- src/workers-ai.js | 453 +++++++++++++++++ yarn.lock | 1189 +++++++++++++++++++++++++++++++++++++++++++ 12 files changed, 2394 insertions(+), 290 deletions(-) create mode 100644 src/workers-ai.js create mode 100644 yarn.lock diff --git a/README.md b/README.md index cc5799d4..308cf017 100644 --- a/README.md +++ b/README.md @@ -38,9 +38,8 @@ ## 更新日志 -- v1.5.1 - - 添加`azure`支持 - - 移除账单功能 +- v1.6.0 + - 添加`workers ai`支持,具体配置查看[配置文档](./doc/CONFIG.md) 其他更新日志见[CHANGELOG.md](./doc/CHANGELOG.md) diff --git a/dist/buildinfo.json b/dist/buildinfo.json index 785124a1..345a29a4 100644 --- a/dist/buildinfo.json +++ b/dist/buildinfo.json @@ -1 +1 @@ -{"sha": "23155ba", "timestamp": 1691479160} +{"sha": "0019c7a", "timestamp": 1696662329} diff --git a/dist/index.js b/dist/index.js index b7dd62ff..65c9afe9 100644 --- a/dist/index.js +++ b/dist/index.js @@ -39,9 +39,9 @@ var ENV = { // 检查更新的分支 UPDATE_BRANCH: "master", // 当前版本 - BUILD_TIMESTAMP: 1691479160, + BUILD_TIMESTAMP: 1696662329, // 当前版本 commit id - BUILD_VERSION: "23155ba", + BUILD_VERSION: "0019c7a", I18N: null, LANGUAGE: "zh-cn", // 使用流模式 @@ -52,10 +52,16 @@ var ENV = { DEBUG_MODE: false, // 开发模式 DEV_MODE: false, + // Telegram API Domain TELEGRAM_API_DOMAIN: "https://api.telegram.org", + // OpenAI API Domain 可替换兼容openai api的其他服务商 OPENAI_API_DOMAIN: "https://api.openai.com", + // Azure API Key AZURE_API_KEY: null, - AZURE_COMPLETIONS_API: null + // Azure Completions API + AZURE_COMPLETIONS_API: null, + // workers ai模型 + WORKERS_AI_MODEL: null }; var CONST = { PASSWORD_KEY: "chat_history_password", @@ -64,14 +70,17 @@ var CONST = { }; var DATABASE = null; var API_GUARD = null; +var AI_LLM = null; var ENV_VALUE_TYPE = { API_KEY: [], AZURE_API_KEY: "string", - AZURE_COMPLETIONS_API: "string" + AZURE_COMPLETIONS_API: "string", + WORKERS_AI_MODEL: "string" }; function initEnv(env, i18n2) { DATABASE = env.DATABASE; API_GUARD = env.API_GUARD; + AI_LLM = env.AI_LLM; for (const key in ENV) { if (env[key]) { switch (ENV_VALUE_TYPE[key] ? 
typeof ENV_VALUE_TYPE[key] : typeof ENV[key]) { @@ -506,6 +515,151 @@ async function getBot(token) { } } +// src/openai.js +function extractContentFromStreamData(stream) { + const line = stream.split("\n"); + let remainingStr = ""; + let contentStr = ""; + for (const l of line) { + try { + if (l.startsWith("data:") && l.endsWith("}")) { + const data = JSON.parse(l.substring(5)); + contentStr += data.choices[0].delta?.content || ""; + } else { + remainingStr = l; + } + } catch (e) { + remainingStr = l; + } + } + return { + content: contentStr, + pending: remainingStr + }; +} +function isOpenAIEnable(context) { + const key = context.openAIKeyFromContext(); + return key && key.length > 0; +} +async function requestCompletionsFromOpenAI(message, history, context, onStream) { + const key = context.openAIKeyFromContext(); + const body = { + model: ENV.CHAT_MODEL, + ...context.USER_CONFIG.OPENAI_API_EXTRA_PARAMS, + messages: [...history || [], { role: "user", content: message }], + stream: onStream != null + }; + const controller = new AbortController(); + const { signal } = controller; + const timeout = 1e3 * 60 * 5; + setTimeout(() => controller.abort(), timeout); + let url = `${ENV.OPENAI_API_DOMAIN}/v1/chat/completions`; + let header = { + "Content-Type": "application/json", + "Authorization": `Bearer ${key}` + }; + if (ENV.AZURE_COMPLETIONS_API) { + url = ENV.AZURE_COMPLETIONS_API; + header["api-key"] = key; + delete header["Authorization"]; + delete body.model; + } + const resp = await fetch(url, { + method: "POST", + headers: header, + body: JSON.stringify(body), + signal + }); + if (onStream && resp.ok && resp.headers.get("content-type").indexOf("text/event-stream") !== -1) { + const reader = resp.body.getReader({ mode: "byob" }); + const decoder = new TextDecoder("utf-8"); + let data = { done: false }; + let pendingText = ""; + let contentFull = ""; + let lengthDelta = 0; + let updateStep = 20; + while (data.done === false) { + try { + data = await reader.readAtLeast(4096, new Uint8Array(5e3)); + pendingText += decoder.decode(data.value); + const content = extractContentFromStreamData(pendingText); + pendingText = content.pending; + lengthDelta += content.content.length; + contentFull = contentFull + content.content; + if (lengthDelta > updateStep) { + lengthDelta = 0; + updateStep += 5; + await onStream(`${contentFull} +${ENV.I18N.message.loading}...`); + } + } catch (e) { + contentFull += ` + +[ERROR]: ${e.message} + +`; + break; + } + } + return contentFull; + } + const result = await resp.json(); + if (result.error?.message) { + if (ENV.DEBUG_MODE || ENV.DEV_MODE) { + throw new Error(`OpenAI API Error +> ${result.error.message} +Body: ${JSON.stringify(body)}`); + } else { + throw new Error(`OpenAI API Error +> ${result.error.message}`); + } + } + setTimeout(() => updateBotUsage(result.usage, context).catch(console.error), 0); + return result.choices[0].message.content; +} +async function requestImageFromOpenAI(prompt, context) { + const key = context.openAIKeyFromContext(); + const body = { + prompt, + n: 1, + size: "512x512" + }; + const resp = await fetch(`${ENV.OPENAI_API_DOMAIN}/v1/images/generations`, { + method: "POST", + headers: { + "Content-Type": "application/json", + "Authorization": `Bearer ${key}` + }, + body: JSON.stringify(body) + }).then((res) => res.json()); + if (resp.error?.message) { + throw new Error(`OpenAI API Error +> ${resp.error.message}`); + } + return resp.data[0].url; +} +async function updateBotUsage(usage, context) { + if (!ENV.ENABLE_USAGE_STATISTICS) 
{ + return; + } + let dbValue = JSON.parse(await DATABASE.get(context.SHARE_CONTEXT.usageKey)); + if (!dbValue) { + dbValue = { + tokens: { + total: 0, + chats: {} + } + }; + } + dbValue.tokens.total += usage.total_tokens; + if (!dbValue.tokens.chats[context.SHARE_CONTEXT.chatId]) { + dbValue.tokens.chats[context.SHARE_CONTEXT.chatId] = usage.total_tokens; + } else { + dbValue.tokens.chats[context.SHARE_CONTEXT.chatId] += usage.total_tokens; + } + await DATABASE.put(context.SHARE_CONTEXT.usageKey, JSON.stringify(dbValue)); +} + // src/gpt3.js async function resourceLoader(key, url) { try { @@ -793,164 +947,411 @@ function makeResponse200(resp) { } } -// src/openai.js -function extractContentFromStreamData(stream) { - const line = stream.split("\n"); - let remainingStr = ""; - let contentStr = ""; - for (const l of line) { - try { - if (l.startsWith("data:") && l.endsWith("}")) { - const data = JSON.parse(l.substring(5)); - contentStr += data.choices[0].delta?.content || ""; - } else { - remainingStr = l; +// src/workers-ai.js +function isWorkersAIEnable(context) { + return AI_LLM !== null; +} +async function requestCompletionsFromWorkersAI(message, history, context, onStream) { + const ai = new Ai(AI_LLM); + const model = ENV.WORKERS_AI_MODEL || "@cf/meta/llama-2-7b-chat-int8"; + const request = { + messages: [...history || [], { role: "user", content: message }] + }; + const response = await ai.run(model, request); + return response.response; +} +var TypedArrayProto = Object.getPrototypeOf(Uint8Array); +function isArray(value) { + return Array.isArray(value) || value instanceof TypedArrayProto; +} +function arrLength(obj) { + return obj instanceof TypedArrayProto ? obj.length : obj.flat().reduce( + (acc, cur) => acc + (cur instanceof TypedArrayProto ? 
cur.length : 1), + 0 + ); +} +function ensureShape(shape, value) { + if (shape.length === 0 && !isArray(value)) { + return; + } + const count = shape.reduce((acc, v) => { + if (!Number.isInteger(v)) { + throw new Error( + `expected shape to be array-like of integers but found non-integer element "${v}"` + ); + } + return acc * v; + }, 1); + if (count != arrLength(value)) { + throw new Error( + `invalid shape: expected ${count} elements for shape ${shape} but value array has length ${value.length}` + ); + } +} +function ensureType(type, value) { + if (isArray(value)) { + value.forEach((v) => ensureType(type, v)); + return; + } + switch (type) { + case "bool": { + if (typeof value === "boolean") { + return; } - } catch (e) { - remainingStr = l; + break; + } + case "float16": + case "float32": { + if (typeof value === "number") { + return; + } + break; + } + case "int8": + case "uint8": + case "int16": + case "uint16": + case "int32": + case "uint32": { + if (Number.isInteger(value)) { + return; + } + break; + } + case "int64": + case "uint64": { + if (typeof value === "bigint") { + return; + } + break; + } + case "str": { + if (typeof value === "string") { + return; + } + break; } } - return { - content: contentStr, - pending: remainingStr - }; + throw new Error(`unexpected type "${type}" with value "${value}".`); } -async function requestCompletionsFromOpenAI(message, history, context, onStream) { - const key = context.openAIKeyFromContext(); - const body = { - model: ENV.CHAT_MODEL, - ...context.USER_CONFIG.OPENAI_API_EXTRA_PARAMS, - messages: [...history || [], { role: "user", content: message }], - stream: onStream != null - }; - const controller = new AbortController(); - const { signal } = controller; - const timeout = 1e3 * 60 * 5; - setTimeout(() => controller.abort(), timeout); - let url = `${ENV.OPENAI_API_DOMAIN}/v1/chat/completions`; - let header = { - "Content-Type": "application/json", - "Authorization": `Bearer ${key}` - }; - if (ENV.AZURE_COMPLETIONS_API) { - url = ENV.AZURE_COMPLETIONS_API; - header["api-key"] = key; - delete header["Authorization"]; - delete body.model; +function serializeType(type, value) { + if (isArray(value)) { + return [...value].map((v) => serializeType(type, v)); } - const resp = await fetch(url, { - method: "POST", - headers: header, - body: JSON.stringify(body), - signal - }); - if (onStream && resp.ok && resp.headers.get("content-type").indexOf("text/event-stream") !== -1) { - const reader = resp.body.getReader({ mode: "byob" }); - const decoder = new TextDecoder("utf-8"); - let data = { done: false }; - let pendingText = ""; - let contentFull = ""; - let lengthDelta = 0; - let updateStep = 20; - while (data.done === false) { - try { - data = await reader.readAtLeast(4096, new Uint8Array(5e3)); - pendingText += decoder.decode(data.value); - const content = extractContentFromStreamData(pendingText); - pendingText = content.pending; - lengthDelta += content.content.length; - contentFull = contentFull + content.content; - if (lengthDelta > updateStep) { - lengthDelta = 0; - updateStep += 5; - await onStream(`${contentFull} -${ENV.I18N.message.loading}...`); - } - } catch (e) { - contentFull += ` - -[ERROR]: ${e.message} - -`; - break; + switch (type) { + case "str": + case "bool": + case "float16": + case "float32": + case "int8": + case "uint8": + case "int16": + case "uint16": + case "uint32": + case "int32": { + return value; + } + case "int64": + case "uint64": { + return value.toString(); + } + } + throw new Error(`unexpected type "${type}" with 
value "${value}".`); +} +function deserializeType(type, value) { + if (isArray(value)) { + return value.map((v) => deserializeType(type, v)); + } + switch (type) { + case "str": + case "bool": + case "float16": + case "float32": + case "int8": + case "uint8": + case "int16": + case "uint16": + case "uint32": + case "int32": { + return value; + } + case "int64": + case "uint64": { + return BigInt(value); + } + } + throw new Error(`unexpected type "${type}" with value "${value}".`); +} +var Tensor = class _Tensor { + constructor(type, value, opts = {}) { + this.type = type; + this.value = value; + ensureType(type, this.value); + if (opts.shape === void 0) { + if (isArray(this.value)) { + this.shape = [arrLength(value)]; + } else { + this.shape = []; } + } else { + this.shape = opts.shape; } - return contentFull; + ensureShape(this.shape, this.value); + this.name = opts.name || null; } - const result = await resp.json(); - if (result.error?.message) { - if (ENV.DEBUG_MODE || ENV.DEV_MODE) { - throw new Error(`OpenAI API Error -> ${result.error.message} -Body: ${JSON.stringify(body)}`); + static fromJSON(obj) { + const { type, shape, value, b64Value, name } = obj; + const opts = { shape, name }; + if (b64Value !== void 0) { + const value2 = b64ToArray(b64Value, type)[0]; + return new _Tensor(type, value2, opts); } else { - throw new Error(`OpenAI API Error -> ${result.error.message}`); + return new _Tensor(type, deserializeType(type, value), opts); } } - setTimeout(() => updateBotUsage(result.usage, context).catch(console.error), 0); - return result.choices[0].message.content; -} -async function requestImageFromOpenAI(prompt, context) { - const key = context.openAIKeyFromContext(); - const body = { - prompt, - n: 1, - size: "512x512" - }; - const resp = await fetch(`${ENV.OPENAI_API_DOMAIN}/v1/images/generations`, { - method: "POST", - headers: { - "Content-Type": "application/json", - "Authorization": `Bearer ${key}` - }, - body: JSON.stringify(body) - }).then((res) => res.json()); - if (resp.error?.message) { - throw new Error(`OpenAI API Error -> ${resp.error.message}`); + toJSON() { + return { + type: this.type, + shape: this.shape, + name: this.name, + value: serializeType(this.type, this.value) + }; + } +}; +function b64ToArray(base64, type) { + const byteString = atob(base64); + const bytes = new Uint8Array(byteString.length); + for (let i = 0; i < byteString.length; i++) { + bytes[i] = byteString.charCodeAt(i); + } + const arrBuffer = new DataView(bytes.buffer).buffer; + switch (type) { + case "float32": + return new Float32Array(arrBuffer); + case "float64": + return new Float64Array(arrBuffer); + case "int32": + return new Int32Array(arrBuffer); + case "int64": + return new BigInt64Array(arrBuffer); + default: + throw Error(`invalid data type for base64 input: ${type}`); } - return resp.data[0].url; } -async function requestCompletionsFromChatGPT(text, context, modifier, onStream) { - const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0; - const historyKey = context.SHARE_CONTEXT.chatHistoryKey; - let history = await loadHistory(historyKey, context); - if (modifier) { - const modifierData = modifier(history, text); - history = modifierData.history; - text = modifierData.text; +function parseInputs(inputs) { + if (Array.isArray(inputs)) { + return inputs.map((input) => input.toJSON()); } - const { real: realHistory, original: originalHistory } = history; - const answer = await requestCompletionsFromOpenAI(text, realHistory, context, onStream); - if 
(!historyDisable) { - originalHistory.push({ role: "user", content: text || "", cosplay: context.SHARE_CONTEXT.role || "" }); - originalHistory.push({ role: "assistant", content: answer, cosplay: context.SHARE_CONTEXT.role || "" }); - await DATABASE.put(historyKey, JSON.stringify(originalHistory)).catch(console.error); + if (inputs !== null && typeof inputs === "object") { + return Object.keys(inputs).map((key) => { + let tensor = inputs[key].toJSON(); + tensor.name = key; + return tensor; + }); } - return answer; + throw new Error(`invalid inputs, must be Array> | TensorsObject`); } -async function updateBotUsage(usage, context) { - if (!ENV.ENABLE_USAGE_STATISTICS) { - return; +var InferenceSession = class { + constructor(binding, model, options = {}) { + this.binding = binding; + this.model = model; + this.options = options; } - let dbValue = JSON.parse(await DATABASE.get(context.SHARE_CONTEXT.usageKey)); - if (!dbValue) { - dbValue = { - tokens: { - total: 0, - chats: {} + async run(inputs, options) { + const jsonInputs = parseInputs(inputs); + const body = JSON.stringify({ + input: jsonInputs + }); + const compressedReadableStream = new Response(body).body.pipeThrough( + new CompressionStream("gzip") + ); + let routingModel = "default"; + if (this.model === "@cf/meta/llama-2-7b-chat-int8") { + routingModel = "llama_2_7b_chat_int8"; + } + const res = await this.binding.fetch("/run", { + method: "POST", + body: compressedReadableStream, + headers: { + "content-encoding": "gzip", + "cf-consn-model-id": this.model, + "cf-consn-routing-model": routingModel, + ...this.options?.extraHeaders || {} } - }; + }); + if (!res.ok) { + throw new Error(`API returned ${res.status}: ${await res.text()}`); + } + const { result } = await res.json(); + const outputByName = {}; + for (let i = 0, len = result.length; i < len; i++) { + const tensor = Tensor.fromJSON(result[i]); + const name = tensor.name || "output" + i; + outputByName[name] = tensor; + } + return outputByName; } - dbValue.tokens.total += usage.total_tokens; - if (!dbValue.tokens.chats[context.SHARE_CONTEXT.chatId]) { - dbValue.tokens.chats[context.SHARE_CONTEXT.chatId] = usage.total_tokens; - } else { - dbValue.tokens.chats[context.SHARE_CONTEXT.chatId] += usage.total_tokens; +}; +var modelMappings = { + "text-classification": ["@cf/huggingface/distilbert-sst-2-int8"], + "text-embeddings": ["@cf/baai/bge-base-en-v1.5"], + "speech-recognition": ["@cf/openai/whisper"], + "image-classification": ["@cf/microsoft/resnet-50"], + "text-generation": ["@cf/meta/llama-2-7b-chat-int8"], + translation: ["@cf/meta/m2m100-1.2b"] +}; +var chunkArray = (arr, size) => arr.length > size ? 
[arr.slice(0, size), ...chunkArray(arr.slice(size), size)] : [arr]; +var Ai = class { + constructor(binding, options = {}) { + this.binding = binding; + this.options = options; + } + async run(model, inputs) { + const session = new InferenceSession( + this.binding, + model, + this.options.sessionOptions || {} + ); + let tensorInput; + let typedInputs; + let outputMap = (r) => r; + const tasks = Object.keys(modelMappings); + let task = ""; + for (var t in tasks) { + if (modelMappings[tasks[t]].indexOf(model) !== -1) { + task = tasks[t]; + break; + } + } + switch (task) { + case "text-classification": + typedInputs = inputs; + tensorInput = [ + new Tensor("str", [typedInputs.text], { + shape: [[typedInputs.text].length], + name: "input_text" + }) + ]; + outputMap = (r) => { + return [ + { + label: "NEGATIVE", + score: r.logits.value[0][0] + }, + { + label: "POSITIVE", + score: r.logits.value[0][1] + } + ]; + }; + break; + case "text-embeddings": + typedInputs = inputs; + tensorInput = [ + new Tensor( + "str", + Array.isArray(typedInputs.text) ? typedInputs.text : [typedInputs.text], + { + shape: [ + Array.isArray(typedInputs.text) ? typedInputs.text.length : [typedInputs.text].length + ], + name: "input_text" + } + ) + ]; + outputMap = (r) => { + if (Array.isArray(r.embeddings.value[0])) { + return { + shape: r.embeddings.shape, + data: r.embeddings.value + }; + } else { + return { + shape: r.embeddings.shape, + data: chunkArray(r.embeddings.value, r.embeddings.shape[1]) + }; + } + }; + break; + case "speech-recognition": + typedInputs = inputs; + tensorInput = [ + new Tensor("uint8", typedInputs.audio, { + shape: [1, typedInputs.audio.length], + name: "audio" + }) + ]; + outputMap = (r) => { + return { text: r.name.value[0] }; + }; + break; + case "text-generation": + typedInputs = inputs; + let prompt = ""; + if (typedInputs.messages === void 0) { + prompt = typedInputs.prompt; + } else { + for (let i = 0; i < typedInputs.messages.length; i++) { + const inp = typedInputs.messages[i]; + switch (inp.role) { + case "system": + prompt += "[INST]<>" + inp.content + "<>[/INST]\n"; + break; + case "user": + prompt += "[INST]" + inp.content + "[/INST]\n"; + break; + case "assistant": + prompt += inp.content + "\n"; + break; + default: + throw new Error("Invalid role: " + inp.role); + } + } + } + tensorInput = [ + new Tensor("str", [prompt], { + shape: [1], + name: "INPUT_0" + }), + new Tensor("uint32", [256], { + // sequence length + shape: [1], + name: "INPUT_1" + }) + ]; + outputMap = (r) => { + return { response: r.name.value[0] }; + }; + break; + case "translation": + typedInputs = inputs; + tensorInput = [ + new Tensor("str", [typedInputs.text], { + shape: [1, 1], + name: "text" + }), + new Tensor("str", [typedInputs.source_lang || "en"], { + shape: [1, 1], + name: "source_lang" + }), + new Tensor("str", [typedInputs.target_lang], { + shape: [1, 1], + name: "target_lang" + }) + ]; + outputMap = (r) => { + return { translated_text: r.name.value[0] }; + }; + break; + default: + throw new Error(`No such model ${model} or task`); + } + const output = await session.run(tensorInput); + return outputMap(output); } - await DATABASE.put(context.SHARE_CONTEXT.usageKey, JSON.stringify(dbValue)); -} +}; + +// src/chat.js async function loadHistory(key, context) { const initMessage = { role: "system", content: context.USER_CONFIG.SYSTEM_INIT_MESSAGE }; const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0; @@ -1015,9 +1416,25 @@ async function loadHistory(key, context) { } return { 
real: history, original }; } - -// src/chat.js -async function chatWithOpenAI(text, context, modifier) { +async function requestCompletionsFromLLM(text, context, llm, modifier, onStream) { + const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0; + const historyKey = context.SHARE_CONTEXT.chatHistoryKey; + let history = await loadHistory(historyKey, context); + if (modifier) { + const modifierData = modifier(history, text); + history = modifierData.history; + text = modifierData.text; + } + const { real: realHistory, original: originalHistory } = history; + const answer = await llm(text, realHistory, context, onStream); + if (!historyDisable) { + originalHistory.push({ role: "user", content: text || "", cosplay: context.SHARE_CONTEXT.role || "" }); + originalHistory.push({ role: "assistant", content: answer, cosplay: context.SHARE_CONTEXT.role || "" }); + await DATABASE.put(historyKey, JSON.stringify(originalHistory)).catch(console.error); + } + return answer; +} +async function chatWithLLM(text, context, modifier) { try { try { const msg = await sendMessageToTelegramWithContext(context)(ENV.I18N.message.loading).then((r) => r.json()); @@ -1042,7 +1459,11 @@ async function chatWithOpenAI(text, context, modifier) { } }; } - const answer = await requestCompletionsFromChatGPT(text, context, modifier, onStream); + let llm = requestCompletionsFromOpenAI; + if (isWorkersAIEnable(context)) { + llm = requestCompletionsFromWorkersAI; + } + const answer = await requestCompletionsFromLLM(text, context, llm, modifier, onStream); context.CURRENT_CHAT_CONTEXT.parse_mode = parseMode; if (ENV.SHOW_REPLY_BUTTON && context.CURRENT_CHAT_CONTEXT.message_id) { try { @@ -1376,7 +1797,7 @@ async function commandRegenerate(message, command, subcommand, context) { } return { history: { real, original }, text: nextText }; }; - return chatWithOpenAI(null, context, mf); + return chatWithLLM(null, context, mf); } async function commandEcho(message, command, subcommand, context) { let msg = "
";
@@ -1514,8 +1935,9 @@ async function msgIgnoreOldMessage(message, context) {
   return null;
 }
 async function msgCheckEnvIsReady(message, context) {
-  if (context.openAIKeyFromContext() === null) {
-    return sendMessageToTelegramWithContext(context)("OpenAI API Key Not Set");
+  const llmEnable = isOpenAIEnable(context) || isWorkersAIEnable(context);
+  if (!llmEnable) {
+    return sendMessageToTelegramWithContext(context)("LLM Not Set");
   }
   if (!DATABASE) {
     return sendMessageToTelegramWithContext(context)("DATABASE Not Set");
@@ -1639,7 +2061,7 @@ async function msgHandleRole(message, context) {
   }
 }
 async function msgChatWithOpenAI(message, context) {
-  return chatWithOpenAI(message.text, context, null);
+  return chatWithLLM(message.text, context, null);
 }
 async function msgProcessByChatType(message, context) {
   const handlerMap = {
diff --git a/dist/timestamp b/dist/timestamp
index 7b793a94..9ad82cd2 100644
--- a/dist/timestamp
+++ b/dist/timestamp
@@ -1 +1 @@
-1691479160
+1696662329
diff --git a/doc/CONFIG.md b/doc/CONFIG.md
index dbd258b7..53b01703 100644
--- a/doc/CONFIG.md
+++ b/doc/CONFIG.md
@@ -73,6 +73,8 @@
 | OPENAI_API_KEY          | OpenAI API Key,设定后每次调用API都会带上, 每个用户可以设定自己的Key | `/setenv OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx` |
 
 
+### Workers AI 配置
+在 `项目主页-Settings-Variables-AI Bindings` 中添加 `AI_LLM = Workers AI Catalog` 绑定,即可启用 Workers AI(用法示意见下方代码片段)。
 
 
 ### 支持命令
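For context, a minimal sketch (not part of this patch) of how the `AI_LLM` binding is consumed once configured: `initEnv(env)` stores `env.AI_LLM`, `isWorkersAIEnable()` checks that it exists, and `requestCompletionsFromWorkersAI()` wraps it with the bundled `Ai` class. The standalone Worker below is illustrative only and assumes the `Ai` class from `src/workers-ai.js` were exported, which this patch does not do:

```js
// Hedged sketch: a throwaway Worker exercising an AI binding named AI_LLM,
// mirroring what requestCompletionsFromWorkersAI does in this patch.
// Assumption: Ai is exported from src/workers-ai.js for standalone use.
import {Ai} from './workers-ai.js';

export default {
  async fetch(request, env) {
    const ai = new Ai(env.AI_LLM); // env.AI_LLM is the "AI Bindings" entry added above
    const result = await ai.run('@cf/meta/llama-2-7b-chat-int8', {
      messages: [{role: 'user', content: 'Hello'}],
    });
    return new Response(result.response);
  },
};
```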
diff --git a/src/chat.js b/src/chat.js
index cada722d..ccc74b7d 100644
--- a/src/chat.js
+++ b/src/chat.js
@@ -1,18 +1,149 @@
-import {sendChatActionToTelegramWithContext, sendMessageToTelegramWithContext, deleteMessageFromTelegramWithContext} from './telegram.js';
-import {ENV} from './env.js';
-import {requestCompletionsFromChatGPT} from './openai.js';
+import {
+  deleteMessageFromTelegramWithContext,
+  sendChatActionToTelegramWithContext,
+  sendMessageToTelegramWithContext
+} from './telegram.js';
+import {DATABASE, ENV} from './env.js';
 // eslint-disable-next-line no-unused-vars
 import {Context} from './context.js';
+import {requestCompletionsFromOpenAI} from './openai.js';
+import {tokensCounter} from './utils.js';
+import {isWorkersAIEnable, requestCompletionsFromWorkersAI} from './workers-ai.js';
+
+
+/**
+ * 加载历史TG消息
+ *
+ * @param {string} key
+ * @param {Context} context
+ * @return {Promise}
+ */
+async function loadHistory(key, context) {
+  const initMessage = {role: 'system', content: context.USER_CONFIG.SYSTEM_INIT_MESSAGE};
+  const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
+
+  // 判断是否禁用历史记录
+  if (historyDisable) {
+    initMessage.role = ENV.SYSTEM_INIT_MESSAGE_ROLE;
+    return {real: [initMessage], original: [initMessage]};
+  }
+
+  // 加载历史记录
+  let history = [];
+  try {
+    history = JSON.parse(await DATABASE.get(key));
+  } catch (e) {
+    console.error(e);
+  }
+  if (!history || !Array.isArray(history)) {
+    history = [];
+  }
+
+
+  let original = JSON.parse(JSON.stringify(history));
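+  // "original" keeps every turn (with its cosplay tag) for persistence; "history" is filtered to the current role below and is what actually gets sent to the model.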
+
+  // 按身份过滤
+  if (context.SHARE_CONTEXT.role) {
+    history = history.filter((chat) => context.SHARE_CONTEXT.role === chat.cosplay);
+  }
+
+  history.forEach((item) => {
+    delete item.cosplay;
+  });
+
+  const counter = await tokensCounter();
+
+  const trimHistory = (list, initLength, maxLength, maxToken) => {
+    // 历史记录超出长度需要裁剪
+    if (list.length > maxLength) {
+      list = list.splice(list.length - maxLength);
+    }
+    // 处理token长度问题
+    let tokenLength = initLength;
+    for (let i = list.length - 1; i >= 0; i--) {
+      const historyItem = list[i];
+      let length = 0;
+      if (historyItem.content) {
+        length = counter(historyItem.content);
+      } else {
+        historyItem.content = '';
+      }
+      // 如果最大长度超过maxToken,裁剪history
+      tokenLength += length;
+      if (tokenLength > maxToken) {
+        list = list.splice(i + 1);
+        break;
+      }
+    }
+    return list;
+  };
+
+  // 裁剪
+  if (ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH > 0) {
+    const initLength = counter(initMessage.content);
+    const roleCount = Math.max(Object.keys(context.USER_DEFINE.ROLE).length, 1);
+    history = trimHistory(history, initLength, ENV.MAX_HISTORY_LENGTH, ENV.MAX_TOKEN_LENGTH);
+    original = trimHistory(original, initLength, ENV.MAX_HISTORY_LENGTH * roleCount, ENV.MAX_TOKEN_LENGTH * roleCount);
+  }
+
+  // 插入init
+  switch (history.length > 0 ? history[0].role : '') {
+    case 'assistant': // 第一条为机器人,替换成init
+    case 'system': // 第一条为system,用新的init替换
+      history[0] = initMessage;
+      break;
+    default:// 默认给第一条插入init
+      history.unshift(initMessage);
+  }
+
+  // 如果第一条是system,替换role为SYSTEM_INIT_MESSAGE_ROLE
+  if (ENV.SYSTEM_INIT_MESSAGE_ROLE !== 'system' && history.length > 0 && history[0].role === 'system') {
+    history[0].role = ENV.SYSTEM_INIT_MESSAGE_ROLE;
+  }
+
+  return {real: history, original: original};
+}
+
+
 
 /**
- * 与OpenAI聊天
+ * 使用指定的LLM请求补全,并负责历史记录的读取与保存
+ * @param {string} text
+ * @param {Context} context
+ * @param {function} llm
+ * @param {function} modifier
+ * @param {function} onStream
+ * @return {Promise}
+ */
+export async function requestCompletionsFromLLM(text, context, llm, modifier, onStream) {
+  const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
+  const historyKey = context.SHARE_CONTEXT.chatHistoryKey;
+  let history = await loadHistory(historyKey, context);
+  if (modifier) {
+    const modifierData = modifier(history, text);
+    history = modifierData.history;
+    text = modifierData.text;
+  }
+  const {real: realHistory, original: originalHistory} = history;
+  const answer = await llm(text, realHistory, context, onStream);
+  if (!historyDisable) {
+    originalHistory.push({role: 'user', content: text || '', cosplay: context.SHARE_CONTEXT.role || ''});
+    originalHistory.push({role: 'assistant', content: answer, cosplay: context.SHARE_CONTEXT.role || ''});
+    await DATABASE.put(historyKey, JSON.stringify(originalHistory)).catch(console.error);
+  }
+  return answer;
+}
+
+/**
+ * 与LLM聊天
  *
  * @param {string} text
  * @param {Context} context
  * @param {function} modifier
  * @return {Promise}
  */
-export async function chatWithOpenAI(text, context, modifier) {
+export async function chatWithLLM(text, context, modifier) {
   try {
     try {
       const msg = await sendMessageToTelegramWithContext(context)(ENV.I18N.message.loading).then((r) => r.json());
@@ -38,7 +169,12 @@ export async function chatWithOpenAI(text, context, modifier) {
       };
     }
 
-    const answer = await requestCompletionsFromChatGPT(text, context, modifier, onStream);
+    let llm = requestCompletionsFromOpenAI;
+    if (isWorkersAIEnable(context)) {
+      llm = requestCompletionsFromWorkersAI;
+    }
+
+    const answer = await requestCompletionsFromLLM(text, context, llm, modifier, onStream);
     context.CURRENT_CHAT_CONTEXT.parse_mode = parseMode;
     if (ENV.SHOW_REPLY_BUTTON && context.CURRENT_CHAT_CONTEXT.message_id) {
       try {
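The refactor above reduces each provider to a plain async function with the shape `(message, history, context, onStream) => Promise<string>`; `chatWithLLM` picks `requestCompletionsFromWorkersAI` when the binding is present and falls back to `requestCompletionsFromOpenAI` otherwise. As a hedged illustration of that contract (the echo provider below is hypothetical and not part of the patch), any function with the same signature could be handed to `requestCompletionsFromLLM`:

```js
// Hypothetical provider, used only to illustrate the llm parameter's contract.
async function requestCompletionsFromEcho(message, history, context, onStream) {
  const answer = `echo(${(history || []).length} turns): ${message}`;
  if (onStream) {
    await onStream(answer); // optional incremental updates, same as the real providers
  }
  return answer;
}

// Usage (inside the bot, where text/context/onStream are available):
// const answer = await requestCompletionsFromLLM(text, context, requestCompletionsFromEcho, null, onStream);
```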
diff --git a/src/command.js b/src/command.js
index 5b9c9581..f3ea84ce 100644
--- a/src/command.js
+++ b/src/command.js
@@ -9,7 +9,7 @@ import {
   sendMessageToTelegramWithContext,
   sendPhotoToTelegramWithContext,
 } from './telegram.js';
-import {chatWithOpenAI} from './chat.js';
+import {chatWithLLM} from './chat.js';
 
 
 const commandAuthCheck = {
@@ -440,7 +440,7 @@ async function commandRegenerate(message, command, subcommand, context) {
     }
     return {history: {real, original}, text: nextText};
   };
-  return chatWithOpenAI(null, context, mf);
+  return chatWithLLM(null, context, mf);
 }
 
 /**
diff --git a/src/env.js b/src/env.js
index 1e8d7437..1b0da9c4 100644
--- a/src/env.js
+++ b/src/env.js
@@ -31,6 +31,7 @@
  * @property {string} OPENAI_API_DOMAIN
  * @property {string} AZURE_API_KEY
  * @property {string} AZURE_COMPLETIONS_API
+ * @property {string} WORKERS_AI_MODEL
  */
 /**
  * @type {Environment}
@@ -98,12 +99,19 @@ export const ENV = {
   // 开发模式
   DEV_MODE: false,
 
+  // Telegram API Domain
   TELEGRAM_API_DOMAIN: 'https://api.telegram.org',
+  // OpenAI API Domain 可替换兼容openai api的其他服务商
   OPENAI_API_DOMAIN: 'https://api.openai.com',
 
+  // Azure API Key
   AZURE_API_KEY: null,
+  // Azure Completions API
   AZURE_COMPLETIONS_API: null,
 
+  // workers ai模型
+  WORKERS_AI_MODEL: null,
+
 };
 
 export const CONST = {
@@ -115,10 +123,13 @@ export const CONST = {
 export let DATABASE = null;
 export let API_GUARD = null;
 
+export let AI_LLM = null;
+
 const ENV_VALUE_TYPE = {
   API_KEY: [],
   AZURE_API_KEY: 'string',
   AZURE_COMPLETIONS_API: 'string',
+  WORKERS_AI_MODEL: 'string',
 };
 
 /**
@@ -134,6 +145,7 @@ const ENV_VALUE_TYPE = {
 export function initEnv(env, i18n) {
   DATABASE = env.DATABASE;
   API_GUARD = env.API_GUARD;
+  AI_LLM = env.AI_LLM;
   for (const key in ENV) {
     if (env[key]) {
       switch (ENV_VALUE_TYPE[key]?typeof ENV_VALUE_TYPE[key]:(typeof ENV[key])) {
diff --git a/src/message.js b/src/message.js
index 365dbf24..891621c7 100644
--- a/src/message.js
+++ b/src/message.js
@@ -3,7 +3,9 @@ import {Context} from './context.js';
 import {sendMessageToTelegramWithContext} from './telegram.js';
 import {handleCommandMessage} from './command.js';
 import {errorToString} from './utils.js';
-import {chatWithOpenAI} from './chat.js';
+import {chatWithLLM} from './chat.js';
+import {isOpenAIEnable} from './openai.js';
+import {isWorkersAIEnable} from './workers-ai.js';
 // import {TelegramMessage, TelegramWebhookRequest} from './type.d.ts';
 
 
@@ -77,8 +79,9 @@ async function msgIgnoreOldMessage(message, context) {
  * @return {Promise}
  */
 async function msgCheckEnvIsReady(message, context) {
-  if (context.openAIKeyFromContext() === null) {
-    return sendMessageToTelegramWithContext(context)('OpenAI API Key Not Set');
+  const llmEnable = isOpenAIEnable(context) || isWorkersAIEnable(context);
+  if (!llmEnable) {
+    return sendMessageToTelegramWithContext(context)('LLM Not Set');
   }
   if (!DATABASE) {
     return sendMessageToTelegramWithContext(context)('DATABASE Not Set');
@@ -278,7 +281,7 @@ async function msgHandleRole(message, context) {
  * @return {Promise}
  */
 async function msgChatWithOpenAI(message, context) {
-  return chatWithOpenAI(message.text, context, null);
+  return chatWithLLM(message.text, context, null);
 }
 
 /**
diff --git a/src/openai.js b/src/openai.js
index 5484ace8..c9358ad6 100644
--- a/src/openai.js
+++ b/src/openai.js
@@ -1,7 +1,6 @@
 /* eslint-disable no-unused-vars */
 import {Context} from './context.js';
 import {DATABASE, ENV} from './env.js';
-import {tokensCounter} from './utils.js';
 
 /**
  * 从流数据中提取内容
@@ -30,6 +29,16 @@ function extractContentFromStreamData(stream) {
   };
 }
 
+/**
+ * @return {boolean}
+ * @param {Context} context
+ */
+export function isOpenAIEnable(context) {
+  const key = context.openAIKeyFromContext();
+  return key && key.length > 0;
+}
+
+
 /**
  * 发送消息到ChatGPT
  *
@@ -39,7 +48,7 @@ function extractContentFromStreamData(stream) {
  * @param {function} onStream
  * @return {Promise}
  */
-async function requestCompletionsFromOpenAI(message, history, context, onStream) {
+export async function requestCompletionsFromOpenAI(message, history, context, onStream) {
   const key = context.openAIKeyFromContext();
   const body = {
     model: ENV.CHAT_MODEL,
@@ -139,33 +148,6 @@ export async function requestImageFromOpenAI(prompt, context) {
   return resp.data[0].url;
 }
 
-/**
- *
- * @param {string} text
- * @param {Context} context
- * @param {function} modifier
- * @param {function} onStream
- * @return {Promise}
- */
-export async function requestCompletionsFromChatGPT(text, context, modifier, onStream) {
-  const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
-  const historyKey = context.SHARE_CONTEXT.chatHistoryKey;
-  let history = await loadHistory(historyKey, context);
-  if (modifier) {
-    const modifierData = modifier(history, text);
-    history = modifierData.history;
-    text = modifierData.text;
-  }
-  const {real: realHistory, original: originalHistory} = history;
-  const answer = await requestCompletionsFromOpenAI(text, realHistory, context, onStream);
-  if (!historyDisable) {
-    originalHistory.push({role: 'user', content: text || '', cosplay: context.SHARE_CONTEXT.role || ''});
-    originalHistory.push({role: 'assistant', content: answer, cosplay: context.SHARE_CONTEXT.role || ''});
-    await DATABASE.put(historyKey, JSON.stringify(originalHistory)).catch(console.error);
-  }
-  return answer;
-}
-
 
 /**
  * 更新当前机器人的用量统计
@@ -198,97 +180,3 @@ async function updateBotUsage(usage, context) {
 
   await DATABASE.put(context.SHARE_CONTEXT.usageKey, JSON.stringify(dbValue));
 }
-
-/**
- * 加载历史TG消息
- *
- * @param {string} key
- * @param {Context} context
- * @return {Promise}
- */
-async function loadHistory(key, context) {
-  const initMessage = {role: 'system', content: context.USER_CONFIG.SYSTEM_INIT_MESSAGE};
-  const historyDisable = ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH <= 0;
-
-  // 判断是否禁用历史记录
-  if (historyDisable) {
-    initMessage.role = ENV.SYSTEM_INIT_MESSAGE_ROLE;
-    return {real: [initMessage], original: [initMessage]};
-  }
-
-  // 加载历史记录
-  let history = [];
-  try {
-    history = JSON.parse(await DATABASE.get(key));
-  } catch (e) {
-    console.error(e);
-  }
-  if (!history || !Array.isArray(history)) {
-    history = [];
-  }
-
-
-  let original = JSON.parse(JSON.stringify(history));
-
-  // 按身份过滤
-  if (context.SHARE_CONTEXT.role) {
-    history = history.filter((chat) => context.SHARE_CONTEXT.role === chat.cosplay);
-  }
-
-  history.forEach((item) => {
-    delete item.cosplay;
-  });
-
-  const counter = await tokensCounter();
-
-  const trimHistory = (list, initLength, maxLength, maxToken) => {
-    // 历史记录超出长度需要裁剪
-    if (list.length > maxLength) {
-      list = list.splice(list.length - maxLength);
-    }
-    // 处理token长度问题
-    let tokenLength = initLength;
-    for (let i = list.length - 1; i >= 0; i--) {
-      const historyItem = list[i];
-      let length = 0;
-      if (historyItem.content) {
-        length = counter(historyItem.content);
-      } else {
-        historyItem.content = '';
-      }
-      // 如果最大长度超过maxToken,裁剪history
-      tokenLength += length;
-      if (tokenLength > maxToken) {
-        list = list.splice(i + 1);
-        break;
-      }
-    }
-    return list;
-  };
-
-  // 裁剪
-  if (ENV.AUTO_TRIM_HISTORY && ENV.MAX_HISTORY_LENGTH > 0) {
-    const initLength = counter(initMessage.content);
-    const roleCount = Math.max(Object.keys(context.USER_DEFINE.ROLE).length, 1);
-    history = trimHistory(history, initLength, ENV.MAX_HISTORY_LENGTH, ENV.MAX_TOKEN_LENGTH);
-    original = trimHistory(original, initLength, ENV.MAX_HISTORY_LENGTH * roleCount, ENV.MAX_TOKEN_LENGTH * roleCount);
-  }
-
-  // 插入init
-  switch (history.length > 0 ? history[0].role : '') {
-    case 'assistant': // 第一条为机器人,替换成init
-    case 'system': // 第一条为system,用新的init替换
-      history[0] = initMessage;
-      break;
-    default:// 默认给第一条插入init
-      history.unshift(initMessage);
-  }
-
-  // 如果第一条是system,替换role为SYSTEM_INIT_MESSAGE_ROLE
-  if (ENV.SYSTEM_INIT_MESSAGE_ROLE !== 'system' && history.length > 0 && history[0].role === 'system') {
-    history[0].role = ENV.SYSTEM_INIT_MESSAGE_ROLE;
-  }
-
-  return {real: history, original: original};
-}
-
diff --git a/src/workers-ai.js b/src/workers-ai.js
new file mode 100644
index 00000000..d42f7e8d
--- /dev/null
+++ b/src/workers-ai.js
@@ -0,0 +1,453 @@
+import {ENV, AI_LLM} from './env.js';
+
+/**
+ * @param {Context} context
+ * @return {boolean}
+ */
+export function isWorkersAIEnable(context) {
+    return AI_LLM !== null;
+    // return ENV.WORKERS_AI_MODEL !== null;
+}
+
+
+/**
+ * 发送消息到Workers AI
+ *
+ * @param {string} message
+ * @param {Array} history
+ * @param {Context} context
+ * @param {function} onStream
+ * @return {Promise}
+ */
+export async function requestCompletionsFromWorkersAI(message, history, context, onStream) {
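+    // NOTE: onStream is not used here; the bundled Ai client returns the whole completion in a single call.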
+
+    const ai = new Ai(AI_LLM);
+    const model = ENV.WORKERS_AI_MODEL || '@cf/meta/llama-2-7b-chat-int8';
+    const request = {
+        messages: [...history || [], {role: 'user', content: message}]
+    };
+    const response = await ai.run(model, request);
+    return response.response;
+}
+
+/* eslint-disable */
+// src/tensor.ts
+var TensorType = /* @__PURE__ */ (TensorType2 => {
+    TensorType2['String'] = 'str';
+    TensorType2['Bool'] = 'bool';
+    TensorType2['Float16'] = 'float16';
+    TensorType2['Float32'] = 'float32';
+    TensorType2['Int16'] = 'int16';
+    TensorType2['Int32'] = 'int32';
+    TensorType2['Int64'] = 'int64';
+    TensorType2['Int8'] = 'int8';
+    TensorType2['Uint16'] = 'uint16';
+    TensorType2['Uint32'] = 'uint32';
+    TensorType2['Uint64'] = 'uint64';
+    TensorType2['Uint8'] = 'uint8';
+    return TensorType2;
+})(TensorType || {});
+var TypedArrayProto = Object.getPrototypeOf(Uint8Array);
+function isArray(value) {
+    return Array.isArray(value) || value instanceof TypedArrayProto;
+}
+function arrLength(obj) {
+    return obj instanceof TypedArrayProto
+        ? obj.length
+        : obj
+            .flat()
+            .reduce(
+                (acc, cur) => acc + (cur instanceof TypedArrayProto ? cur.length : 1),
+                0
+            );
+}
+function ensureShape(shape, value) {
+    if (shape.length === 0 && !isArray(value)) {
+        return;
+    }
+    const count = shape.reduce((acc, v) => {
+        if (!Number.isInteger(v)) {
+            throw new Error(
+                `expected shape to be array-like of integers but found non-integer element "${v}"`
+            );
+        }
+        return acc * v;
+    }, 1);
+    if (count != arrLength(value)) {
+        throw new Error(
+            `invalid shape: expected ${count} elements for shape ${shape} but value array has length ${value.length}`
+        );
+    }
+}
+function ensureType(type, value) {
+    if (isArray(value)) {
+        value.forEach(v => ensureType(type, v));
+        return;
+    }
+    switch (type) {
+        case 'bool' /* Bool */: {
+            if (typeof value === 'boolean') {
+                return;
+            }
+            break;
+        }
+        case 'float16' /* Float16 */:
+        case 'float32' /* Float32 */: {
+            if (typeof value === 'number') {
+                return;
+            }
+            break;
+        }
+        case 'int8' /* Int8 */:
+        case 'uint8' /* Uint8 */:
+        case 'int16' /* Int16 */:
+        case 'uint16' /* Uint16 */:
+        case 'int32' /* Int32 */:
+        case 'uint32' /* Uint32 */: {
+            if (Number.isInteger(value)) {
+                return;
+            }
+            break;
+        }
+        case 'int64' /* Int64 */:
+        case 'uint64' /* Uint64 */: {
+            if (typeof value === 'bigint') {
+                return;
+            }
+            break;
+        }
+        case 'str' /* String */: {
+            if (typeof value === 'string') {
+                return;
+            }
+            break;
+        }
+    }
+    throw new Error(`unexpected type "${type}" with value "${value}".`);
+}
+function serializeType(type, value) {
+    if (isArray(value)) {
+        return [...value].map(v => serializeType(type, v));
+    }
+    switch (type) {
+        case 'str' /* String */:
+        case 'bool' /* Bool */:
+        case 'float16' /* Float16 */:
+        case 'float32' /* Float32 */:
+        case 'int8' /* Int8 */:
+        case 'uint8' /* Uint8 */:
+        case 'int16' /* Int16 */:
+        case 'uint16' /* Uint16 */:
+        case 'uint32' /* Uint32 */:
+        case 'int32' /* Int32 */: {
+            return value;
+        }
+        case 'int64' /* Int64 */:
+        case 'uint64' /* Uint64 */: {
+            return value.toString();
+        }
+    }
+    throw new Error(`unexpected type "${type}" with value "${value}".`);
+}
+function deserializeType(type, value) {
+    if (isArray(value)) {
+        return value.map(v => deserializeType(type, v));
+    }
+    switch (type) {
+        case 'str' /* String */:
+        case 'bool' /* Bool */:
+        case 'float16' /* Float16 */:
+        case 'float32' /* Float32 */:
+        case 'int8' /* Int8 */:
+        case 'uint8' /* Uint8 */:
+        case 'int16' /* Int16 */:
+        case 'uint16' /* Uint16 */:
+        case 'uint32' /* Uint32 */:
+        case 'int32' /* Int32 */: {
+            return value;
+        }
+        case 'int64' /* Int64 */:
+        case 'uint64' /* Uint64 */: {
+            return BigInt(value);
+        }
+    }
+    throw new Error(`unexpected type "${type}" with value "${value}".`);
+}
+var Tensor = class _Tensor {
+    constructor(type, value, opts = {}) {
+        this.type = type;
+        this.value = value;
+        ensureType(type, this.value);
+        if (opts.shape === void 0) {
+            if (isArray(this.value)) {
+                this.shape = [arrLength(value)];
+            } else {
+                this.shape = [];
+            }
+        } else {
+            this.shape = opts.shape;
+        }
+        ensureShape(this.shape, this.value);
+        this.name = opts.name || null;
+    }
+    static fromJSON(obj) {
+        const { type, shape, value, b64Value, name } = obj;
+        const opts = { shape, name };
+        if (b64Value !== void 0) {
+            const value2 = b64ToArray(b64Value, type)[0];
+            return new _Tensor(type, value2, opts);
+        } else {
+            return new _Tensor(type, deserializeType(type, value), opts);
+        }
+    }
+    toJSON() {
+        return {
+            type: this.type,
+            shape: this.shape,
+            name: this.name,
+            value: serializeType(this.type, this.value)
+        };
+    }
+};
+function b64ToArray(base64, type) {
+    const byteString = atob(base64);
+    const bytes = new Uint8Array(byteString.length);
+    for (let i = 0; i < byteString.length; i++) {
+        bytes[i] = byteString.charCodeAt(i);
+    }
+    const arrBuffer = new DataView(bytes.buffer).buffer;
+    switch (type) {
+        case 'float32':
+            return new Float32Array(arrBuffer);
+        case 'float64':
+            return new Float64Array(arrBuffer);
+        case 'int32':
+            return new Int32Array(arrBuffer);
+        case 'int64':
+            return new BigInt64Array(arrBuffer);
+        default:
+            throw Error(`invalid data type for base64 input: ${type}`);
+    }
+}
+
+// src/session.ts
+function parseInputs(inputs) {
+    if (Array.isArray(inputs)) {
+        return inputs.map(input => input.toJSON());
+    }
+    if (inputs !== null && typeof inputs === 'object') {
+        return Object.keys(inputs).map(key => {
+            let tensor = inputs[key].toJSON();
+            tensor.name = key;
+            return tensor;
+        });
+    }
+    throw new Error(`invalid inputs, must be Array<Tensor<TensorType>> | TensorsObject`);
+}
+var InferenceSession = class {
+    constructor(binding, model, options = {}) {
+        this.binding = binding;
+        this.model = model;
+        this.options = options;
+    }
+    async run(inputs, options) {
+        const jsonInputs = parseInputs(inputs);
+        const body = JSON.stringify({
+            input: jsonInputs
+        });
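+        // The serialized tensors are gzip-compressed and POSTed to the binding's /run endpoint below.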
+        const compressedReadableStream = new Response(body).body.pipeThrough(
+            new CompressionStream('gzip')
+        );
+        let routingModel = 'default';
+        if (this.model === '@cf/meta/llama-2-7b-chat-int8') {
+            routingModel = 'llama_2_7b_chat_int8';
+        }
+        const res = await this.binding.fetch('/run', {
+            method: 'POST',
+            body: compressedReadableStream,
+            headers: {
+                'content-encoding': 'gzip',
+                'cf-consn-model-id': this.model,
+                'cf-consn-routing-model': routingModel,
+                ...(this.options?.extraHeaders || {})
+            }
+        });
+        if (!res.ok) {
+            throw new Error(`API returned ${res.status}: ${await res.text()}`);
+        }
+        const { result } = await res.json();
+        const outputByName = {};
+        for (let i = 0, len = result.length; i < len; i++) {
+            const tensor = Tensor.fromJSON(result[i]);
+            const name = tensor.name || 'output' + i;
+            outputByName[name] = tensor;
+        }
+        return outputByName;
+    }
+};
+
+// src/ai.ts
+var modelMappings = {
+    'text-classification': ['@cf/huggingface/distilbert-sst-2-int8'],
+    'text-embeddings': ['@cf/baai/bge-base-en-v1.5'],
+    'speech-recognition': ['@cf/openai/whisper'],
+    'image-classification': ['@cf/microsoft/resnet-50'],
+    'text-generation': ['@cf/meta/llama-2-7b-chat-int8'],
+    translation: ['@cf/meta/m2m100-1.2b']
+};
+var chunkArray = (arr, size) =>
+    arr.length > size
+        ? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
+        : [arr];
+var Ai = class {
+    constructor(binding, options = {}) {
+        this.binding = binding;
+        this.options = options;
+    }
+    async run(model, inputs) {
+        const session = new InferenceSession(
+            this.binding,
+            model,
+            this.options.sessionOptions || {}
+        );
+        let tensorInput;
+        let typedInputs;
+        let outputMap = r => r;
+        const tasks = Object.keys(modelMappings);
+        let task = '';
+        for (var t in tasks) {
+            if (modelMappings[tasks[t]].indexOf(model) !== -1) {
+                task = tasks[t];
+                break;
+            }
+        }
+        switch (task) {
+            case 'text-classification':
+                typedInputs = inputs;
+                tensorInput = [
+                    new Tensor('str' /* String */, [typedInputs.text], {
+                        shape: [[typedInputs.text].length],
+                        name: 'input_text'
+                    })
+                ];
+                outputMap = r => {
+                    return [
+                        {
+                            label: 'NEGATIVE',
+                            score: r.logits.value[0][0]
+                        },
+                        {
+                            label: 'POSITIVE',
+                            score: r.logits.value[0][1]
+                        }
+                    ];
+                };
+                break;
+            case 'text-embeddings':
+                typedInputs = inputs;
+                tensorInput = [
+                    new Tensor(
+                        'str' /* String */,
+                        Array.isArray(typedInputs.text)
+                            ? typedInputs.text
+                            : [typedInputs.text],
+                        {
+                            shape: [
+                                Array.isArray(typedInputs.text)
+                                    ? typedInputs.text.length
+                                    : [typedInputs.text].length
+                            ],
+                            name: 'input_text'
+                        }
+                    )
+                ];
+                outputMap = r => {
+                    if (Array.isArray(r.embeddings.value[0])) {
+                        return {
+                            shape: r.embeddings.shape,
+                            data: r.embeddings.value
+                        };
+                    } else {
+                        return {
+                            shape: r.embeddings.shape,
+                            data: chunkArray(r.embeddings.value, r.embeddings.shape[1])
+                        };
+                    }
+                };
+                break;
+            case 'speech-recognition':
+                typedInputs = inputs;
+                tensorInput = [
+                    new Tensor('uint8' /* Uint8 */, typedInputs.audio, {
+                        shape: [1, typedInputs.audio.length],
+                        name: 'audio'
+                    })
+                ];
+                outputMap = r => {
+                    return { text: r.name.value[0] };
+                };
+                break;
+            case 'text-generation':
+                typedInputs = inputs;
+                let prompt = '';
+                if (typedInputs.messages === void 0) {
+                    prompt = typedInputs.prompt;
+                } else {
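+                    // Build a llama-2 style chat prompt: system turns are wrapped in
+                    // [INST]<<SYS>>...<</SYS>>[/INST], user turns in [INST]...[/INST],
+                    // and assistant turns are appended verbatim.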
+                    for (let i = 0; i < typedInputs.messages.length; i++) {
+                        const inp = typedInputs.messages[i];
+                        switch (inp.role) {
+                            case 'system':
+                                prompt += '[INST]<<SYS>>' + inp.content + '<</SYS>>[/INST]\n';
+                                break;
+                            case 'user':
+                                prompt += '[INST]' + inp.content + '[/INST]\n';
+                                break;
+                            case 'assistant':
+                                prompt += inp.content + '\n';
+                                break;
+                            default:
+                                throw new Error('Invalid role: ' + inp.role);
+                        }
+                    }
+                }
+                tensorInput = [
+                    new Tensor('str' /* String */, [prompt], {
+                        shape: [1],
+                        name: 'INPUT_0'
+                    }),
+                    new Tensor('uint32' /* Uint32 */, [256], {
+                        // sequence length
+                        shape: [1],
+                        name: 'INPUT_1'
+                    })
+                ];
+                outputMap = r => {
+                    return { response: r.name.value[0] };
+                };
+                break;
+            case 'translation':
+                typedInputs = inputs;
+                tensorInput = [
+                    new Tensor('str' /* String */, [typedInputs.text], {
+                        shape: [1, 1],
+                        name: 'text'
+                    }),
+                    new Tensor('str' /* String */, [typedInputs.source_lang || 'en'], {
+                        shape: [1, 1],
+                        name: 'source_lang'
+                    }),
+                    new Tensor('str' /* String */, [typedInputs.target_lang], {
+                        shape: [1, 1],
+                        name: 'target_lang'
+                    })
+                ];
+                outputMap = r => {
+                    return { translated_text: r.name.value[0] };
+                };
+                break;
+            default:
+                throw new Error(`No such model ${model} or task`);
+        }
+        const output = await session.run(tensorInput);
+        return outputMap(output);
+    }
+};
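For reference, the vendored `Ai` wrapper above routes requests by task through `modelMappings`, so the same binding can serve non-chat models. A hedged sketch of the translation task (field names are taken from the `translation` case above; `binding` stands for a configured Workers AI binding and is an assumption here):

```js
// Sketch: translation through the same vendored wrapper.
const ai = new Ai(binding); // assumption: `binding` is a Workers AI binding such as env.AI_LLM
const out = await ai.run('@cf/meta/m2m100-1.2b', {
  text: 'Hello, world',
  source_lang: 'en',
  target_lang: 'zh',
});
console.log(out.translated_text);
```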
diff --git a/yarn.lock b/yarn.lock
new file mode 100644
index 00000000..191164b7
--- /dev/null
+++ b/yarn.lock
@@ -0,0 +1,1189 @@
+# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
+# yarn lockfile v1
+
+
+"@aashutoshrathi/word-wrap@^1.2.3":
+  version "1.2.6"
+  resolved "https://registry.npmmirror.com/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz#bd9154aec9983f77b3a034ecaa015c2e4201f6cf"
+  integrity sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==
+
+"@cloudflare/kv-asset-handler@^0.2.0":
+  version "0.2.0"
+  resolved "https://registry.npmmirror.com/@cloudflare/kv-asset-handler/-/kv-asset-handler-0.2.0.tgz#c9959bbd7a1c40bd7c674adae98aa8c8d0e5ca68"
+  integrity sha512-MVbXLbTcAotOPUj0pAMhVtJ+3/kFkwJqc5qNOleOZTv6QkZZABDMS21dSrSlVswEHwrpWC03e4fWytjqKvuE2A==
+  dependencies:
+    mime "^3.0.0"
+
+"@cloudflare/workerd-darwin-64@1.20231002.0":
+  version "1.20231002.0"
+  resolved "https://registry.npmmirror.com/@cloudflare/workerd-darwin-64/-/workerd-darwin-64-1.20231002.0.tgz#ec0d4f926c1a693a145df26b2b71dfb4f9df18ae"
+  integrity sha512-sgtjzVO/wtI/6S7O0bk4zQAv2xlvqOxB18AXzlit6uXgbYFGeQedRHjhKVMOacGmWEnM4C3ir/fxJGsc3Pyxng==
+
+"@cloudflare/workerd-darwin-arm64@1.20231002.0":
+  version "1.20231002.0"
+  resolved "https://registry.npmmirror.com/@cloudflare/workerd-darwin-arm64/-/workerd-darwin-arm64-1.20231002.0.tgz#fc4e64b91d0c7ebab76ae770133a32d0de54c1a5"
+  integrity sha512-dv8nztYFaTYYgBpyy80vc4hdMYv9mhyNbvBsZywm8S7ivcIpzogi0UKkGU4E/G0lYK6W3WtwTBqwRe+pXJ1+Ww==
+
+"@cloudflare/workerd-linux-64@1.20231002.0":
+  version "1.20231002.0"
+  resolved "https://registry.npmmirror.com/@cloudflare/workerd-linux-64/-/workerd-linux-64-1.20231002.0.tgz#26d9bd2b4d6a20e7923cb95c62667dd47012f797"
+  integrity sha512-UG8SlLcGzaQDSSw6FR4+Zf408925wkLOCAi8w5qEoFYu3g4Ef7ZenstesCOsyWL7qBDKx0/iwk6+a76W5IHI0Q==
+
+"@cloudflare/workerd-linux-arm64@1.20231002.0":
+  version "1.20231002.0"
+  resolved "https://registry.npmmirror.com/@cloudflare/workerd-linux-arm64/-/workerd-linux-arm64-1.20231002.0.tgz#e93c3a1f6150e041bcb909fa521c9106f2c9e070"
+  integrity sha512-GPaa66ZSq1gK09r87c5CJbHIApcIU//LVHz3rnUxK0//00YCwUuGUUK1dn/ylg+fVqDQxIDmH+ABnobBanvcDA==
+
+"@cloudflare/workerd-windows-64@1.20231002.0":
+  version "1.20231002.0"
+  resolved "https://registry.npmmirror.com/@cloudflare/workerd-windows-64/-/workerd-windows-64-1.20231002.0.tgz#05fe9a960bcdd2887d6ee191c831c6e4e3d4052e"
+  integrity sha512-ybIy+sCme0VO0RscndXvqWNBaRMUOc8vhi+1N2h/KDsKfNLsfEQph+XWecfKzJseUy1yE2rV1xei3BaNmaa6vg==
+
+"@esbuild-plugins/node-globals-polyfill@^0.2.3":
+  version "0.2.3"
+  resolved "https://registry.npmmirror.com/@esbuild-plugins/node-globals-polyfill/-/node-globals-polyfill-0.2.3.tgz#0e4497a2b53c9e9485e149bc92ddb228438d6bcf"
+  integrity sha512-r3MIryXDeXDOZh7ih1l/yE9ZLORCd5e8vWg02azWRGj5SPTuoh69A2AIyn0Z31V/kHBfZ4HgWJ+OK3GTTwLmnw==
+
+"@esbuild-plugins/node-modules-polyfill@^0.2.2":
+  version "0.2.2"
+  resolved "https://registry.npmmirror.com/@esbuild-plugins/node-modules-polyfill/-/node-modules-polyfill-0.2.2.tgz#cefa3dc0bd1c16277a8338b52833420c94987327"
+  integrity sha512-LXV7QsWJxRuMYvKbiznh+U1ilIop3g2TeKRzUxOG5X3YITc8JyyTa90BmLwqqv0YnX4v32CSlG+vsziZp9dMvA==
+  dependencies:
+    escape-string-regexp "^4.0.0"
+    rollup-plugin-node-polyfills "^0.2.1"
+
+"@esbuild/android-arm64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/android-arm64/-/android-arm64-0.17.19.tgz#bafb75234a5d3d1b690e7c2956a599345e84a2fd"
+  integrity sha512-KBMWvEZooR7+kzY0BtbTQn0OAYY7CsiydT63pVEaPtVYF0hXbUaOyZog37DKxK7NF3XacBJOpYT4adIJh+avxA==
+
+"@esbuild/android-arm@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/android-arm/-/android-arm-0.17.19.tgz#5898f7832c2298bc7d0ab53701c57beb74d78b4d"
+  integrity sha512-rIKddzqhmav7MSmoFCmDIb6e2W57geRsM94gV2l38fzhXMwq7hZoClug9USI2pFRGL06f4IOPHHpFNOkWieR8A==
+
+"@esbuild/android-x64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/android-x64/-/android-x64-0.17.19.tgz#658368ef92067866d95fb268719f98f363d13ae1"
+  integrity sha512-uUTTc4xGNDT7YSArp/zbtmbhO0uEEK9/ETW29Wk1thYUJBz3IVnvgEiEwEa9IeLyvnpKrWK64Utw2bgUmDveww==
+
+"@esbuild/darwin-arm64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/darwin-arm64/-/darwin-arm64-0.17.19.tgz#584c34c5991b95d4d48d333300b1a4e2ff7be276"
+  integrity sha512-80wEoCfF/hFKM6WE1FyBHc9SfUblloAWx6FJkFWTWiCoht9Mc0ARGEM47e67W9rI09YoUxJL68WHfDRYEAvOhg==
+
+"@esbuild/darwin-x64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/darwin-x64/-/darwin-x64-0.17.19.tgz#7751d236dfe6ce136cce343dce69f52d76b7f6cb"
+  integrity sha512-IJM4JJsLhRYr9xdtLytPLSH9k/oxR3boaUIYiHkAawtwNOXKE8KoU8tMvryogdcT8AU+Bflmh81Xn6Q0vTZbQw==
+
+"@esbuild/freebsd-arm64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.19.tgz#cacd171665dd1d500f45c167d50c6b7e539d5fd2"
+  integrity sha512-pBwbc7DufluUeGdjSU5Si+P3SoMF5DQ/F/UmTSb8HXO80ZEAJmrykPyzo1IfNbAoaqw48YRpv8shwd1NoI0jcQ==
+
+"@esbuild/freebsd-x64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/freebsd-x64/-/freebsd-x64-0.17.19.tgz#0769456eee2a08b8d925d7c00b79e861cb3162e4"
+  integrity sha512-4lu+n8Wk0XlajEhbEffdy2xy53dpR06SlzvhGByyg36qJw6Kpfk7cp45DR/62aPH9mtJRmIyrXAS5UWBrJT6TQ==
+
+"@esbuild/linux-arm64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-arm64/-/linux-arm64-0.17.19.tgz#38e162ecb723862c6be1c27d6389f48960b68edb"
+  integrity sha512-ct1Tg3WGwd3P+oZYqic+YZF4snNl2bsnMKRkb3ozHmnM0dGWuxcPTTntAF6bOP0Sp4x0PjSF+4uHQ1xvxfRKqg==
+
+"@esbuild/linux-arm@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-arm/-/linux-arm-0.17.19.tgz#1a2cd399c50040184a805174a6d89097d9d1559a"
+  integrity sha512-cdmT3KxjlOQ/gZ2cjfrQOtmhG4HJs6hhvm3mWSRDPtZ/lP5oe8FWceS10JaSJC13GBd4eH/haHnqf7hhGNLerA==
+
+"@esbuild/linux-ia32@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-ia32/-/linux-ia32-0.17.19.tgz#e28c25266b036ce1cabca3c30155222841dc035a"
+  integrity sha512-w4IRhSy1VbsNxHRQpeGCHEmibqdTUx61Vc38APcsRbuVgK0OPEnQ0YD39Brymn96mOx48Y2laBQGqgZ0j9w6SQ==
+
+"@esbuild/linux-loong64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-loong64/-/linux-loong64-0.17.19.tgz#0f887b8bb3f90658d1a0117283e55dbd4c9dcf72"
+  integrity sha512-2iAngUbBPMq439a+z//gE+9WBldoMp1s5GWsUSgqHLzLJ9WoZLZhpwWuym0u0u/4XmZ3gpHmzV84PonE+9IIdQ==
+
+"@esbuild/linux-mips64el@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-mips64el/-/linux-mips64el-0.17.19.tgz#f5d2a0b8047ea9a5d9f592a178ea054053a70289"
+  integrity sha512-LKJltc4LVdMKHsrFe4MGNPp0hqDFA1Wpt3jE1gEyM3nKUvOiO//9PheZZHfYRfYl6AwdTH4aTcXSqBerX0ml4A==
+
+"@esbuild/linux-ppc64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-ppc64/-/linux-ppc64-0.17.19.tgz#876590e3acbd9fa7f57a2c7d86f83717dbbac8c7"
+  integrity sha512-/c/DGybs95WXNS8y3Ti/ytqETiW7EU44MEKuCAcpPto3YjQbyK3IQVKfF6nbghD7EcLUGl0NbiL5Rt5DMhn5tg==
+
+"@esbuild/linux-riscv64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-riscv64/-/linux-riscv64-0.17.19.tgz#7f49373df463cd9f41dc34f9b2262d771688bf09"
+  integrity sha512-FC3nUAWhvFoutlhAkgHf8f5HwFWUL6bYdvLc/TTuxKlvLi3+pPzdZiFKSWz/PF30TB1K19SuCxDTI5KcqASJqA==
+
+"@esbuild/linux-s390x@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-s390x/-/linux-s390x-0.17.19.tgz#e2afd1afcaf63afe2c7d9ceacd28ec57c77f8829"
+  integrity sha512-IbFsFbxMWLuKEbH+7sTkKzL6NJmG2vRyy6K7JJo55w+8xDk7RElYn6xvXtDW8HCfoKBFK69f3pgBJSUSQPr+4Q==
+
+"@esbuild/linux-x64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/linux-x64/-/linux-x64-0.17.19.tgz#8a0e9738b1635f0c53389e515ae83826dec22aa4"
+  integrity sha512-68ngA9lg2H6zkZcyp22tsVt38mlhWde8l3eJLWkyLrp4HwMUr3c1s/M2t7+kHIhvMjglIBrFpncX1SzMckomGw==
+
+"@esbuild/netbsd-x64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/netbsd-x64/-/netbsd-x64-0.17.19.tgz#c29fb2453c6b7ddef9a35e2c18b37bda1ae5c462"
+  integrity sha512-CwFq42rXCR8TYIjIfpXCbRX0rp1jo6cPIUPSaWwzbVI4aOfX96OXY8M6KNmtPcg7QjYeDmN+DD0Wp3LaBOLf4Q==
+
+"@esbuild/openbsd-x64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/openbsd-x64/-/openbsd-x64-0.17.19.tgz#95e75a391403cb10297280d524d66ce04c920691"
+  integrity sha512-cnq5brJYrSZ2CF6c35eCmviIN3k3RczmHz8eYaVlNasVqsNY+JKohZU5MKmaOI+KkllCdzOKKdPs762VCPC20g==
+
+"@esbuild/sunos-x64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/sunos-x64/-/sunos-x64-0.17.19.tgz#722eaf057b83c2575937d3ffe5aeb16540da7273"
+  integrity sha512-vCRT7yP3zX+bKWFeP/zdS6SqdWB8OIpaRq/mbXQxTGHnIxspRtigpkUcDMlSCOejlHowLqII7K2JKevwyRP2rg==
+
+"@esbuild/win32-arm64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/win32-arm64/-/win32-arm64-0.17.19.tgz#9aa9dc074399288bdcdd283443e9aeb6b9552b6f"
+  integrity sha512-yYx+8jwowUstVdorcMdNlzklLYhPxjniHWFKgRqH7IFlUEa0Umu3KuYplf1HUZZ422e3NU9F4LGb+4O0Kdcaag==
+
+"@esbuild/win32-ia32@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/win32-ia32/-/win32-ia32-0.17.19.tgz#95ad43c62ad62485e210f6299c7b2571e48d2b03"
+  integrity sha512-eggDKanJszUtCdlVs0RB+h35wNlb5v4TWEkq4vZcmVt5u/HiDZrTXe2bWFQUez3RgNHwx/x4sk5++4NSSicKkw==
+
+"@esbuild/win32-x64@0.17.19":
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/@esbuild/win32-x64/-/win32-x64-0.17.19.tgz#8cfaf2ff603e9aabb910e9c0558c26cf32744061"
+  integrity sha512-lAhycmKnVOuRYNtRtatQR1LPQf2oYCkRGkSFnseDAKPl8lu5SOsK/e1sXe5a0Pc5kHIHe6P2I/ilntNv2xf3cA==
+
+"@eslint-community/eslint-utils@^4.2.0":
+  version "4.4.0"
+  resolved "https://registry.npmmirror.com/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz#a23514e8fb9af1269d5f7788aa556798d61c6b59"
+  integrity sha512-1/sA4dwrzBAyeUoQ6oxahHKmrZvsnLCg4RfxW3ZFGGmQkSNQPFNLV9CUEFQP1x9EYXHTo5p6xdhZM1Ne9p/AfA==
+  dependencies:
+    eslint-visitor-keys "^3.3.0"
+
+"@eslint-community/regexpp@^4.6.1":
+  version "4.9.1"
+  resolved "https://registry.npmmirror.com/@eslint-community/regexpp/-/regexpp-4.9.1.tgz#449dfa81a57a1d755b09aa58d826c1262e4283b4"
+  integrity sha512-Y27x+MBLjXa+0JWDhykM3+JE+il3kHKAEqabfEWq3SDhZjLYb6/BHL/JKFnH3fe207JaXkyDo685Oc2Glt6ifA==
+
+"@eslint/eslintrc@^2.1.2":
+  version "2.1.2"
+  resolved "https://registry.npmmirror.com/@eslint/eslintrc/-/eslintrc-2.1.2.tgz#c6936b4b328c64496692f76944e755738be62396"
+  integrity sha512-+wvgpDsrB1YqAMdEUCcnTlpfVBH7Vqn6A/NT3D8WVXFIaKMlErPIZT3oCIAVCOtarRpMtelZLqJeU3t7WY6X6g==
+  dependencies:
+    ajv "^6.12.4"
+    debug "^4.3.2"
+    espree "^9.6.0"
+    globals "^13.19.0"
+    ignore "^5.2.0"
+    import-fresh "^3.2.1"
+    js-yaml "^4.1.0"
+    minimatch "^3.1.2"
+    strip-json-comments "^3.1.1"
+
+"@eslint/js@8.51.0":
+  version "8.51.0"
+  resolved "https://registry.npmmirror.com/@eslint/js/-/js-8.51.0.tgz#6d419c240cfb2b66da37df230f7e7eef801c32fa"
+  integrity sha512-HxjQ8Qn+4SI3/AFv6sOrDB+g6PpUTDwSJiQqOrnneEk8L71161srI9gjzzZvYVbzHiVg/BvcH95+cK/zfIt4pg==
+
+"@fastify/busboy@^2.0.0":
+  version "2.0.0"
+  resolved "https://registry.npmmirror.com/@fastify/busboy/-/busboy-2.0.0.tgz#f22824caff3ae506b18207bad4126dbc6ccdb6b8"
+  integrity sha512-JUFJad5lv7jxj926GPgymrWQxxjPYuJNiNjNMzqT+HiuP6Vl3dk5xzG+8sTX96np0ZAluvaMzPsjhHZ5rNuNQQ==
+
+"@humanwhocodes/config-array@^0.11.11":
+  version "0.11.11"
+  resolved "https://registry.npmmirror.com/@humanwhocodes/config-array/-/config-array-0.11.11.tgz#88a04c570dbbc7dd943e4712429c3df09bc32844"
+  integrity sha512-N2brEuAadi0CcdeMXUkhbZB84eskAc8MEX1By6qEchoVywSgXPIjou4rYsl0V3Hj0ZnuGycGCjdNgockbzeWNA==
+  dependencies:
+    "@humanwhocodes/object-schema" "^1.2.1"
+    debug "^4.1.1"
+    minimatch "^3.0.5"
+
+"@humanwhocodes/module-importer@^1.0.1":
+  version "1.0.1"
+  resolved "https://registry.npmmirror.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c"
+  integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==
+
+"@humanwhocodes/object-schema@^1.2.1":
+  version "1.2.1"
+  resolved "https://registry.npmmirror.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45"
+  integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA==
+
+"@nodelib/fs.scandir@2.1.5":
+  version "2.1.5"
+  resolved "https://registry.npmmirror.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5"
+  integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==
+  dependencies:
+    "@nodelib/fs.stat" "2.0.5"
+    run-parallel "^1.1.9"
+
+"@nodelib/fs.stat@2.0.5":
+  version "2.0.5"
+  resolved "https://registry.npmmirror.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b"
+  integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==
+
+"@nodelib/fs.walk@^1.2.8":
+  version "1.2.8"
+  resolved "https://registry.npmmirror.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a"
+  integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==
+  dependencies:
+    "@nodelib/fs.scandir" "2.1.5"
+    fastq "^1.6.0"
+
+acorn-jsx@^5.3.2:
+  version "5.3.2"
+  resolved "https://registry.npmmirror.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
+  integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
+
+acorn-walk@^8.2.0:
+  version "8.2.0"
+  resolved "https://registry.npmmirror.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1"
+  integrity sha512-k+iyHEuPgSw6SbuDpGQM+06HQUa04DZ3o+F6CSzXMvvI5KMvnaEqXe+YVe555R9nn6GPt404fos4wcgpw12SDA==
+
+acorn@^8.8.0, acorn@^8.9.0:
+  version "8.10.0"
+  resolved "https://registry.npmmirror.com/acorn/-/acorn-8.10.0.tgz#8be5b3907a67221a81ab23c7889c4c5526b62ec5"
+  integrity sha512-F0SAmZ8iUtS//m8DmCTA0jlh6TDKkHQyK6xc6V4KDTyZKA9dnvX9/3sRTVQrWm79glUAZbnmmNcdYwUIHWVybw==
+
+ajv@^6.12.4:
+  version "6.12.6"
+  resolved "https://registry.npmmirror.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4"
+  integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==
+  dependencies:
+    fast-deep-equal "^3.1.1"
+    fast-json-stable-stringify "^2.0.0"
+    json-schema-traverse "^0.4.1"
+    uri-js "^4.2.2"
+
+ansi-regex@^5.0.1:
+  version "5.0.1"
+  resolved "https://registry.npmmirror.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
+  integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
+
+ansi-styles@^4.1.0:
+  version "4.3.0"
+  resolved "https://registry.npmmirror.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
+  integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==
+  dependencies:
+    color-convert "^2.0.1"
+
+anymatch@~3.1.2:
+  version "3.1.3"
+  resolved "https://registry.npmmirror.com/anymatch/-/anymatch-3.1.3.tgz#790c58b19ba1720a84205b57c618d5ad8524973e"
+  integrity sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==
+  dependencies:
+    normalize-path "^3.0.0"
+    picomatch "^2.0.4"
+
+argparse@^2.0.1:
+  version "2.0.1"
+  resolved "https://registry.npmmirror.com/argparse/-/argparse-2.0.1.tgz#246f50f3ca78a3240f6c997e8a9bd1eac49e4b38"
+  integrity sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==
+
+as-table@^1.0.36:
+  version "1.0.55"
+  resolved "https://registry.npmmirror.com/as-table/-/as-table-1.0.55.tgz#dc984da3937745de902cea1d45843c01bdbbec4f"
+  integrity sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==
+  dependencies:
+    printable-characters "^1.0.42"
+
+balanced-match@^1.0.0:
+  version "1.0.2"
+  resolved "https://registry.npmmirror.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee"
+  integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==
+
+binary-extensions@^2.0.0:
+  version "2.2.0"
+  resolved "https://registry.npmmirror.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d"
+  integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==
+
+blake3-wasm@^2.1.5:
+  version "2.1.5"
+  resolved "https://registry.npmmirror.com/blake3-wasm/-/blake3-wasm-2.1.5.tgz#b22dbb84bc9419ed0159caa76af4b1b132e6ba52"
+  integrity sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==
+
+brace-expansion@^1.1.7:
+  version "1.1.11"
+  resolved "https://registry.npmmirror.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
+  integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==
+  dependencies:
+    balanced-match "^1.0.0"
+    concat-map "0.0.1"
+
+braces@~3.0.2:
+  version "3.0.2"
+  resolved "https://registry.npmmirror.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
+  integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
+  dependencies:
+    fill-range "^7.0.1"
+
+buffer-from@^1.0.0:
+  version "1.1.2"
+  resolved "https://registry.npmmirror.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5"
+  integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==
+
+callsites@^3.0.0:
+  version "3.1.0"
+  resolved "https://registry.npmmirror.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73"
+  integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==
+
+capnp-ts@^0.7.0:
+  version "0.7.0"
+  resolved "https://registry.npmmirror.com/capnp-ts/-/capnp-ts-0.7.0.tgz#16fd8e76b667d002af8fcf4bf92bf15d1a7b54a9"
+  integrity sha512-XKxXAC3HVPv7r674zP0VC3RTXz+/JKhfyw94ljvF80yynK6VkTnqE3jMuN8b3dUVmmc43TjyxjW4KTsmB3c86g==
+  dependencies:
+    debug "^4.3.1"
+    tslib "^2.2.0"
+
+chalk@^4.0.0:
+  version "4.1.2"
+  resolved "https://registry.npmmirror.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
+  integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
+  dependencies:
+    ansi-styles "^4.1.0"
+    supports-color "^7.1.0"
+
+chokidar@^3.5.3:
+  version "3.5.3"
+  resolved "https://registry.npmmirror.com/chokidar/-/chokidar-3.5.3.tgz#1cf37c8707b932bd1af1ae22c0432e2acd1903bd"
+  integrity sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==
+  dependencies:
+    anymatch "~3.1.2"
+    braces "~3.0.2"
+    glob-parent "~5.1.2"
+    is-binary-path "~2.1.0"
+    is-glob "~4.0.1"
+    normalize-path "~3.0.0"
+    readdirp "~3.6.0"
+  optionalDependencies:
+    fsevents "~2.3.2"
+
+color-convert@^2.0.1:
+  version "2.0.1"
+  resolved "https://registry.npmmirror.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
+  integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
+  dependencies:
+    color-name "~1.1.4"
+
+color-name@~1.1.4:
+  version "1.1.4"
+  resolved "https://registry.npmmirror.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
+  integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
+
+concat-map@0.0.1:
+  version "0.0.1"
+  resolved "https://registry.npmmirror.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
+  integrity sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==
+
+cookie@^0.5.0:
+  version "0.5.0"
+  resolved "https://registry.npmmirror.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b"
+  integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==
+
+cross-spawn@^7.0.2:
+  version "7.0.3"
+  resolved "https://registry.npmmirror.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6"
+  integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==
+  dependencies:
+    path-key "^3.1.0"
+    shebang-command "^2.0.0"
+    which "^2.0.1"
+
+data-uri-to-buffer@^2.0.0:
+  version "2.0.2"
+  resolved "https://registry.npmmirror.com/data-uri-to-buffer/-/data-uri-to-buffer-2.0.2.tgz#d296973d5a4897a5dbe31716d118211921f04770"
+  integrity sha512-ND9qDTLc6diwj+Xe5cdAgVTbLVdXbtxTJRXRhli8Mowuaan+0EJOtdqJ0QCHNSSPyoXGx9HX2/VMnKeC34AChA==
+
+debug@^4.1.1, debug@^4.3.1, debug@^4.3.2:
+  version "4.3.4"
+  resolved "https://registry.npmmirror.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865"
+  integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==
+  dependencies:
+    ms "2.1.2"
+
+deep-is@^0.1.3:
+  version "0.1.4"
+  resolved "https://registry.npmmirror.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831"
+  integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==
+
+doctrine@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.npmmirror.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961"
+  integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==
+  dependencies:
+    esutils "^2.0.2"
+
+esbuild@0.17.19, esbuild@^0.17.11:
+  version "0.17.19"
+  resolved "https://registry.npmmirror.com/esbuild/-/esbuild-0.17.19.tgz#087a727e98299f0462a3d0bcdd9cd7ff100bd955"
+  integrity sha512-XQ0jAPFkK/u3LcVRcvVHQcTIqD6E2H1fvZMA5dQPSOWb3suUbWbfbRf94pjc0bNzRYLfIrDRQXr7X+LHIm5oHw==
+  optionalDependencies:
+    "@esbuild/android-arm" "0.17.19"
+    "@esbuild/android-arm64" "0.17.19"
+    "@esbuild/android-x64" "0.17.19"
+    "@esbuild/darwin-arm64" "0.17.19"
+    "@esbuild/darwin-x64" "0.17.19"
+    "@esbuild/freebsd-arm64" "0.17.19"
+    "@esbuild/freebsd-x64" "0.17.19"
+    "@esbuild/linux-arm" "0.17.19"
+    "@esbuild/linux-arm64" "0.17.19"
+    "@esbuild/linux-ia32" "0.17.19"
+    "@esbuild/linux-loong64" "0.17.19"
+    "@esbuild/linux-mips64el" "0.17.19"
+    "@esbuild/linux-ppc64" "0.17.19"
+    "@esbuild/linux-riscv64" "0.17.19"
+    "@esbuild/linux-s390x" "0.17.19"
+    "@esbuild/linux-x64" "0.17.19"
+    "@esbuild/netbsd-x64" "0.17.19"
+    "@esbuild/openbsd-x64" "0.17.19"
+    "@esbuild/sunos-x64" "0.17.19"
+    "@esbuild/win32-arm64" "0.17.19"
+    "@esbuild/win32-ia32" "0.17.19"
+    "@esbuild/win32-x64" "0.17.19"
+
+escape-string-regexp@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.npmmirror.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34"
+  integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==
+
+eslint-config-google@^0.14.0:
+  version "0.14.0"
+  resolved "https://registry.npmmirror.com/eslint-config-google/-/eslint-config-google-0.14.0.tgz#4f5f8759ba6e11b424294a219dbfa18c508bcc1a"
+  integrity sha512-WsbX4WbjuMvTdeVL6+J3rK1RGhCTqjsFjX7UMSMgZiyxxaNLkoJENbrGExzERFeoTpGw3F3FypTiWAP9ZXzkEw==
+
+eslint-scope@^7.2.2:
+  version "7.2.2"
+  resolved "https://registry.npmmirror.com/eslint-scope/-/eslint-scope-7.2.2.tgz#deb4f92563390f32006894af62a22dba1c46423f"
+  integrity sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==
+  dependencies:
+    esrecurse "^4.3.0"
+    estraverse "^5.2.0"
+
+eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3:
+  version "3.4.3"
+  resolved "https://registry.npmmirror.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800"
+  integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==
+
+eslint@>=5.16.0:
+  version "8.51.0"
+  resolved "https://registry.npmmirror.com/eslint/-/eslint-8.51.0.tgz#4a82dae60d209ac89a5cff1604fea978ba4950f3"
+  integrity sha512-2WuxRZBrlwnXi+/vFSJyjMqrNjtJqiasMzehF0shoLaW7DzS3/9Yvrmq5JiT66+pNjiX4UBnLDiKHcWAr/OInA==
+  dependencies:
+    "@eslint-community/eslint-utils" "^4.2.0"
+    "@eslint-community/regexpp" "^4.6.1"
+    "@eslint/eslintrc" "^2.1.2"
+    "@eslint/js" "8.51.0"
+    "@humanwhocodes/config-array" "^0.11.11"
+    "@humanwhocodes/module-importer" "^1.0.1"
+    "@nodelib/fs.walk" "^1.2.8"
+    ajv "^6.12.4"
+    chalk "^4.0.0"
+    cross-spawn "^7.0.2"
+    debug "^4.3.2"
+    doctrine "^3.0.0"
+    escape-string-regexp "^4.0.0"
+    eslint-scope "^7.2.2"
+    eslint-visitor-keys "^3.4.3"
+    espree "^9.6.1"
+    esquery "^1.4.2"
+    esutils "^2.0.2"
+    fast-deep-equal "^3.1.3"
+    file-entry-cache "^6.0.1"
+    find-up "^5.0.0"
+    glob-parent "^6.0.2"
+    globals "^13.19.0"
+    graphemer "^1.4.0"
+    ignore "^5.2.0"
+    imurmurhash "^0.1.4"
+    is-glob "^4.0.0"
+    is-path-inside "^3.0.3"
+    js-yaml "^4.1.0"
+    json-stable-stringify-without-jsonify "^1.0.1"
+    levn "^0.4.1"
+    lodash.merge "^4.6.2"
+    minimatch "^3.1.2"
+    natural-compare "^1.4.0"
+    optionator "^0.9.3"
+    strip-ansi "^6.0.1"
+    text-table "^0.2.0"
+
+espree@^9.6.0, espree@^9.6.1:
+  version "9.6.1"
+  resolved "https://registry.npmmirror.com/espree/-/espree-9.6.1.tgz#a2a17b8e434690a5432f2f8018ce71d331a48c6f"
+  integrity sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==
+  dependencies:
+    acorn "^8.9.0"
+    acorn-jsx "^5.3.2"
+    eslint-visitor-keys "^3.4.1"
+
+esquery@^1.4.2:
+  version "1.5.0"
+  resolved "https://registry.npmmirror.com/esquery/-/esquery-1.5.0.tgz#6ce17738de8577694edd7361c57182ac8cb0db0b"
+  integrity sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==
+  dependencies:
+    estraverse "^5.1.0"
+
+esrecurse@^4.3.0:
+  version "4.3.0"
+  resolved "https://registry.npmmirror.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921"
+  integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==
+  dependencies:
+    estraverse "^5.2.0"
+
+estraverse@^5.1.0, estraverse@^5.2.0:
+  version "5.3.0"
+  resolved "https://registry.npmmirror.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123"
+  integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==
+
+estree-walker@^0.6.1:
+  version "0.6.1"
+  resolved "https://registry.npmmirror.com/estree-walker/-/estree-walker-0.6.1.tgz#53049143f40c6eb918b23671d1fe3219f3a1b362"
+  integrity sha512-SqmZANLWS0mnatqbSfRP5g8OXZC12Fgg1IwNtLsyHDzJizORW4khDfjPqJZsemPWBB2uqykUah5YpQ6epsqC/w==
+
+esutils@^2.0.2:
+  version "2.0.3"
+  resolved "https://registry.npmmirror.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64"
+  integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==
+
+exit-hook@^2.2.1:
+  version "2.2.1"
+  resolved "https://registry.npmmirror.com/exit-hook/-/exit-hook-2.2.1.tgz#007b2d92c6428eda2b76e7016a34351586934593"
+  integrity sha512-eNTPlAD67BmP31LDINZ3U7HSF8l57TxOY2PmBJ1shpCvpnxBF93mWCE8YHBnXs8qiUZJc9WDcWIeC3a2HIAMfw==
+
+fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3:
+  version "3.1.3"
+  resolved "https://registry.npmmirror.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525"
+  integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==
+
+fast-json-stable-stringify@^2.0.0:
+  version "2.1.0"
+  resolved "https://registry.npmmirror.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633"
+  integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==
+
+fast-levenshtein@^2.0.6:
+  version "2.0.6"
+  resolved "https://registry.npmmirror.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
+  integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==
+
+fastq@^1.6.0:
+  version "1.15.0"
+  resolved "https://registry.npmmirror.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a"
+  integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==
+  dependencies:
+    reusify "^1.0.4"
+
+file-entry-cache@^6.0.1:
+  version "6.0.1"
+  resolved "https://registry.npmmirror.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027"
+  integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==
+  dependencies:
+    flat-cache "^3.0.4"
+
+fill-range@^7.0.1:
+  version "7.0.1"
+  resolved "https://registry.npmmirror.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
+  integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
+  dependencies:
+    to-regex-range "^5.0.1"
+
+find-up@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.npmmirror.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc"
+  integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==
+  dependencies:
+    locate-path "^6.0.0"
+    path-exists "^4.0.0"
+
+flat-cache@^3.0.4:
+  version "3.1.1"
+  resolved "https://registry.npmmirror.com/flat-cache/-/flat-cache-3.1.1.tgz#a02a15fdec25a8f844ff7cc658f03dd99eb4609b"
+  integrity sha512-/qM2b3LUIaIgviBQovTLvijfyOQXPtSRnRK26ksj2J7rzPIecePUIpJsZ4T02Qg+xiAEKIs5K8dsHEd+VaKa/Q==
+  dependencies:
+    flatted "^3.2.9"
+    keyv "^4.5.3"
+    rimraf "^3.0.2"
+
+flatted@^3.2.9:
+  version "3.2.9"
+  resolved "https://registry.npmmirror.com/flatted/-/flatted-3.2.9.tgz#7eb4c67ca1ba34232ca9d2d93e9886e611ad7daf"
+  integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==
+
+fs.realpath@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.npmmirror.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f"
+  integrity sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==
+
+fsevents@~2.3.2:
+  version "2.3.3"
+  resolved "https://registry.npmmirror.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6"
+  integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==
+
+get-source@^2.0.12:
+  version "2.0.12"
+  resolved "https://registry.npmmirror.com/get-source/-/get-source-2.0.12.tgz#0b47d57ea1e53ce0d3a69f4f3d277eb8047da944"
+  integrity sha512-X5+4+iD+HoSeEED+uwrQ07BOQr0kEDFMVqqpBuI+RaZBpBpHCuXxo70bjar6f0b0u/DQJsJ7ssurpP0V60Az+w==
+  dependencies:
+    data-uri-to-buffer "^2.0.0"
+    source-map "^0.6.1"
+
+glob-parent@^6.0.2:
+  version "6.0.2"
+  resolved "https://registry.npmmirror.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3"
+  integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==
+  dependencies:
+    is-glob "^4.0.3"
+
+glob-parent@~5.1.2:
+  version "5.1.2"
+  resolved "https://registry.npmmirror.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
+  integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
+  dependencies:
+    is-glob "^4.0.1"
+
+glob-to-regexp@^0.4.1:
+  version "0.4.1"
+  resolved "https://registry.npmmirror.com/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz#c75297087c851b9a578bd217dd59a92f59fe546e"
+  integrity sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==
+
+glob@^7.1.3:
+  version "7.2.3"
+  resolved "https://registry.npmmirror.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b"
+  integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==
+  dependencies:
+    fs.realpath "^1.0.0"
+    inflight "^1.0.4"
+    inherits "2"
+    minimatch "^3.1.1"
+    once "^1.3.0"
+    path-is-absolute "^1.0.0"
+
+globals@^13.19.0:
+  version "13.23.0"
+  resolved "https://registry.npmmirror.com/globals/-/globals-13.23.0.tgz#ef31673c926a0976e1f61dab4dca57e0c0a8af02"
+  integrity sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==
+  dependencies:
+    type-fest "^0.20.2"
+
+graphemer@^1.4.0:
+  version "1.4.0"
+  resolved "https://registry.npmmirror.com/graphemer/-/graphemer-1.4.0.tgz#fb2f1d55e0e3a1849aeffc90c4fa0dd53a0e66c6"
+  integrity sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==
+
+has-flag@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.npmmirror.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
+  integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
+
+ignore@^5.2.0:
+  version "5.2.4"
+  resolved "https://registry.npmmirror.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324"
+  integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ==
+
+import-fresh@^3.2.1:
+  version "3.3.0"
+  resolved "https://registry.npmmirror.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b"
+  integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw==
+  dependencies:
+    parent-module "^1.0.0"
+    resolve-from "^4.0.0"
+
+imurmurhash@^0.1.4:
+  version "0.1.4"
+  resolved "https://registry.npmmirror.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
+  integrity sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==
+
+inflight@^1.0.4:
+  version "1.0.6"
+  resolved "https://registry.npmmirror.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9"
+  integrity sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==
+  dependencies:
+    once "^1.3.0"
+    wrappy "1"
+
+inherits@2:
+  version "2.0.4"
+  resolved "https://registry.npmmirror.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
+  integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
+
+is-binary-path@~2.1.0:
+  version "2.1.0"
+  resolved "https://registry.npmmirror.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
+  integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
+  dependencies:
+    binary-extensions "^2.0.0"
+
+is-extglob@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.npmmirror.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
+  integrity sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==
+
+is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1:
+  version "4.0.3"
+  resolved "https://registry.npmmirror.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084"
+  integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==
+  dependencies:
+    is-extglob "^2.1.1"
+
+is-number@^7.0.0:
+  version "7.0.0"
+  resolved "https://registry.npmmirror.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
+  integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
+
+is-path-inside@^3.0.3:
+  version "3.0.3"
+  resolved "https://registry.npmmirror.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
+  integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
+
+isexe@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.npmmirror.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
+  integrity sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==
+
+js-yaml@^4.1.0:
+  version "4.1.0"
+  resolved "https://registry.npmmirror.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602"
+  integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==
+  dependencies:
+    argparse "^2.0.1"
+
+json-buffer@3.0.1:
+  version "3.0.1"
+  resolved "https://registry.npmmirror.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
+  integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==
+
+json-schema-traverse@^0.4.1:
+  version "0.4.1"
+  resolved "https://registry.npmmirror.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
+  integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
+
+json-stable-stringify-without-jsonify@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.npmmirror.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
+  integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==
+
+keyv@^4.5.3:
+  version "4.5.3"
+  resolved "https://registry.npmmirror.com/keyv/-/keyv-4.5.3.tgz#00873d2b046df737963157bd04f294ca818c9c25"
+  integrity sha512-QCiSav9WaX1PgETJ+SpNnx2PRRapJ/oRSXM4VO5OGYGSjrxbKPVFVhB3l2OCbLCk329N8qyAtsJjSjvVBWzEug==
+  dependencies:
+    json-buffer "3.0.1"
+
+levn@^0.4.1:
+  version "0.4.1"
+  resolved "https://registry.npmmirror.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade"
+  integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==
+  dependencies:
+    prelude-ls "^1.2.1"
+    type-check "~0.4.0"
+
+locate-path@^6.0.0:
+  version "6.0.0"
+  resolved "https://registry.npmmirror.com/locate-path/-/locate-path-6.0.0.tgz#55321eb309febbc59c4801d931a72452a681d286"
+  integrity sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==
+  dependencies:
+    p-locate "^5.0.0"
+
+lodash.merge@^4.6.2:
+  version "4.6.2"
+  resolved "https://registry.npmmirror.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a"
+  integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==
+
+magic-string@^0.25.3:
+  version "0.25.9"
+  resolved "https://registry.npmmirror.com/magic-string/-/magic-string-0.25.9.tgz#de7f9faf91ef8a1c91d02c2e5314c8277dbcdd1c"
+  integrity sha512-RmF0AsMzgt25qzqqLc1+MbHmhdx0ojF2Fvs4XnOqz2ZOBXzzkEwc/dJQZCYHAn7v1jbVOjAZfK8msRn4BxO4VQ==
+  dependencies:
+    sourcemap-codec "^1.4.8"
+
+mime@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.npmmirror.com/mime/-/mime-3.0.0.tgz#b374550dca3a0c18443b0c950a6a58f1931cf7a7"
+  integrity sha512-jSCU7/VB1loIWBZe14aEYHU/+1UMEHoaO7qxCOVJOw9GgH72VAWppxNcjU+x9a2k3GSIBXNKxXQFqRvvZ7vr3A==
+
+miniflare@3.20231002.1:
+  version "3.20231002.1"
+  resolved "https://registry.npmmirror.com/miniflare/-/miniflare-3.20231002.1.tgz#3ee52e03890bbb9197627e1214321de09070dad7"
+  integrity sha512-4xJ8FezJkQqHzCm71lovb9L/wJ0VV/odMFf5CIxfLTunsx97kTIlZnhS6aHuvcbzdztbWp1RR71K/1qFUHdpdQ==
+  dependencies:
+    acorn "^8.8.0"
+    acorn-walk "^8.2.0"
+    capnp-ts "^0.7.0"
+    exit-hook "^2.2.1"
+    glob-to-regexp "^0.4.1"
+    source-map-support "0.5.21"
+    stoppable "^1.1.0"
+    undici "^5.22.1"
+    workerd "1.20231002.0"
+    ws "^8.11.0"
+    youch "^3.2.2"
+    zod "^3.20.6"
+
+minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2:
+  version "3.1.2"
+  resolved "https://registry.npmmirror.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b"
+  integrity sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==
+  dependencies:
+    brace-expansion "^1.1.7"
+
+ms@2.1.2:
+  version "2.1.2"
+  resolved "https://registry.npmmirror.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
+  integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
+
+mustache@^4.2.0:
+  version "4.2.0"
+  resolved "https://registry.npmmirror.com/mustache/-/mustache-4.2.0.tgz#e5892324d60a12ec9c2a73359edca52972bf6f64"
+  integrity sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==
+
+nanoid@^3.3.3:
+  version "3.3.6"
+  resolved "https://registry.npmmirror.com/nanoid/-/nanoid-3.3.6.tgz#443380c856d6e9f9824267d960b4236ad583ea4c"
+  integrity sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==
+
+natural-compare@^1.4.0:
+  version "1.4.0"
+  resolved "https://registry.npmmirror.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
+  integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==
+
+node-forge@^1:
+  version "1.3.1"
+  resolved "https://registry.npmmirror.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3"
+  integrity sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==
+
+normalize-path@^3.0.0, normalize-path@~3.0.0:
+  version "3.0.0"
+  resolved "https://registry.npmmirror.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
+  integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
+
+once@^1.3.0:
+  version "1.4.0"
+  resolved "https://registry.npmmirror.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
+  integrity sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==
+  dependencies:
+    wrappy "1"
+
+optionator@^0.9.3:
+  version "0.9.3"
+  resolved "https://registry.npmmirror.com/optionator/-/optionator-0.9.3.tgz#007397d44ed1872fdc6ed31360190f81814e2c64"
+  integrity sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==
+  dependencies:
+    "@aashutoshrathi/word-wrap" "^1.2.3"
+    deep-is "^0.1.3"
+    fast-levenshtein "^2.0.6"
+    levn "^0.4.1"
+    prelude-ls "^1.2.1"
+    type-check "^0.4.0"
+
+p-limit@^3.0.2:
+  version "3.1.0"
+  resolved "https://registry.npmmirror.com/p-limit/-/p-limit-3.1.0.tgz#e1daccbe78d0d1388ca18c64fea38e3e57e3706b"
+  integrity sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==
+  dependencies:
+    yocto-queue "^0.1.0"
+
+p-locate@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.npmmirror.com/p-locate/-/p-locate-5.0.0.tgz#83c8315c6785005e3bd021839411c9e110e6d834"
+  integrity sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==
+  dependencies:
+    p-limit "^3.0.2"
+
+parent-module@^1.0.0:
+  version "1.0.1"
+  resolved "https://registry.npmmirror.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2"
+  integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==
+  dependencies:
+    callsites "^3.0.0"
+
+path-exists@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.npmmirror.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3"
+  integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==
+
+path-is-absolute@^1.0.0:
+  version "1.0.1"
+  resolved "https://registry.npmmirror.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f"
+  integrity sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==
+
+path-key@^3.1.0:
+  version "3.1.1"
+  resolved "https://registry.npmmirror.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375"
+  integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==
+
+path-to-regexp@^6.2.0:
+  version "6.2.1"
+  resolved "https://registry.npmmirror.com/path-to-regexp/-/path-to-regexp-6.2.1.tgz#d54934d6798eb9e5ef14e7af7962c945906918e5"
+  integrity sha512-JLyh7xT1kizaEvcaXOQwOc2/Yhw6KZOvPf1S8401UyLk86CU79LN3vl7ztXGm/pZ+YjoyAJ4rxmHwbkBXJX+yw==
+
+picomatch@^2.0.4, picomatch@^2.2.1:
+  version "2.3.1"
+  resolved "https://registry.npmmirror.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
+  integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
+
+prelude-ls@^1.2.1:
+  version "1.2.1"
+  resolved "https://registry.npmmirror.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
+  integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==
+
+printable-characters@^1.0.42:
+  version "1.0.42"
+  resolved "https://registry.npmmirror.com/printable-characters/-/printable-characters-1.0.42.tgz#3f18e977a9bd8eb37fcc4ff5659d7be90868b3d8"
+  integrity sha512-dKp+C4iXWK4vVYZmYSd0KBH5F/h1HoZRsbJ82AVKRO3PEo8L4lBS/vLwhVtpwwuYcoIsVY+1JYKR268yn480uQ==
+
+punycode@^2.1.0:
+  version "2.3.0"
+  resolved "https://registry.npmmirror.com/punycode/-/punycode-2.3.0.tgz#f67fa67c94da8f4d0cfff981aee4118064199b8f"
+  integrity sha512-rRV+zQD8tVFys26lAGR9WUuS4iUAngJScM+ZRSKtvl5tKeZ2t5bvdNFdNHBW9FWR4guGHlgmsZ1G7BSm2wTbuA==
+
+queue-microtask@^1.2.2:
+  version "1.2.3"
+  resolved "https://registry.npmmirror.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
+  integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==
+
+readdirp@~3.6.0:
+  version "3.6.0"
+  resolved "https://registry.npmmirror.com/readdirp/-/readdirp-3.6.0.tgz#74a370bd857116e245b29cc97340cd431a02a6c7"
+  integrity sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==
+  dependencies:
+    picomatch "^2.2.1"
+
+resolve-from@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.npmmirror.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6"
+  integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==
+
+reusify@^1.0.4:
+  version "1.0.4"
+  resolved "https://registry.npmmirror.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76"
+  integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==
+
+rimraf@^3.0.2:
+  version "3.0.2"
+  resolved "https://registry.npmmirror.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"
+  integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==
+  dependencies:
+    glob "^7.1.3"
+
+rollup-plugin-inject@^3.0.0:
+  version "3.0.2"
+  resolved "https://registry.npmmirror.com/rollup-plugin-inject/-/rollup-plugin-inject-3.0.2.tgz#e4233855bfba6c0c12a312fd6649dff9a13ee9f4"
+  integrity sha512-ptg9PQwzs3orn4jkgXJ74bfs5vYz1NCZlSQMBUA0wKcGp5i5pA1AO3fOUEte8enhGUC+iapTCzEWw2jEFFUO/w==
+  dependencies:
+    estree-walker "^0.6.1"
+    magic-string "^0.25.3"
+    rollup-pluginutils "^2.8.1"
+
+rollup-plugin-node-polyfills@^0.2.1:
+  version "0.2.1"
+  resolved "https://registry.npmmirror.com/rollup-plugin-node-polyfills/-/rollup-plugin-node-polyfills-0.2.1.tgz#53092a2744837164d5b8a28812ba5f3ff61109fd"
+  integrity sha512-4kCrKPTJ6sK4/gLL/U5QzVT8cxJcofO0OU74tnB19F40cmuAKSzH5/siithxlofFEjwvw1YAhPmbvGNA6jEroA==
+  dependencies:
+    rollup-plugin-inject "^3.0.0"
+
+rollup-pluginutils@^2.8.1:
+  version "2.8.2"
+  resolved "https://registry.npmmirror.com/rollup-pluginutils/-/rollup-pluginutils-2.8.2.tgz#72f2af0748b592364dbd3389e600e5a9444a351e"
+  integrity sha512-EEp9NhnUkwY8aif6bxgovPHMoMoNr2FulJziTndpt5H9RdwC47GSGuII9XxpSdzVGM0GWrNPHV6ie1LTNJPaLQ==
+  dependencies:
+    estree-walker "^0.6.1"
+
+run-parallel@^1.1.9:
+  version "1.2.0"
+  resolved "https://registry.npmmirror.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee"
+  integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==
+  dependencies:
+    queue-microtask "^1.2.2"
+
+selfsigned@^2.0.1:
+  version "2.1.1"
+  resolved "https://registry.npmmirror.com/selfsigned/-/selfsigned-2.1.1.tgz#18a7613d714c0cd3385c48af0075abf3f266af61"
+  integrity sha512-GSL3aowiF7wa/WtSFwnUrludWFoNhftq8bUkH9pkzjpN2XSPOAYEgg6e0sS9s0rZwgJzJiQRPU18A6clnoW5wQ==
+  dependencies:
+    node-forge "^1"
+
+shebang-command@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.npmmirror.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea"
+  integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==
+  dependencies:
+    shebang-regex "^3.0.0"
+
+shebang-regex@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.npmmirror.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172"
+  integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==
+
+source-map-support@0.5.21:
+  version "0.5.21"
+  resolved "https://registry.npmmirror.com/source-map-support/-/source-map-support-0.5.21.tgz#04fe7c7f9e1ed2d662233c28cb2b35b9f63f6e4f"
+  integrity sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==
+  dependencies:
+    buffer-from "^1.0.0"
+    source-map "^0.6.0"
+
+source-map@0.6.1, source-map@^0.6.0, source-map@^0.6.1:
+  version "0.6.1"
+  resolved "https://registry.npmmirror.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
+  integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
+
+sourcemap-codec@^1.4.8:
+  version "1.4.8"
+  resolved "https://registry.npmmirror.com/sourcemap-codec/-/sourcemap-codec-1.4.8.tgz#ea804bd94857402e6992d05a38ef1ae35a9ab4c4"
+  integrity sha512-9NykojV5Uih4lgo5So5dtw+f0JgJX30KCNI8gwhz2J9A15wD0Ml6tjHKwf6fTSa6fAdVBdZeNOs9eJ71qCk8vA==
+
+stacktracey@^2.1.8:
+  version "2.1.8"
+  resolved "https://registry.npmmirror.com/stacktracey/-/stacktracey-2.1.8.tgz#bf9916020738ce3700d1323b32bd2c91ea71199d"
+  integrity sha512-Kpij9riA+UNg7TnphqjH7/CzctQ/owJGNbFkfEeve4Z4uxT5+JapVLFXcsurIfN34gnTWZNJ/f7NMG0E8JDzTw==
+  dependencies:
+    as-table "^1.0.36"
+    get-source "^2.0.12"
+
+stoppable@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.npmmirror.com/stoppable/-/stoppable-1.1.0.tgz#32da568e83ea488b08e4d7ea2c3bcc9d75015d5b"
+  integrity sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==
+
+strip-ansi@^6.0.1:
+  version "6.0.1"
+  resolved "https://registry.npmmirror.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
+  integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
+  dependencies:
+    ansi-regex "^5.0.1"
+
+strip-json-comments@^3.1.1:
+  version "3.1.1"
+  resolved "https://registry.npmmirror.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
+  integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
+
+supports-color@^7.1.0:
+  version "7.2.0"
+  resolved "https://registry.npmmirror.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
+  integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
+  dependencies:
+    has-flag "^4.0.0"
+
+text-table@^0.2.0:
+  version "0.2.0"
+  resolved "https://registry.npmmirror.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
+  integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==
+
+to-regex-range@^5.0.1:
+  version "5.0.1"
+  resolved "https://registry.npmmirror.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
+  integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
+  dependencies:
+    is-number "^7.0.0"
+
+tslib@^2.2.0:
+  version "2.6.2"
+  resolved "https://registry.npmmirror.com/tslib/-/tslib-2.6.2.tgz#703ac29425e7b37cd6fd456e92404d46d1f3e4ae"
+  integrity sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==
+
+type-check@^0.4.0, type-check@~0.4.0:
+  version "0.4.0"
+  resolved "https://registry.npmmirror.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1"
+  integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==
+  dependencies:
+    prelude-ls "^1.2.1"
+
+type-fest@^0.20.2:
+  version "0.20.2"
+  resolved "https://registry.npmmirror.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4"
+  integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==
+
+undici@^5.22.1:
+  version "5.25.4"
+  resolved "https://registry.npmmirror.com/undici/-/undici-5.25.4.tgz#7d8ef81d94f84cd384986271e5e5599b6dff4296"
+  integrity sha512-450yJxT29qKMf3aoudzFpIciqpx6Pji3hEWaXqXmanbXF58LTAGCKxcJjxMXWu3iG+Mudgo3ZUfDB6YDFd/dAw==
+  dependencies:
+    "@fastify/busboy" "^2.0.0"
+
+uri-js@^4.2.2:
+  version "4.4.1"
+  resolved "https://registry.npmmirror.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
+  integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==
+  dependencies:
+    punycode "^2.1.0"
+
+which@^2.0.1:
+  version "2.0.2"
+  resolved "https://registry.npmmirror.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"
+  integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==
+  dependencies:
+    isexe "^2.0.0"
+
+workerd@1.20231002.0:
+  version "1.20231002.0"
+  resolved "https://registry.npmmirror.com/workerd/-/workerd-1.20231002.0.tgz#ade9ed9104d2d771e5e721a8d739275b3b815d37"
+  integrity sha512-NFuUQBj30ZguDoPZ6bL40hINiu8aP2Pvxr/3xAdhWOwVFLuObPOiSdQ8qm4JYZ7jovxWjWE4Z7VR2avjIzEksQ==
+  optionalDependencies:
+    "@cloudflare/workerd-darwin-64" "1.20231002.0"
+    "@cloudflare/workerd-darwin-arm64" "1.20231002.0"
+    "@cloudflare/workerd-linux-64" "1.20231002.0"
+    "@cloudflare/workerd-linux-arm64" "1.20231002.0"
+    "@cloudflare/workerd-windows-64" "1.20231002.0"
+
+wrangler@^3.0.0:
+  version "3.11.0"
+  resolved "https://registry.npmmirror.com/wrangler/-/wrangler-3.11.0.tgz#a7b1c2526d4ac837b4cae225fc16c87519f6aba1"
+  integrity sha512-glR3UPD1RJDokOIOUt56vPLacV7riWdfhPUCx5ZmZWVn0dDXB51ktj9DyK2VU8zHM+yj90OQhwHVdI77mGxWkw==
+  dependencies:
+    "@cloudflare/kv-asset-handler" "^0.2.0"
+    "@esbuild-plugins/node-globals-polyfill" "^0.2.3"
+    "@esbuild-plugins/node-modules-polyfill" "^0.2.2"
+    blake3-wasm "^2.1.5"
+    chokidar "^3.5.3"
+    esbuild "0.17.19"
+    miniflare "3.20231002.1"
+    nanoid "^3.3.3"
+    path-to-regexp "^6.2.0"
+    selfsigned "^2.0.1"
+    source-map "0.6.1"
+    source-map-support "0.5.21"
+    xxhash-wasm "^1.0.1"
+  optionalDependencies:
+    fsevents "~2.3.2"
+
+wrappy@1:
+  version "1.0.2"
+  resolved "https://registry.npmmirror.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
+  integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
+
+ws@^8.11.0:
+  version "8.14.2"
+  resolved "https://registry.npmmirror.com/ws/-/ws-8.14.2.tgz#6c249a806eb2db7a20d26d51e7709eab7b2e6c7f"
+  integrity sha512-wEBG1ftX4jcglPxgFCMJmZ2PLtSbJ2Peg6TmpJFTbe9GZYOQCDPdMYu/Tm0/bGZkw8paZnJY45J4K2PZrLYq8g==
+
+xxhash-wasm@^1.0.1:
+  version "1.0.2"
+  resolved "https://registry.npmmirror.com/xxhash-wasm/-/xxhash-wasm-1.0.2.tgz#ecc0f813219b727af4d5f3958ca6becee2f2f1ff"
+  integrity sha512-ibF0Or+FivM9lNrg+HGJfVX8WJqgo+kCLDc4vx6xMeTce7Aj+DLttKbxxRR/gNLSAelRc1omAPlJ77N/Jem07A==
+
+yocto-queue@^0.1.0:
+  version "0.1.0"
+  resolved "https://registry.npmmirror.com/yocto-queue/-/yocto-queue-0.1.0.tgz#0294eb3dee05028d31ee1a5fa2c556a6aaf10a1b"
+  integrity sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==
+
+youch@^3.2.2:
+  version "3.3.2"
+  resolved "https://registry.npmmirror.com/youch/-/youch-3.3.2.tgz#a0dae868ecb1f3f6889e48131c42c88d69eeb123"
+  integrity sha512-9cwz/z7abtcHOIuH45nzmUFCZbyJA1nLqlirKvyNRx4wDMhqsBaifAJzBej7L4fsVPjFxYq3NK3GAcfvZsydFw==
+  dependencies:
+    cookie "^0.5.0"
+    mustache "^4.2.0"
+    stacktracey "^2.1.8"
+
+zod@^3.20.6:
+  version "3.22.4"
+  resolved "https://registry.npmmirror.com/zod/-/zod-3.22.4.tgz#f31c3a9386f61b1f228af56faa9255e845cf3fff"
+  integrity sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==

From 748eef6995e4d685ff6f359c1b62d35388a35f01 Mon Sep 17 00:00:00 2001
From: TBXark 
Date: Sat, 7 Oct 2023 15:38:03 +0800
Subject: [PATCH 02/11] =?UTF-8?q?perf:=20=E5=88=86=E7=A6=BBcf=20ai=20sdk?=
 =?UTF-8?q?=E4=BB=A3=E7=A0=81=E5=88=B0=E7=8B=AC=E7=AB=8B=E6=96=87=E4=BB=B6?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 dist/buildinfo.json          |   2 +-
 dist/index.js                |  32 +--
 dist/timestamp               |   2 +-
 src/vendors/cloudflare-ai.js | 424 ++++++++++++++++++++++++++++++++++
 src/workers-ai.js            | 425 +----------------------------------
 5 files changed, 445 insertions(+), 440 deletions(-)
 create mode 100644 src/vendors/cloudflare-ai.js

diff --git a/dist/buildinfo.json b/dist/buildinfo.json
index 345a29a4..132c0c7f 100644
--- a/dist/buildinfo.json
+++ b/dist/buildinfo.json
@@ -1 +1 @@
-{"sha": "0019c7a", "timestamp": 1696662329}
+{"sha": "682c32a", "timestamp": 1696664248}
diff --git a/dist/index.js b/dist/index.js
index 65c9afe9..9562ed8f 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -39,9 +39,9 @@ var ENV = {
   // 检查更新的分支
   UPDATE_BRANCH: "master",
   // 当前版本
-  BUILD_TIMESTAMP: 1696662329,
+  BUILD_TIMESTAMP: 1696664248,
   // 当前版本 commit id
-  BUILD_VERSION: "0019c7a",
+  BUILD_VERSION: "682c32a",
   I18N: null,
   LANGUAGE: "zh-cn",
   // 使用流模式
@@ -947,19 +947,7 @@ function makeResponse200(resp) {
   }
 }
 
-// src/workers-ai.js
-function isWorkersAIEnable(context) {
-  return AI_LLM !== null;
-}
-async function requestCompletionsFromWorkersAI(message, history, context, onStream) {
-  const ai = new Ai(AI_LLM);
-  const model = ENV.WORKERS_AI_MODEL || "@cf/meta/llama-2-7b-chat-int8";
-  const request = {
-    messages: [...history || [], { role: "user", content: message }]
-  };
-  const response = await ai.run(model, request);
-  return response.response;
-}
+// src/vendors/cloudflare-ai.js
 var TypedArrayProto = Object.getPrototypeOf(Uint8Array);
 function isArray(value) {
   return Array.isArray(value) || value instanceof TypedArrayProto;
@@ -1351,6 +1339,20 @@ var Ai = class {
   }
 };
 
+// src/workers-ai.js
+function isWorkersAIEnable(context) {
+  return AI_LLM !== null;
+}
+async function requestCompletionsFromWorkersAI(message, history, context, onStream) {
+  const ai = new Ai(AI_LLM);
+  const model = ENV.WORKERS_AI_MODEL || "@cf/meta/llama-2-7b-chat-int8";
+  const request = {
+    messages: [...history || [], { role: "user", content: message }]
+  };
+  const response = await ai.run(model, request);
+  return response.response;
+}
+
 // src/chat.js
 async function loadHistory(key, context) {
   const initMessage = { role: "system", content: context.USER_CONFIG.SYSTEM_INIT_MESSAGE };
diff --git a/dist/timestamp b/dist/timestamp
index 9ad82cd2..77e240aa 100644
--- a/dist/timestamp
+++ b/dist/timestamp
@@ -1 +1 @@
-1696662329
+1696664248
diff --git a/src/vendors/cloudflare-ai.js b/src/vendors/cloudflare-ai.js
new file mode 100644
index 00000000..0ebe4b83
--- /dev/null
+++ b/src/vendors/cloudflare-ai.js
@@ -0,0 +1,424 @@
+/* eslint-disable */
+// src/tensor.ts
+var TensorType = /* @__PURE__ */ (TensorType2 => {
+    TensorType2['String'] = 'str';
+    TensorType2['Bool'] = 'bool';
+    TensorType2['Float16'] = 'float16';
+    TensorType2['Float32'] = 'float32';
+    TensorType2['Int16'] = 'int16';
+    TensorType2['Int32'] = 'int32';
+    TensorType2['Int64'] = 'int64';
+    TensorType2['Int8'] = 'int8';
+    TensorType2['Uint16'] = 'uint16';
+    TensorType2['Uint32'] = 'uint32';
+    TensorType2['Uint64'] = 'uint64';
+    TensorType2['Uint8'] = 'uint8';
+    return TensorType2;
+})(TensorType || {});
+var TypedArrayProto = Object.getPrototypeOf(Uint8Array);
+function isArray(value) {
+    return Array.isArray(value) || value instanceof TypedArrayProto;
+}
+function arrLength(obj) {
+    return obj instanceof TypedArrayProto
+        ? obj.length
+        : obj
+            .flat()
+            .reduce(
+                (acc, cur) => acc + (cur instanceof TypedArrayProto ? cur.length : 1),
+                0
+            );
+}
+function ensureShape(shape, value) {
+    if (shape.length === 0 && !isArray(value)) {
+        return;
+    }
+    const count = shape.reduce((acc, v) => {
+        if (!Number.isInteger(v)) {
+            throw new Error(
+                `expected shape to be array-like of integers but found non-integer element "${v}"`
+            );
+        }
+        return acc * v;
+    }, 1);
+    if (count != arrLength(value)) {
+        throw new Error(
+            `invalid shape: expected ${count} elements for shape ${shape} but value array has length ${value.length}`
+        );
+    }
+}
+function ensureType(type, value) {
+    if (isArray(value)) {
+        value.forEach(v => ensureType(type, v));
+        return;
+    }
+    switch (type) {
+        case 'bool' /* Bool */: {
+            if (typeof value === 'boolean') {
+                return;
+            }
+            break;
+        }
+        case 'float16' /* Float16 */:
+        case 'float32' /* Float32 */: {
+            if (typeof value === 'number') {
+                return;
+            }
+            break;
+        }
+        case 'int8' /* Int8 */:
+        case 'uint8' /* Uint8 */:
+        case 'int16' /* Int16 */:
+        case 'uint16' /* Uint16 */:
+        case 'int32' /* Int32 */:
+        case 'uint32' /* Uint32 */: {
+            if (Number.isInteger(value)) {
+                return;
+            }
+            break;
+        }
+        case 'int64' /* Int64 */:
+        case 'uint64' /* Uint64 */: {
+            if (typeof value === 'bigint') {
+                return;
+            }
+            break;
+        }
+        case 'str' /* String */: {
+            if (typeof value === 'string') {
+                return;
+            }
+            break;
+        }
+    }
+    throw new Error(`unexpected type "${type}" with value "${value}".`);
+}
+function serializeType(type, value) {
+    if (isArray(value)) {
+        return [...value].map(v => serializeType(type, v));
+    }
+    switch (type) {
+        case 'str' /* String */:
+        case 'bool' /* Bool */:
+        case 'float16' /* Float16 */:
+        case 'float32' /* Float32 */:
+        case 'int8' /* Int8 */:
+        case 'uint8' /* Uint8 */:
+        case 'int16' /* Int16 */:
+        case 'uint16' /* Uint16 */:
+        case 'uint32' /* Uint32 */:
+        case 'int32' /* Int32 */: {
+            return value;
+        }
+        case 'int64' /* Int64 */:
+        case 'uint64' /* Uint64 */: {
+            return value.toString();
+        }
+    }
+    throw new Error(`unexpected type "${type}" with value "${value}".`);
+}
+function deserializeType(type, value) {
+    if (isArray(value)) {
+        return value.map(v => deserializeType(type, v));
+    }
+    switch (type) {
+        case 'str' /* String */:
+        case 'bool' /* Bool */:
+        case 'float16' /* Float16 */:
+        case 'float32' /* Float32 */:
+        case 'int8' /* Int8 */:
+        case 'uint8' /* Uint8 */:
+        case 'int16' /* Int16 */:
+        case 'uint16' /* Uint16 */:
+        case 'uint32' /* Uint32 */:
+        case 'int32' /* Int32 */: {
+            return value;
+        }
+        case 'int64' /* Int64 */:
+        case 'uint64' /* Uint64 */: {
+            return BigInt(value);
+        }
+    }
+    throw new Error(`unexpected type "${type}" with value "${value}".`);
+}
+var Tensor = class _Tensor {
+    constructor(type, value, opts = {}) {
+        this.type = type;
+        this.value = value;
+        ensureType(type, this.value);
+        if (opts.shape === void 0) {
+            if (isArray(this.value)) {
+                this.shape = [arrLength(value)];
+            } else {
+                this.shape = [];
+            }
+        } else {
+            this.shape = opts.shape;
+        }
+        ensureShape(this.shape, this.value);
+        this.name = opts.name || null;
+    }
+    static fromJSON(obj) {
+        const { type, shape, value, b64Value, name } = obj;
+        const opts = { shape, name };
+        if (b64Value !== void 0) {
+            const value2 = b64ToArray(b64Value, type)[0];
+            return new _Tensor(type, value2, opts);
+        } else {
+            return new _Tensor(type, deserializeType(type, value), opts);
+        }
+    }
+    toJSON() {
+        return {
+            type: this.type,
+            shape: this.shape,
+            name: this.name,
+            value: serializeType(this.type, this.value)
+        };
+    }
+};
+function b64ToArray(base64, type) {
+    const byteString = atob(base64);
+    const bytes = new Uint8Array(byteString.length);
+    for (let i = 0; i < byteString.length; i++) {
+        bytes[i] = byteString.charCodeAt(i);
+    }
+    const arrBuffer = new DataView(bytes.buffer).buffer;
+    switch (type) {
+        case 'float32':
+            return new Float32Array(arrBuffer);
+        case 'float64':
+            return new Float64Array(arrBuffer);
+        case 'int32':
+            return new Int32Array(arrBuffer);
+        case 'int64':
+            return new BigInt64Array(arrBuffer);
+        default:
+            throw Error(`invalid data type for base64 input: ${type}`);
+    }
+}
+
+// src/session.ts
+function parseInputs(inputs) {
+    if (Array.isArray(inputs)) {
+        return inputs.map(input => input.toJSON());
+    }
+    if (inputs !== null && typeof inputs === 'object') {
+        return Object.keys(inputs).map(key => {
+            let tensor = inputs[key].toJSON();
+            tensor.name = key;
+            return tensor;
+        });
+    }
+    throw new Error(`invalid inputs, must be Array> | TensorsObject`);
+}
+var InferenceSession = class {
+    constructor(binding, model, options = {}) {
+        this.binding = binding;
+        this.model = model;
+        this.options = options;
+    }
+    async run(inputs, options) {
+        const jsonInputs = parseInputs(inputs);
+        const body = JSON.stringify({
+            input: jsonInputs
+        });
+        const compressedReadableStream = new Response(body).body.pipeThrough(
+            new CompressionStream('gzip')
+        );
+        let routingModel = 'default';
+        if (this.model === '@cf/meta/llama-2-7b-chat-int8') {
+            routingModel = 'llama_2_7b_chat_int8';
+        }
+        const res = await this.binding.fetch('/run', {
+            method: 'POST',
+            body: compressedReadableStream,
+            headers: {
+                'content-encoding': 'gzip',
+                'cf-consn-model-id': this.model,
+                'cf-consn-routing-model': routingModel,
+                ...(this.options?.extraHeaders || {})
+            }
+        });
+        if (!res.ok) {
+            throw new Error(`API returned ${res.status}: ${await res.text()}`);
+        }
+        const { result } = await res.json();
+        const outputByName = {};
+        for (let i = 0, len = result.length; i < len; i++) {
+            const tensor = Tensor.fromJSON(result[i]);
+            const name = tensor.name || 'output' + i;
+            outputByName[name] = tensor;
+        }
+        return outputByName;
+    }
+};
+
+// src/ai.ts
+var modelMappings = {
+    'text-classification': ['@cf/huggingface/distilbert-sst-2-int8'],
+    'text-embeddings': ['@cf/baai/bge-base-en-v1.5'],
+    'speech-recognition': ['@cf/openai/whisper'],
+    'image-classification': ['@cf/microsoft/resnet-50'],
+    'text-generation': ['@cf/meta/llama-2-7b-chat-int8'],
+    translation: ['@cf/meta/m2m100-1.2b']
+};
+var chunkArray = (arr, size) =>
+    arr.length > size
+        ? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
+        : [arr];
+var Ai = class {
+    constructor(binding, options = {}) {
+        this.binding = binding;
+        this.options = options;
+    }
+    async run(model, inputs) {
+        const session = new InferenceSession(
+            this.binding,
+            model,
+            this.options.sessionOptions || {}
+        );
+        let tensorInput;
+        let typedInputs;
+        let outputMap = r => r;
+        const tasks = Object.keys(modelMappings);
+        let task = '';
+        for (var t in tasks) {
+            if (modelMappings[tasks[t]].indexOf(model) !== -1) {
+                task = tasks[t];
+                break;
+            }
+        }
+        switch (task) {
+            case 'text-classification':
+                typedInputs = inputs;
+                tensorInput = [
+                    new Tensor('str' /* String */, [typedInputs.text], {
+                        shape: [[typedInputs.text].length],
+                        name: 'input_text'
+                    })
+                ];
+                outputMap = r => {
+                    return [
+                        {
+                            label: 'NEGATIVE',
+                            score: r.logits.value[0][0]
+                        },
+                        {
+                            label: 'POSITIVE',
+                            score: r.logits.value[0][1]
+                        }
+                    ];
+                };
+                break;
+            case 'text-embeddings':
+                typedInputs = inputs;
+                tensorInput = [
+                    new Tensor(
+                        'str' /* String */,
+                        Array.isArray(typedInputs.text)
+                            ? typedInputs.text
+                            : [typedInputs.text],
+                        {
+                            shape: [
+                                Array.isArray(typedInputs.text)
+                                    ? typedInputs.text.length
+                                    : [typedInputs.text].length
+                            ],
+                            name: 'input_text'
+                        }
+                    )
+                ];
+                outputMap = r => {
+                    if (Array.isArray(r.embeddings.value[0])) {
+                        return {
+                            shape: r.embeddings.shape,
+                            data: r.embeddings.value
+                        };
+                    } else {
+                        return {
+                            shape: r.embeddings.shape,
+                            data: chunkArray(r.embeddings.value, r.embeddings.shape[1])
+                        };
+                    }
+                };
+                break;
+            case 'speech-recognition':
+                typedInputs = inputs;
+                tensorInput = [
+                    new Tensor('uint8' /* Uint8 */, typedInputs.audio, {
+                        shape: [1, typedInputs.audio.length],
+                        name: 'audio'
+                    })
+                ];
+                outputMap = r => {
+                    return { text: r.name.value[0] };
+                };
+                break;
+            case 'text-generation':
+                typedInputs = inputs;
+                let prompt = '';
+                if (typedInputs.messages === void 0) {
+                    prompt = typedInputs.prompt;
+                } else {
+                    for (let i = 0; i < typedInputs.messages.length; i++) {
+                        const inp = typedInputs.messages[i];
+                        switch (inp.role) {
+                            case 'system':
+                                prompt += '[INST]<>' + inp.content + '<>[/INST]\n';
+                                break;
+                            case 'user':
+                                prompt += '[INST]' + inp.content + '[/INST]\n';
+                                break;
+                            case 'assistant':
+                                prompt += inp.content + '\n';
+                                break;
+                            default:
+                                throw new Error('Invalid role: ' + inp.role);
+                        }
+                    }
+                }
+                tensorInput = [
+                    new Tensor('str' /* String */, [prompt], {
+                        shape: [1],
+                        name: 'INPUT_0'
+                    }),
+                    new Tensor('uint32' /* Uint32 */, [256], {
+                        // sequence length
+                        shape: [1],
+                        name: 'INPUT_1'
+                    })
+                ];
+                outputMap = r => {
+                    return { response: r.name.value[0] };
+                };
+                break;
+            case 'translation':
+                typedInputs = inputs;
+                tensorInput = [
+                    new Tensor('str' /* String */, [typedInputs.text], {
+                        shape: [1, 1],
+                        name: 'text'
+                    }),
+                    new Tensor('str' /* String */, [typedInputs.source_lang || 'en'], {
+                        shape: [1, 1],
+                        name: 'source_lang'
+                    }),
+                    new Tensor('str' /* String */, [typedInputs.target_lang], {
+                        shape: [1, 1],
+                        name: 'target_lang'
+                    })
+                ];
+                outputMap = r => {
+                    return { translated_text: r.name.value[0] };
+                };
+                break;
+            default:
+                throw new Error(`No such model ${model} or task`);
+        }
+        const output = await session.run(tensorInput);
+        return outputMap(output);
+    }
+};
+
+export { Ai }
\ No newline at end of file
diff --git a/src/workers-ai.js b/src/workers-ai.js
index d42f7e8d..f2b3b042 100644
--- a/src/workers-ai.js
+++ b/src/workers-ai.js
@@ -1,4 +1,6 @@
 import {ENV, AI_LLM} from "./env.js";
+import {Ai} from "./vendors/cloudflare-ai.js";
+
 
 /**
  * @return {boolean}
@@ -28,426 +30,3 @@ export async function requestCompletionsFromWorkersAI(message, history, context,
     const response = await ai.run(model, request);
     return response.response;
 }
-
-/* eslint-disable */
-// src/tensor.ts
-var TensorType = /* @__PURE__ */ (TensorType2 => {
-    TensorType2['String'] = 'str';
-    TensorType2['Bool'] = 'bool';
-    TensorType2['Float16'] = 'float16';
-    TensorType2['Float32'] = 'float32';
-    TensorType2['Int16'] = 'int16';
-    TensorType2['Int32'] = 'int32';
-    TensorType2['Int64'] = 'int64';
-    TensorType2['Int8'] = 'int8';
-    TensorType2['Uint16'] = 'uint16';
-    TensorType2['Uint32'] = 'uint32';
-    TensorType2['Uint64'] = 'uint64';
-    TensorType2['Uint8'] = 'uint8';
-    return TensorType2;
-})(TensorType || {});
-var TypedArrayProto = Object.getPrototypeOf(Uint8Array);
-function isArray(value) {
-    return Array.isArray(value) || value instanceof TypedArrayProto;
-}
-function arrLength(obj) {
-    return obj instanceof TypedArrayProto
-        ? obj.length
-        : obj
-            .flat()
-            .reduce(
-                (acc, cur) => acc + (cur instanceof TypedArrayProto ? cur.length : 1),
-                0
-            );
-}
-function ensureShape(shape, value) {
-    if (shape.length === 0 && !isArray(value)) {
-        return;
-    }
-    const count = shape.reduce((acc, v) => {
-        if (!Number.isInteger(v)) {
-            throw new Error(
-                `expected shape to be array-like of integers but found non-integer element "${v}"`
-            );
-        }
-        return acc * v;
-    }, 1);
-    if (count != arrLength(value)) {
-        throw new Error(
-            `invalid shape: expected ${count} elements for shape ${shape} but value array has length ${value.length}`
-        );
-    }
-}
-function ensureType(type, value) {
-    if (isArray(value)) {
-        value.forEach(v => ensureType(type, v));
-        return;
-    }
-    switch (type) {
-        case 'bool' /* Bool */: {
-            if (typeof value === 'boolean') {
-                return;
-            }
-            break;
-        }
-        case 'float16' /* Float16 */:
-        case 'float32' /* Float32 */: {
-            if (typeof value === 'number') {
-                return;
-            }
-            break;
-        }
-        case 'int8' /* Int8 */:
-        case 'uint8' /* Uint8 */:
-        case 'int16' /* Int16 */:
-        case 'uint16' /* Uint16 */:
-        case 'int32' /* Int32 */:
-        case 'uint32' /* Uint32 */: {
-            if (Number.isInteger(value)) {
-                return;
-            }
-            break;
-        }
-        case 'int64' /* Int64 */:
-        case 'uint64' /* Uint64 */: {
-            if (typeof value === 'bigint') {
-                return;
-            }
-            break;
-        }
-        case 'str' /* String */: {
-            if (typeof value === 'string') {
-                return;
-            }
-            break;
-        }
-    }
-    throw new Error(`unexpected type "${type}" with value "${value}".`);
-}
-function serializeType(type, value) {
-    if (isArray(value)) {
-        return [...value].map(v => serializeType(type, v));
-    }
-    switch (type) {
-        case 'str' /* String */:
-        case 'bool' /* Bool */:
-        case 'float16' /* Float16 */:
-        case 'float32' /* Float32 */:
-        case 'int8' /* Int8 */:
-        case 'uint8' /* Uint8 */:
-        case 'int16' /* Int16 */:
-        case 'uint16' /* Uint16 */:
-        case 'uint32' /* Uint32 */:
-        case 'int32' /* Int32 */: {
-            return value;
-        }
-        case 'int64' /* Int64 */:
-        case 'uint64' /* Uint64 */: {
-            return value.toString();
-        }
-    }
-    throw new Error(`unexpected type "${type}" with value "${value}".`);
-}
-function deserializeType(type, value) {
-    if (isArray(value)) {
-        return value.map(v => deserializeType(type, v));
-    }
-    switch (type) {
-        case 'str' /* String */:
-        case 'bool' /* Bool */:
-        case 'float16' /* Float16 */:
-        case 'float32' /* Float32 */:
-        case 'int8' /* Int8 */:
-        case 'uint8' /* Uint8 */:
-        case 'int16' /* Int16 */:
-        case 'uint16' /* Uint16 */:
-        case 'uint32' /* Uint32 */:
-        case 'int32' /* Int32 */: {
-            return value;
-        }
-        case 'int64' /* Int64 */:
-        case 'uint64' /* Uint64 */: {
-            return BigInt(value);
-        }
-    }
-    throw new Error(`unexpected type "${type}" with value "${value}".`);
-}
-var Tensor = class _Tensor {
-    constructor(type, value, opts = {}) {
-        this.type = type;
-        this.value = value;
-        ensureType(type, this.value);
-        if (opts.shape === void 0) {
-            if (isArray(this.value)) {
-                this.shape = [arrLength(value)];
-            } else {
-                this.shape = [];
-            }
-        } else {
-            this.shape = opts.shape;
-        }
-        ensureShape(this.shape, this.value);
-        this.name = opts.name || null;
-    }
-    static fromJSON(obj) {
-        const { type, shape, value, b64Value, name } = obj;
-        const opts = { shape, name };
-        if (b64Value !== void 0) {
-            const value2 = b64ToArray(b64Value, type)[0];
-            return new _Tensor(type, value2, opts);
-        } else {
-            return new _Tensor(type, deserializeType(type, value), opts);
-        }
-    }
-    toJSON() {
-        return {
-            type: this.type,
-            shape: this.shape,
-            name: this.name,
-            value: serializeType(this.type, this.value)
-        };
-    }
-};
-function b64ToArray(base64, type) {
-    const byteString = atob(base64);
-    const bytes = new Uint8Array(byteString.length);
-    for (let i = 0; i < byteString.length; i++) {
-        bytes[i] = byteString.charCodeAt(i);
-    }
-    const arrBuffer = new DataView(bytes.buffer).buffer;
-    switch (type) {
-        case 'float32':
-            return new Float32Array(arrBuffer);
-        case 'float64':
-            return new Float64Array(arrBuffer);
-        case 'int32':
-            return new Int32Array(arrBuffer);
-        case 'int64':
-            return new BigInt64Array(arrBuffer);
-        default:
-            throw Error(`invalid data type for base64 input: ${type}`);
-    }
-}
-
-// src/session.ts
-function parseInputs(inputs) {
-    if (Array.isArray(inputs)) {
-        return inputs.map(input => input.toJSON());
-    }
-    if (inputs !== null && typeof inputs === 'object') {
-        return Object.keys(inputs).map(key => {
-            let tensor = inputs[key].toJSON();
-            tensor.name = key;
-            return tensor;
-        });
-    }
-    throw new Error(`invalid inputs, must be Array> | TensorsObject`);
-}
-var InferenceSession = class {
-    constructor(binding, model, options = {}) {
-        this.binding = binding;
-        this.model = model;
-        this.options = options;
-    }
-    async run(inputs, options) {
-        const jsonInputs = parseInputs(inputs);
-        const body = JSON.stringify({
-            input: jsonInputs
-        });
-        const compressedReadableStream = new Response(body).body.pipeThrough(
-            new CompressionStream('gzip')
-        );
-        let routingModel = 'default';
-        if (this.model === '@cf/meta/llama-2-7b-chat-int8') {
-            routingModel = 'llama_2_7b_chat_int8';
-        }
-        const res = await this.binding.fetch('/run', {
-            method: 'POST',
-            body: compressedReadableStream,
-            headers: {
-                'content-encoding': 'gzip',
-                'cf-consn-model-id': this.model,
-                'cf-consn-routing-model': routingModel,
-                ...(this.options?.extraHeaders || {})
-            }
-        });
-        if (!res.ok) {
-            throw new Error(`API returned ${res.status}: ${await res.text()}`);
-        }
-        const { result } = await res.json();
-        const outputByName = {};
-        for (let i = 0, len = result.length; i < len; i++) {
-            const tensor = Tensor.fromJSON(result[i]);
-            const name = tensor.name || 'output' + i;
-            outputByName[name] = tensor;
-        }
-        return outputByName;
-    }
-};
-
-// src/ai.ts
-var modelMappings = {
-    'text-classification': ['@cf/huggingface/distilbert-sst-2-int8'],
-    'text-embeddings': ['@cf/baai/bge-base-en-v1.5'],
-    'speech-recognition': ['@cf/openai/whisper'],
-    'image-classification': ['@cf/microsoft/resnet-50'],
-    'text-generation': ['@cf/meta/llama-2-7b-chat-int8'],
-    translation: ['@cf/meta/m2m100-1.2b']
-};
-var chunkArray = (arr, size) =>
-    arr.length > size
-        ? [arr.slice(0, size), ...chunkArray(arr.slice(size), size)]
-        : [arr];
-var Ai = class {
-    constructor(binding, options = {}) {
-        this.binding = binding;
-        this.options = options;
-    }
-    async run(model, inputs) {
-        const session = new InferenceSession(
-            this.binding,
-            model,
-            this.options.sessionOptions || {}
-        );
-        let tensorInput;
-        let typedInputs;
-        let outputMap = r => r;
-        const tasks = Object.keys(modelMappings);
-        let task = '';
-        for (var t in tasks) {
-            if (modelMappings[tasks[t]].indexOf(model) !== -1) {
-                task = tasks[t];
-                break;
-            }
-        }
-        switch (task) {
-            case 'text-classification':
-                typedInputs = inputs;
-                tensorInput = [
-                    new Tensor('str' /* String */, [typedInputs.text], {
-                        shape: [[typedInputs.text].length],
-                        name: 'input_text'
-                    })
-                ];
-                outputMap = r => {
-                    return [
-                        {
-                            label: 'NEGATIVE',
-                            score: r.logits.value[0][0]
-                        },
-                        {
-                            label: 'POSITIVE',
-                            score: r.logits.value[0][1]
-                        }
-                    ];
-                };
-                break;
-            case 'text-embeddings':
-                typedInputs = inputs;
-                tensorInput = [
-                    new Tensor(
-                        'str' /* String */,
-                        Array.isArray(typedInputs.text)
-                            ? typedInputs.text
-                            : [typedInputs.text],
-                        {
-                            shape: [
-                                Array.isArray(typedInputs.text)
-                                    ? typedInputs.text.length
-                                    : [typedInputs.text].length
-                            ],
-                            name: 'input_text'
-                        }
-                    )
-                ];
-                outputMap = r => {
-                    if (Array.isArray(r.embeddings.value[0])) {
-                        return {
-                            shape: r.embeddings.shape,
-                            data: r.embeddings.value
-                        };
-                    } else {
-                        return {
-                            shape: r.embeddings.shape,
-                            data: chunkArray(r.embeddings.value, r.embeddings.shape[1])
-                        };
-                    }
-                };
-                break;
-            case 'speech-recognition':
-                typedInputs = inputs;
-                tensorInput = [
-                    new Tensor('uint8' /* Uint8 */, typedInputs.audio, {
-                        shape: [1, typedInputs.audio.length],
-                        name: 'audio'
-                    })
-                ];
-                outputMap = r => {
-                    return { text: r.name.value[0] };
-                };
-                break;
-            case 'text-generation':
-                typedInputs = inputs;
-                let prompt = '';
-                if (typedInputs.messages === void 0) {
-                    prompt = typedInputs.prompt;
-                } else {
-                    for (let i = 0; i < typedInputs.messages.length; i++) {
-                        const inp = typedInputs.messages[i];
-                        switch (inp.role) {
-                            case 'system':
-                                prompt += '[INST]<>' + inp.content + '<>[/INST]\n';
-                                break;
-                            case 'user':
-                                prompt += '[INST]' + inp.content + '[/INST]\n';
-                                break;
-                            case 'assistant':
-                                prompt += inp.content + '\n';
-                                break;
-                            default:
-                                throw new Error('Invalid role: ' + inp.role);
-                        }
-                    }
-                }
-                tensorInput = [
-                    new Tensor('str' /* String */, [prompt], {
-                        shape: [1],
-                        name: 'INPUT_0'
-                    }),
-                    new Tensor('uint32' /* Uint32 */, [256], {
-                        // sequence length
-                        shape: [1],
-                        name: 'INPUT_1'
-                    })
-                ];
-                outputMap = r => {
-                    return { response: r.name.value[0] };
-                };
-                break;
-            case 'translation':
-                typedInputs = inputs;
-                tensorInput = [
-                    new Tensor('str' /* String */, [typedInputs.text], {
-                        shape: [1, 1],
-                        name: 'text'
-                    }),
-                    new Tensor('str' /* String */, [typedInputs.source_lang || 'en'], {
-                        shape: [1, 1],
-                        name: 'source_lang'
-                    }),
-                    new Tensor('str' /* String */, [typedInputs.target_lang], {
-                        shape: [1, 1],
-                        name: 'target_lang'
-                    })
-                ];
-                outputMap = r => {
-                    return { translated_text: r.name.value[0] };
-                };
-                break;
-            default:
-                throw new Error(`No such model ${model} or task`);
-        }
-        const output = await session.run(tensorInput);
-        return outputMap(output);
-    }
-};
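
For orientation between patches: the vendored module added above keeps the same calling surface as Cloudflare's Workers AI client, i.e. construct `Ai` with the AI binding and call `run(model, inputs)`; for text-generation models the vendored code maps the output to an object with a `response` string. A minimal, illustrative sketch only (it assumes a Worker whose environment exposes the binding as `AI_LLM`, the name used at this point in the series; a later patch renames the binding to `AI`):

    import {Ai} from './vendors/cloudflare-ai.js';

    export default {
        async fetch(request, env) {
            // env.AI_LLM is the Workers AI binding configured in wrangler.toml (name assumed here).
            const ai = new Ai(env.AI_LLM);
            // Text-generation models accept either {prompt} or a {messages: [...]} array.
            const result = await ai.run('@cf/meta/llama-2-7b-chat-int8', {
                messages: [{role: 'user', content: 'Hello'}],
            });
            // The vendored SDK returns {response: string} for text-generation tasks.
            return new Response(result.response);
        },
    };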

From 004f7888b35654c71f255ef7484cc6ce5ecee5c7 Mon Sep 17 00:00:00 2001
From: TBXark 
Date: Sat, 7 Oct 2023 15:49:04 +0800
Subject: [PATCH 03/11] =?UTF-8?q?fix:=20gpt3=E8=AE=A1=E6=95=B0=E5=99=A8BUG?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 dist/buildinfo.json       |  2 +-
 dist/index.js             | 65 ++++++++++++++++++++-------------------
 dist/timestamp            |  2 +-
 src/chat.js               | 12 +++-----
 src/context.js            |  2 +-
 src/env.js                |  7 +++--
 src/message.js            |  6 ++--
 src/openai.js             | 10 +++---
 src/router.js             |  5 ++-
 src/utils.js              | 26 ++++++++++++++--
 src/{ => vendors}/gpt3.js | 33 +++-----------------
 src/workers-ai.js         | 24 +++++++--------
 12 files changed, 96 insertions(+), 98 deletions(-)
 rename src/{ => vendors}/gpt3.js (81%)

diff --git a/dist/buildinfo.json b/dist/buildinfo.json
index 132c0c7f..101546e7 100644
--- a/dist/buildinfo.json
+++ b/dist/buildinfo.json
@@ -1 +1 @@
-{"sha": "682c32a", "timestamp": 1696664248}
+{"sha": "748eef6", "timestamp": 1696664893}
diff --git a/dist/index.js b/dist/index.js
index 9562ed8f..b2e219de 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -26,6 +26,8 @@ var ENV = {
   MAX_TOKEN_LENGTH: 2048,
   // 使用GPT3的TOKEN计数
   GPT3_TOKENS_COUNT: false,
+  // GPT3计数器资源地址
+  GPT3_TOKENS_COUNT_REPO: "https://raw.githubusercontent.com/tbxark-arc/GPT-3-Encoder/master",
   // 全局默认初始化消息
   SYSTEM_INIT_MESSAGE: "You are a helpful assistant",
   // 全局默认初始化消息角色
@@ -39,9 +41,9 @@ var ENV = {
   // 检查更新的分支
   UPDATE_BRANCH: "master",
   // 当前版本
-  BUILD_TIMESTAMP: 1696664248,
+  BUILD_TIMESTAMP: 1696664893,
   // 当前版本 commit id
-  BUILD_VERSION: "682c32a",
+  BUILD_VERSION: "748eef6",
   I18N: null,
   LANGUAGE: "zh-cn",
   // 使用流模式
@@ -554,7 +556,7 @@ async function requestCompletionsFromOpenAI(message, history, context, onStream)
   const timeout = 1e3 * 60 * 5;
   setTimeout(() => controller.abort(), timeout);
   let url = `${ENV.OPENAI_API_DOMAIN}/v1/chat/completions`;
-  let header = {
+  const header = {
     "Content-Type": "application/json",
     "Authorization": `Bearer ${key}`
   };
@@ -660,33 +662,10 @@ async function updateBotUsage(usage, context) {
   await DATABASE.put(context.SHARE_CONTEXT.usageKey, JSON.stringify(dbValue));
 }
 
-// src/gpt3.js
-async function resourceLoader(key, url) {
-  try {
-    const raw = await DATABASE.get(key);
-    if (raw && raw !== "") {
-      return raw;
-    }
-  } catch (e) {
-    console.error(e);
-  }
-  try {
-    const bpe = await fetch(url, {
-      headers: {
-        "User-Agent": CONST.USER_AGENT
-      }
-    }).then((x) => x.text());
-    await DATABASE.put(key, bpe);
-    return bpe;
-  } catch (e) {
-    console.error(e);
-  }
-  return null;
-}
-async function gpt3TokensCounter() {
-  const repo = "https://raw.githubusercontent.com/tbxark-archive/GPT-3-Encoder/master";
-  const encoder = await resourceLoader("encoder_raw_file", `${repo}/encoder.json`).then((x) => JSON.parse(x));
-  const bpe_file = await resourceLoader("bpe_raw_file", `${repo}/vocab.bpe`);
+// src/vendors/gpt3.js
+async function gpt3TokensCounter(repo, loader) {
+  const encoder = await loader("encoder_raw_file", `${repo}/encoder.json`).then((x) => JSON.parse(x));
+  const bpe_file = await loader("bpe_raw_file", `${repo}/vocab.bpe`);
   const range = (x, y) => {
     const res = Array.from(Array(y).keys()).slice(x);
     return res;
@@ -919,7 +898,29 @@ async function tokensCounter() {
   let counter = (text) => Array.from(text).length;
   try {
     if (ENV.GPT3_TOKENS_COUNT) {
-      counter = await gpt3TokensCounter();
+      const loader = async (key, url) => {
+        try {
+          const raw = await DATABASE.get(key);
+          if (raw && raw !== "") {
+            return raw;
+          }
+        } catch (e) {
+          console.error(e);
+        }
+        try {
+          const bpe = await fetch(url, {
+            headers: {
+              "User-Agent": CONST.USER_AGENT
+            }
+          }).then((x) => x.text());
+          await DATABASE.put(key, bpe);
+          return bpe;
+        } catch (e) {
+          console.error(e);
+        }
+        return null;
+      };
+      counter = await gpt3TokensCounter(ENV.GPT3_TOKENS_COUNT_REPO, loader);
     }
   } catch (e) {
     console.error(e);
@@ -2259,7 +2260,7 @@ async function defaultIndexAction() {
 }
 async function gpt3TokenTest(request) {
   const text = new URL(request.url).searchParams.get("text") || "Hello World";
-  const counter = await gpt3TokensCounter();
+  const counter = await tokensCounter();
   const HTML = renderHTML(`
     

ChatGPT-Telegram-Workers


diff --git a/dist/timestamp b/dist/timestamp
index 77e240aa..c5349f8e 100644
--- a/dist/timestamp
+++ b/dist/timestamp
@@ -1 +1 @@
-1696664248
+1696664893
diff --git a/src/chat.js b/src/chat.js
index ccc74b7d..00d26d34 100644
--- a/src/chat.js
+++ b/src/chat.js
@@ -1,15 +1,14 @@
import {
deleteMessageFromTelegramWithContext,
sendChatActionToTelegramWithContext,
- sendMessageToTelegramWithContext
+ sendMessageToTelegramWithContext,
} from './telegram.js';
import {DATABASE, ENV} from './env.js';
// eslint-disable-next-line no-unused-vars
import {Context} from './context.js';
-import {requestCompletionsFromOpenAI} from "./openai.js";
-import {tokensCounter} from "./utils.js";
-import {isWorkersAIEnable, requestCompletionsFromWorkersAI} from "./workers-ai.js";
-
+import {requestCompletionsFromOpenAI} from './openai.js';
+import {tokensCounter} from './utils.js';
+import {isWorkersAIEnable, requestCompletionsFromWorkersAI} from './workers-ai.js';
/**
@@ -106,7 +105,6 @@ async function loadHistory(key, context) {
}
-
/**
*
* @param {string} text
@@ -171,7 +169,7 @@ export async function chatWithLLM(text, context, modifier) {
let llm = requestCompletionsFromOpenAI;
if (isWorkersAIEnable(context)) {
- llm = requestCompletionsFromWorkersAI
+ llm = requestCompletionsFromWorkersAI;
}
const answer = await requestCompletionsFromLLM(text, context, llm, modifier, onStream);
diff --git a/src/context.js b/src/context.js
index 6be93787..17170152 100644
--- a/src/context.js
+++ b/src/context.js
@@ -196,7 +196,7 @@ export class Context {
*/
openAIKeyFromContext() {
if (ENV.AZURE_COMPLETIONS_API) {
- return ENV.AZURE_API_KEY
+ return ENV.AZURE_API_KEY;
}
if (this.USER_CONFIG.OPENAI_API_KEY) {
return this.USER_CONFIG.OPENAI_API_KEY;
diff --git a/src/env.js b/src/env.js
index 1b0da9c4..eef94f15 100644
--- a/src/env.js
+++ b/src/env.js
@@ -13,6 +13,7 @@
* @property {number} MAX_HISTORY_LENGTH
* @property {number} MAX_TOKEN_LENGTH
* @property {boolean} GPT3_TOKENS_COUNT
+ * @property {string} GPT3_TOKENS_COUNT_REPO
* @property {string} SYSTEM_INIT_MESSAGE
* @property {string} SYSTEM_INIT_MESSAGE_ROLE
* @property {boolean} ENABLE_USAGE_STATISTICS
@@ -67,6 +68,8 @@ export const ENV = {
MAX_TOKEN_LENGTH: 2048,
// 使用GPT3的TOKEN计数
GPT3_TOKENS_COUNT: false,
+ // GPT3计数器资源地址
+ GPT3_TOKENS_COUNT_REPO: 'https://raw.githubusercontent.com/tbxark-arc/GPT-3-Encoder/master',
// 全局默认初始化消息
SYSTEM_INIT_MESSAGE: 'You are a helpful assistant',
// 全局默认初始化消息角色
@@ -106,11 +109,11 @@ export const ENV = {
// Azure API Key
AZURE_API_KEY: null,
- // Azure Completions API
+ // Azure Completions API
AZURE_COMPLETIONS_API: null,
// workers ai模型
- WORKERS_AI_MODEL: null
+ WORKERS_AI_MODEL: null,
};
diff --git a/src/message.js b/src/message.js
index 891621c7..1305ca6b 100644
--- a/src/message.js
+++ b/src/message.js
@@ -4,8 +4,8 @@ import {sendMessageToTelegramWithContext} from './telegram.js';
import {handleCommandMessage} from './command.js';
import {errorToString} from './utils.js';
import {chatWithLLM} from './chat.js';
-import {isOpenAIEnable} from "./openai.js";
-import {isWorkersAIEnable} from "./workers-ai.js";
+import {isOpenAIEnable} from './openai.js';
+import {isWorkersAIEnable} from './workers-ai.js';
// import {TelegramMessage, TelegramWebhookRequest} from './type.d.ts';
@@ -79,7 +79,7 @@ async function msgIgnoreOldMessage(message, context) {
* @return {Promise}
*/
async function msgCheckEnvIsReady(message, context) {
- const llmEnable = isOpenAIEnable(context) || isWorkersAIEnable(context)
+ const llmEnable = isOpenAIEnable(context) || isWorkersAIEnable(context);
if (!llmEnable) {
return sendMessageToTelegramWithContext(context)('LLM Not Set');
}
diff --git a/src/openai.js b/src/openai.js
index c9358ad6..6ebf5d52 100644
--- a/src/openai.js
+++ b/src/openai.js
@@ -63,15 +63,15 @@ export async function requestCompletionsFromOpenAI(message, history, context, on
setTimeout(() => controller.abort(), timeout);
let url = `${ENV.OPENAI_API_DOMAIN}/v1/chat/completions`;
- let header = {
+ const header = {
'Content-Type': 'application/json',
'Authorization': `Bearer ${key}`,
- }
+ };
if (ENV.AZURE_COMPLETIONS_API) {
url = ENV.AZURE_COMPLETIONS_API;
- header['api-key'] = key
- delete header['Authorization']
- delete body.model
+ header['api-key'] = key;
+ delete header['Authorization'];
+ delete body.model;
}
const resp = await fetch(url, {
method: 'POST',
diff --git a/src/router.js b/src/router.js
index 55ecd18b..8190619c 100644
--- a/src/router.js
+++ b/src/router.js
@@ -2,8 +2,7 @@ import {handleMessage} from './message.js';
import {API_GUARD, DATABASE, ENV} from './env.js';
import {bindCommandForTelegram, commandsDocument} from './command.js';
import {bindTelegramWebHook, getBot} from './telegram.js';
-import {errorToString, historyPassword, makeResponse200, renderHTML} from './utils.js';
-import {gpt3TokensCounter} from './gpt3.js';
+import {errorToString, historyPassword, makeResponse200, renderHTML, tokensCounter} from './utils.js';
const helpLink = 'https://github.com/TBXark/ChatGPT-Telegram-Workers/blob/master/doc/DEPLOY.md';
@@ -160,7 +159,7 @@ async function defaultIndexAction() {
*/
async function gpt3TokenTest(request) {
const text = new URL(request.url).searchParams.get('text') || 'Hello World';
- const counter = await gpt3TokensCounter();
+ const counter = await tokensCounter();
const HTML = renderHTML(`

ChatGPT-Telegram-Workers


diff --git a/src/utils.js b/src/utils.js
index bb47167f..4e6dd756 100644
--- a/src/utils.js
+++ b/src/utils.js
@@ -1,5 +1,5 @@
import {CONST, DATABASE, ENV} from './env.js';
-import {gpt3TokensCounter} from './gpt3.js';
+import {gpt3TokensCounter} from './vendors/gpt3.js';
/**
* @param {number} length
@@ -127,7 +127,29 @@ export async function tokensCounter() {
let counter = (text) => Array.from(text).length;
try {
if (ENV.GPT3_TOKENS_COUNT) {
- counter = await gpt3TokensCounter();
+ const loader = async (key, url) => {
+ try {
+ const raw = await DATABASE.get(key);
+ if (raw && raw !== '') {
+ return raw;
+ }
+ } catch (e) {
+ console.error(e);
+ }
+ try {
+ const bpe = await fetch(url, {
+ headers: {
+ 'User-Agent': CONST.USER_AGENT,
+ },
+ }).then((x) => x.text());
+ await DATABASE.put(key, bpe);
+ return bpe;
+ } catch (e) {
+ console.error(e);
+ }
+ return null;
+ };
+ counter = await gpt3TokensCounter( ENV.GPT3_TOKENS_COUNT_REPO, loader);
}
} catch (e) {
console.error(e);
diff --git a/src/gpt3.js b/src/vendors/gpt3.js
similarity index 81%
rename from src/gpt3.js
rename to src/vendors/gpt3.js
index 0bfb6424..fbe243e6 100644
--- a/src/gpt3.js
+++ b/src/vendors/gpt3.js
@@ -1,35 +1,10 @@
-/* eslint-disable camelcase, require-jsdoc */
+/* eslint-disable */
// https://github.com/latitudegames/GPT-3-Encoder
-import {CONST, DATABASE} from './env.js';
-async function resourceLoader(key, url) {
- try {
- const raw = await DATABASE.get(key);
- if (raw && raw !== '') {
- return raw;
- }
- } catch (e) {
- console.error(e);
- }
- try {
- const bpe = await fetch(url, {
- headers: {
- 'User-Agent': CONST.USER_AGENT,
- },
- }).then((x) => x.text());
- await DATABASE.put(key, bpe);
- return bpe;
- } catch (e) {
- console.error(e);
- }
- return null;
-}
-
-export async function gpt3TokensCounter() {
- const repo = 'https://raw.githubusercontent.com/tbxark-archive/GPT-3-Encoder/master';
- const encoder = await resourceLoader('encoder_raw_file', `${repo}/encoder.json`).then((x) => JSON.parse(x));
- const bpe_file = await resourceLoader('bpe_raw_file', `${repo}/vocab.bpe`);
+export async function gpt3TokensCounter(repo, loader) {
+ const encoder = await loader('encoder_raw_file', `${repo}/encoder.json`).then((x) => JSON.parse(x));
+ const bpe_file = await loader('bpe_raw_file', `${repo}/vocab.bpe`);
const range = (x, y) => {
const res = Array.from(Array(y).keys()).slice(x);
diff --git a/src/workers-ai.js b/src/workers-ai.js
index f2b3b042..8f4c9048 100644
--- a/src/workers-ai.js
+++ b/src/workers-ai.js
@@ -1,13 +1,14 @@
-import {ENV, AI_LLM} from "./env.js";
-import {Ai} from "./vendors/cloudflare-ai.js";
+import {ENV, AI_LLM} from './env.js';
+import {Ai} from './vendors/cloudflare-ai.js';
/**
+ * @param {Context} context
* @return {boolean}
*/
export function isWorkersAIEnable(context) {
- return AI_LLM !== null;
- // return ENV.WORKERS_AI_MODEL !== null;
+ return AI_LLM !== null;
+ // return ENV.WORKERS_AI_MODEL !== null;
}
@@ -21,12 +22,11 @@ export function isWorkersAIEnable(context) {
* @return {Promise}
*/
export async function requestCompletionsFromWorkersAI(message, history, context, onStream) {
-
- const ai = new Ai(AI_LLM);
- const model = ENV.WORKERS_AI_MODEL || '@cf/meta/llama-2-7b-chat-int8'
- const request = {
- messages: [...history || [], { role: "user", content: message }]
- };
- const response = await ai.run(model, request);
- return response.response;
+ const ai = new Ai(AI_LLM);
+ const model = ENV.WORKERS_AI_MODEL || '@cf/meta/llama-2-7b-chat-int8';
+ const request = {
+ messages: 
[...history || [], {role: 'user', content: message}], + }; + const response = await ai.run(model, request); + return response.response; } From 92f35a1a1fa10641cb7c6b935d266730cdfd2b40 Mon Sep 17 00:00:00 2001 From: TBXark Date: Sat, 7 Oct 2023 15:50:51 +0800 Subject: [PATCH 04/11] doc: Update README.md --- README.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/README.md b/README.md index 308cf017..a66320bc 100644 --- a/README.md +++ b/README.md @@ -47,9 +47,6 @@ ~~新建多个机器人绑定到同一个workers,设置`TELEGRAM_AVAILABLE_TOKENS`,每个机器人赋予不同的`SYSTEM_INIT_MESSAGE`~~。开启群聊模式,新建多个群聊,每个群内只有自己个机器人,每个群的机器人由不同的`SYSTEM_INIT_MESSAGE`,比如翻译专家,文案专家,代码专家。然后每次根据自己的需求和不同的群里的机器人聊天,这样就不用经常切换配置属性。 -## 支持我 - -如果使用openai期间需要绑卡可以使用我的onekey的邀请码: 如果有其他问题可以加群交流。 ## 特别鸣谢 From 022c32c8f7d8aacc8ceffb9ac20295656173d142 Mon Sep 17 00:00:00 2001 From: TBXark Date: Sat, 7 Oct 2023 17:14:15 +0800 Subject: [PATCH 05/11] =?UTF-8?q?perf:=20=E6=B7=BB=E5=8A=A0WORKERS=5FAI=5F?= =?UTF-8?q?MODEL=E9=BB=98=E8=AE=A4=E5=80=BC?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dist/buildinfo.json | 2 +- dist/index.js | 11 ++-- dist/timestamp | 2 +- doc/CONFIG.md | 124 +++++++++++++++++++++--------------------- src/env.js | 4 +- src/workers-ai.js | 2 +- wrangler-example.toml | 6 +- 7 files changed, 77 insertions(+), 74 deletions(-) diff --git a/dist/buildinfo.json b/dist/buildinfo.json index 101546e7..a54f3807 100644 --- a/dist/buildinfo.json +++ b/dist/buildinfo.json @@ -1 +1 @@ -{"sha": "748eef6", "timestamp": 1696664893} +{"sha": "92f35a1", "timestamp": 1696670020} diff --git a/dist/index.js b/dist/index.js index b2e219de..bd435cf6 100644 --- a/dist/index.js +++ b/dist/index.js @@ -41,9 +41,9 @@ var ENV = { // 检查更新的分支 UPDATE_BRANCH: "master", // 当前版本 - BUILD_TIMESTAMP: 1696664893, + BUILD_TIMESTAMP: 1696670020, // 当前版本 commit id - BUILD_VERSION: "748eef6", + BUILD_VERSION: "92f35a1", I18N: null, LANGUAGE: "zh-cn", // 使用流模式 @@ -63,7 +63,7 @@ var ENV = { // Azure Completions API AZURE_COMPLETIONS_API: null, // workers ai模型 - WORKERS_AI_MODEL: null + WORKERS_AI_MODEL: "@cf/meta/llama-2-7b-chat-int8" }; var CONST = { PASSWORD_KEY: "chat_history_password", @@ -76,8 +76,7 @@ var AI_LLM = null; var ENV_VALUE_TYPE = { API_KEY: [], AZURE_API_KEY: "string", - AZURE_COMPLETIONS_API: "string", - WORKERS_AI_MODEL: "string" + AZURE_COMPLETIONS_API: "string" }; function initEnv(env, i18n2) { DATABASE = env.DATABASE; @@ -1346,7 +1345,7 @@ function isWorkersAIEnable(context) { } async function requestCompletionsFromWorkersAI(message, history, context, onStream) { const ai = new Ai(AI_LLM); - const model = ENV.WORKERS_AI_MODEL || "@cf/meta/llama-2-7b-chat-int8"; + const model = ENV.WORKERS_AI_MODEL; const request = { messages: [...history || [], { role: "user", content: message }] }; diff --git a/dist/timestamp b/dist/timestamp index c5349f8e..b44038ee 100644 --- a/dist/timestamp +++ b/dist/timestamp @@ -1 +1 @@ -1696664893 +1696670020 diff --git a/doc/CONFIG.md b/doc/CONFIG.md index 53b01703..6f0c72e4 100644 --- a/doc/CONFIG.md +++ b/doc/CONFIG.md @@ -3,49 +3,51 @@ 推荐在Workers配置界面填写环境变量, 而不是直接修改js代码中的变量 ### KV配置 -| KEY | 特殊说明 | -| :------- | ------------------------------------------------------------ | +| KEY | 特殊说明 | +|:---------|--------------------------------------| | DATABASE | 先新建KV,新建的时候名字随意,然后绑定的时候必须设定为DATABASE | ### 系统配置 为每个用户通用的配置,通常在workers配置界面填写 -| KEY | 说明 | 默认值 | 特殊说明 | -| :------------------------ | -------------------------- | -------------------------- | 
------------------------------------------------------------ | -| API_KEY | OpenAI API Key | `null` | 可以同时使用多个key,使用的时候会随机选择一个 | -| CHAT_MODEL | open ai 模型选择 | `gpt-3.5-turbo` | | -| - | - | - | - | -| TELEGRAM_AVAILABLE_TOKENS | 支持多个Telegram Bot Token | `null` | 多个Token用`,`分隔 | -| - | - | - | - | -| CHAT_WHITE_LIST | 聊天ID白名单 | `null` | 多个ID用`,`分隔,不知道ID,和机器人聊一句就能返回 | -| I_AM_A_GENEROUS_PERSON | 关闭白名单,允许所有人访问 | `false` | 鉴于很多人不想设置白名单,或者不知道怎么获取ID,所以设置这个选项就能允许所有人访问, 值为`true`时生效 | -| - | - | - | - | -| AUTO_TRIM_HISTORY | 自动清理历史记录 | `true` | 为了避免4096字符限制,将消息删减 | -| MAX_HISTORY_LENGTH | 最大历史记录长度 | `20` | `AUTO_TRIM_HISTORY开启后` 为了避免4096字符限制,将消息删减 | -| MAX_TOKEN_LENGTH | 最大历史token数量 | 2048 | 过长容易超时建议设定在一个合适的数字 | -| GPT3_TOKENS_COUNT | GTP计数模式 | `false` | 使用更加精准的token计数模式替代单纯判断字符串长度,但是容易超时 | -| - | - | - | - | -| SYSTEM_INIT_MESSAGE | 系统初始化信息 | `你是一个得力的助手` | 默认机器人设定 | -| SYSTEM_INIT_MESSAGE_ROLE | 系统初始化信息角色 | `system` | 默认机器人设定 | -| - | - | - | - | -| ENABLE_USAGE_STATISTICS | 开启使用统计 | `false` | 开启后,每次调用API都会记录到KV,可以通过`/usage`查看 | -| HIDE_COMMAND_BUTTONS | 隐藏指令按钮 | `null` | 把想要隐藏的按钮写入用逗号分开`/start,/system`, 记得带上斜杠,修改之后得重新`init` | -| SHOW_REPLY_BUTTON | 显示快捷回复按钮 | `false` | 显示快捷回复按钮 | -| - | - | - | - | -| UPDATE_BRANCH | 分支 | `master` | 版本检测所在分支 | -| - | - | - | - | -| DEBUG_MODE | 调试模式 | `false` | 目前可以把最新一条消息保存到KV方便调试,非常消耗KV写入量,正式环境务必关闭 | -| DEV_MODE | 开发模式 | `false` | 开发测试用 | -| STREAM_MODE | 流模式 | `true` | 得到类似ChatGPT Web一样的打字机输出模式 | -| SAFE_MODE | 安全模式 | `true` | 安全模式,会增加KV写损耗,但是能避免Workers超时导致的Telegram死亡循环重试,减少Token的浪费,不建议关闭。 | -| - | - | - | - | -| LANGUAGE | 语言 | `zh-CN` | `zh-CN`,`zh-TW`和`en` | -| - | - | - | - | -| TELEGRAM_API_DOMAIN | Telegram | `https://api.telegram.org` | 可以自定义Telegram服务器 | -| OPENAI_API_DOMAIN | OpenAI | `https://api.openai.com` | 可以替换为其他与OpenAI API兼容的其他服务商的域名 | -| - | - | - | - | -| AZURE_API_KEY | azure api key | `null` | 支持azure的API,两个密钥随便选一个就可以 | -| AZURE_COMPLETIONS_API | azure api url | `null` | 格式`https://YOUR_RESOURCE_NAME.openai.azure.com/openai/deployments/YOUR_DEPLOYMENT_NAME/chat/completions?api-version=2023-05-15` | +| KEY | 说明 | 默认值 | 特殊说明 | +|:--------------------------|------------------------|---------------------------------|---------------------------------------------------------------------------------------------------------------------------------| +| API_KEY | OpenAI API Key | `null` | 可以同时使用多个key,使用的时候会随机选择一个 | +| CHAT_MODEL | open ai 模型选择 | `gpt-3.5-turbo` | | +| - | - | - | - | +| TELEGRAM_AVAILABLE_TOKENS | 支持多个Telegram Bot Token | `null` | 多个Token用`,`分隔 | +| - | - | - | - | +| CHAT_WHITE_LIST | 聊天ID白名单 | `null` | 多个ID用`,`分隔,不知道ID,和机器人聊一句就能返回 | +| I_AM_A_GENEROUS_PERSON | 关闭白名单,允许所有人访问 | `false` | 鉴于很多人不想设置白名单,或者不知道怎么获取ID,所以设置这个选项就能允许所有人访问, 值为`true`时生效 | +| - | - | - | - | +| AUTO_TRIM_HISTORY | 自动清理历史记录 | `true` | 为了避免4096字符限制,将消息删减 | +| MAX_HISTORY_LENGTH | 最大历史记录长度 | `20` | `AUTO_TRIM_HISTORY开启后` 为了避免4096字符限制,将消息删减 | +| MAX_TOKEN_LENGTH | 最大历史token数量 | 2048 | 过长容易超时建议设定在一个合适的数字 | +| GPT3_TOKENS_COUNT | GTP计数模式 | `false` | 使用更加精准的token计数模式替代单纯判断字符串长度,但是容易超时 | +| - | - | - | - | +| SYSTEM_INIT_MESSAGE | 系统初始化信息 | `你是一个得力的助手` | 默认机器人设定 | +| SYSTEM_INIT_MESSAGE_ROLE | 系统初始化信息角色 | `system` | 默认机器人设定 | +| - | - | - | - | +| ENABLE_USAGE_STATISTICS | 开启使用统计 | `false` | 开启后,每次调用API都会记录到KV,可以通过`/usage`查看 | +| HIDE_COMMAND_BUTTONS | 隐藏指令按钮 | `null` | 把想要隐藏的按钮写入用逗号分开`/start,/system`, 记得带上斜杠,修改之后得重新`init` | +| SHOW_REPLY_BUTTON | 显示快捷回复按钮 | `false` | 显示快捷回复按钮 | +| - | - | - | - | +| UPDATE_BRANCH | 分支 | `master` | 版本检测所在分支 | +| - | - | - | - | +| 
DEBUG_MODE | 调试模式 | `false` | 目前可以把最新一条消息保存到KV方便调试,非常消耗KV写入量,正式环境务必关闭 | +| DEV_MODE | 开发模式 | `false` | 开发测试用 | +| STREAM_MODE | 流模式 | `true` | 得到类似ChatGPT Web一样的打字机输出模式 | +| SAFE_MODE | 安全模式 | `true` | 安全模式,会增加KV写损耗,但是能避免Workers超时导致的Telegram死亡循环重试,减少Token的浪费,不建议关闭。 | +| - | - | - | - | +| LANGUAGE | 语言 | `zh-CN` | `zh-CN`,`zh-TW`和`en` | +| - | - | - | - | +| TELEGRAM_API_DOMAIN | Telegram | `https://api.telegram.org` | 可以自定义Telegram服务器 | +| OPENAI_API_DOMAIN | OpenAI | `https://api.openai.com` | 可以替换为其他与OpenAI API兼容的其他服务商的域名 | +| - | - | - | - | +| AZURE_API_KEY | azure api key | `null` | 支持azure的API,两个密钥随便选一个就可以 | +| AZURE_COMPLETIONS_API | azure api url | `null` | 格式`https://YOUR_RESOURCE_NAME.openai.azure.com/openai/deployments/YOUR_DEPLOYMENT_NAME/chat/completions?api-version=2023-05-15` | +| - | - | - | - | +| WORKERS_AI_MODEL | workers模型 | `@cf/meta/llama-2-7b-chat-int8` | 具体模型列表可以查看`https://developers.cloudflare.com/workers-ai/models/llm/` | ### 群组配置 可以把机器人加到群组中,然后群组里的所有人都可以和机器人聊天。 @@ -56,20 +58,20 @@ > IMPORTANT:必须在botfather中设置`/setprivacy`为`Disable`,否则机器人无法响应`@机器人`的聊天消息。 -| KEY | 说明 | 默认值 | 特殊说明 | -| :------------------------ | ---------------------- | ------- | ------------------------------------------------------------ | -| GROUP_CHAT_BOT_ENABLE | 开启群组机器人 | `true` | 开启后,机器人加入群组后,然后群组里的所有人都可以和机器人聊天。 | -| TELEGRAM_BOT_NAME | 机器人名字 xxx_bot | `null` | 顺序必须和`TELEGRAM_AVAILABLE_TOKENS` 一致, **必须设置否则无法在群聊中使用** | -| GROUP_CHAT_BOT_SHARE_MODE | 群组机器人共享历史记录 | `false` | 开启后,一个群组只有一个会话和配置。关闭的话群组的每个人都有自己的会话上下文。 | -| CHAT_GROUP_WHITE_LIST | 群组聊天ID白名单 | `null` | 多个ID用`,`分隔,不知道ID,在群组中和机器人聊一句就能返回 | +| KEY | 说明 | 默认值 | 特殊说明 | +|:--------------------------|---------------|---------|---------------------------------------------------------| +| GROUP_CHAT_BOT_ENABLE | 开启群组机器人 | `true` | 开启后,机器人加入群组后,然后群组里的所有人都可以和机器人聊天。 | +| TELEGRAM_BOT_NAME | 机器人名字 xxx_bot | `null` | 顺序必须和`TELEGRAM_AVAILABLE_TOKENS` 一致, **必须设置否则无法在群聊中使用** | +| GROUP_CHAT_BOT_SHARE_MODE | 群组机器人共享历史记录 | `false` | 开启后,一个群组只有一个会话和配置。关闭的话群组的每个人都有自己的会话上下文。 | +| CHAT_GROUP_WHITE_LIST | 群组聊天ID白名单 | `null` | 多个ID用`,`分隔,不知道ID,在群组中和机器人聊一句就能返回 | ### 用户配置 每个用户的自定义配置,只能通过Telegram发送消息来修改,消息格式为`/setenv KEY=VALUE` -| KEY | 说明 | 例子 | -| :---------------------- | ------------------------------------------------------------ | ------------------------------------------------------------ | -| SYSTEM_INIT_MESSAGE | 系统初始化参数,设定后就算开启新会话还能保持,不用每次都调试 | `/setenv SYSTEM_INIT_MESSAGE=现在开始你是喵娘,每句话以喵结尾` | -| OPENAI_API_EXTRA_PARAMS | OpenAI API额外参数,设定后每次调用API都会带上,可以用来调整温度等参数 | `/setenv OPENAI_API_EXTRA_PARAMS={"temperature": 0.5}` 每次修改必须为完整JSON | +| KEY | 说明 | 例子 | +|:------------------------|-----------------------------------------------|--------------------------------------------------------------------------------------------| +| SYSTEM_INIT_MESSAGE | 系统初始化参数,设定后就算开启新会话还能保持,不用每次都调试 | `/setenv SYSTEM_INIT_MESSAGE=现在开始你是喵娘,每句话以喵结尾` | +| OPENAI_API_EXTRA_PARAMS | OpenAI API额外参数,设定后每次调用API都会带上,可以用来调整温度等参数 | `/setenv OPENAI_API_EXTRA_PARAMS={"temperature": 0.5}` 每次修改必须为完整JSON | | OPENAI_API_KEY | OpenAI API Key,设定后每次调用API都会带上, 每个用户可以设定自己的Key | `/setenv OPENAI_API_KEY=sk-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx` | @@ -79,18 +81,18 @@ ### 支持命令 -| 命令 | 说明 | 示例 | -| :--------- | :-------------------------------------- | :-------------------------------- | -| `/help` | 获取命令帮助 | `/help` | -| `/new` | 发起新的对话 | `/new` | -| `/start` | 获取你的ID,并发起新的对话 | `/start` | -| `/img` | 生成一张图片 | `/img 图片描述` | -| `/version` | 获取当前版本号,判断是否需要更新 | `/version` | -| 
`/setenv` | 设置用户配置, 详情见`用户配置` | `/setenv KEY=VALUE` | -| `/delenv` | 删除用户配置 | `/delenv KEY` | -| `/usage` | 获取当前机器人的用量统计 | `/usage` | -| `/system` | 查看当前一些系统信息 | `/system` | -| `/role` | 设置预设的身份, 配置使用方法同`/setenv` | `/role` | +| 命令 | 说明 | 示例 | +|:-----------|:--------------------------|:--------------------------| +| `/help` | 获取命令帮助 | `/help` | +| `/new` | 发起新的对话 | `/new` | +| `/start` | 获取你的ID,并发起新的对话 | `/start` | +| `/img` | 生成一张图片 | `/img 图片描述` | +| `/version` | 获取当前版本号,判断是否需要更新 | `/version` | +| `/setenv` | 设置用户配置, 详情见`用户配置` | `/setenv KEY=VALUE` | +| `/delenv` | 删除用户配置 | `/delenv KEY` | +| `/usage` | 获取当前机器人的用量统计 | `/usage` | +| `/system` | 查看当前一些系统信息 | `/system` | +| `/role` | 设置预设的身份, 配置使用方法同`/setenv` | `/role` | | `/redo` | 修改上一个提问或者换一个回答 | `/redo 修改过的内容` 或者 `/redo` | -| `/echo` | 回显消息,仅开发模式可用 | `/echo` | +| `/echo` | 回显消息,仅开发模式可用 | `/echo` | diff --git a/src/env.js b/src/env.js index eef94f15..630664f8 100644 --- a/src/env.js +++ b/src/env.js @@ -113,8 +113,7 @@ export const ENV = { AZURE_COMPLETIONS_API: null, // workers ai模型 - WORKERS_AI_MODEL: null, - + WORKERS_AI_MODEL: '@cf/meta/llama-2-7b-chat-int8', }; export const CONST = { @@ -132,7 +131,6 @@ const ENV_VALUE_TYPE = { API_KEY: [], AZURE_API_KEY: 'string', AZURE_COMPLETIONS_API: 'string', - WORKERS_AI_MODEL: 'string', }; /** diff --git a/src/workers-ai.js b/src/workers-ai.js index 8f4c9048..ad2caa93 100644 --- a/src/workers-ai.js +++ b/src/workers-ai.js @@ -23,7 +23,7 @@ export function isWorkersAIEnable(context) { */ export async function requestCompletionsFromWorkersAI(message, history, context, onStream) { const ai = new Ai(AI_LLM); - const model = ENV.WORKERS_AI_MODEL || '@cf/meta/llama-2-7b-chat-int8'; + const model = ENV.WORKERS_AI_MODEL; const request = { messages: [...history || [], {role: 'user', content: message}], }; diff --git a/wrangler-example.toml b/wrangler-example.toml index 4db56d6c..a8e67953 100644 --- a/wrangler-example.toml +++ b/wrangler-example.toml @@ -1,6 +1,6 @@ # 这里的 name 改成你自己的workers 的名字 name = "chatgpt-telegram-workers" -compatibility_date = "2023-03-04" +compatibility_date = "2023-10-07" main = "./dist/index.js" workers_dev = true @@ -11,6 +11,10 @@ kv_namespaces = [ { binding = "DATABASE", id = "", preview_id = "" } ] +# 如果使用openai则注释这一段 +#[ai] +#binding = "AI_LLM" + [vars] # 更多参数使用请查看README.md From 5cde4ea8b8a6d190b8528cd520c278180303ab52 Mon Sep 17 00:00:00 2001 From: TBXark Date: Sat, 7 Oct 2023 17:15:46 +0800 Subject: [PATCH 06/11] doc: Update CONFIG.md --- doc/CONFIG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/CONFIG.md b/doc/CONFIG.md index 6f0c72e4..c0ac596f 100644 --- a/doc/CONFIG.md +++ b/doc/CONFIG.md @@ -76,7 +76,7 @@ ### Workers AI 配置 -在 `项目主页-Settings-Variables-AI Bindings` 添加 `AI_LLM = Workers AI Catalog` +在 `项目主页-Settings-Variables-AI Bindings` 添加 `AI_LLM = Workers AI Catalog` ### 支持命令 From 9d714ee326b8fc028cb753f28f52deb71d65ba0f Mon Sep 17 00:00:00 2001 From: TBXark Date: Sat, 7 Oct 2023 17:21:29 +0800 Subject: [PATCH 07/11] =?UTF-8?q?perf:=20=E4=BF=AE=E5=A4=8DAI=E7=BB=91?= =?UTF-8?q?=E5=AE=9A=E5=90=8D=E5=AD=97=E4=B8=BACF=E9=BB=98=E8=AE=A4?= =?UTF-8?q?=E5=90=8D=E5=AD=97?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dist/buildinfo.json | 2 +- dist/index.js | 12 ++++++------ dist/timestamp | 2 +- doc/CONFIG.md | 2 +- src/env.js | 4 ++-- src/workers-ai.js | 6 +++--- wrangler-example.toml | 2 +- 7 files changed, 15 insertions(+), 15 deletions(-) diff --git a/dist/buildinfo.json b/dist/buildinfo.json 
index a54f3807..f385c236 100644 --- a/dist/buildinfo.json +++ b/dist/buildinfo.json @@ -1 +1 @@ -{"sha": "92f35a1", "timestamp": 1696670020} +{"sha": "0ab2830", "timestamp": 1696670444} diff --git a/dist/index.js b/dist/index.js index bd435cf6..2517ddec 100644 --- a/dist/index.js +++ b/dist/index.js @@ -41,9 +41,9 @@ var ENV = { // 检查更新的分支 UPDATE_BRANCH: "master", // 当前版本 - BUILD_TIMESTAMP: 1696670020, + BUILD_TIMESTAMP: 1696670444, // 当前版本 commit id - BUILD_VERSION: "92f35a1", + BUILD_VERSION: "0ab2830", I18N: null, LANGUAGE: "zh-cn", // 使用流模式 @@ -72,7 +72,7 @@ var CONST = { }; var DATABASE = null; var API_GUARD = null; -var AI_LLM = null; +var AI = null; var ENV_VALUE_TYPE = { API_KEY: [], AZURE_API_KEY: "string", @@ -81,7 +81,7 @@ var ENV_VALUE_TYPE = { function initEnv(env, i18n2) { DATABASE = env.DATABASE; API_GUARD = env.API_GUARD; - AI_LLM = env.AI_LLM; + AI = env.AI; for (const key in ENV) { if (env[key]) { switch (ENV_VALUE_TYPE[key] ? typeof ENV_VALUE_TYPE[key] : typeof ENV[key]) { @@ -1341,10 +1341,10 @@ var Ai = class { // src/workers-ai.js function isWorkersAIEnable(context) { - return AI_LLM !== null; + return AI !== null; } async function requestCompletionsFromWorkersAI(message, history, context, onStream) { - const ai = new Ai(AI_LLM); + const ai = new Ai(AI); const model = ENV.WORKERS_AI_MODEL; const request = { messages: [...history || [], { role: "user", content: message }] diff --git a/dist/timestamp b/dist/timestamp index b44038ee..5a60800f 100644 --- a/dist/timestamp +++ b/dist/timestamp @@ -1 +1 @@ -1696670020 +1696670444 diff --git a/doc/CONFIG.md b/doc/CONFIG.md index c0ac596f..69bf3e4f 100644 --- a/doc/CONFIG.md +++ b/doc/CONFIG.md @@ -76,7 +76,7 @@ ### Workers AI 配置 -在 `项目主页-Settings-Variables-AI Bindings` 添加 `AI_LLM = Workers AI Catalog` +在 `项目主页-Settings-Variables-AI Bindings` 添加 `AI = Workers AI Catalog` ### 支持命令 diff --git a/src/env.js b/src/env.js index 630664f8..133bfb2d 100644 --- a/src/env.js +++ b/src/env.js @@ -125,7 +125,7 @@ export const CONST = { export let DATABASE = null; export let API_GUARD = null; -export let AI_LLM = null; +export let AI = null; const ENV_VALUE_TYPE = { API_KEY: [], @@ -146,7 +146,7 @@ const ENV_VALUE_TYPE = { export function initEnv(env, i18n) { DATABASE = env.DATABASE; API_GUARD = env.API_GUARD; - AI_LLM = env.AI_LLM; + AI = env.AI; for (const key in ENV) { if (env[key]) { switch (ENV_VALUE_TYPE[key]?typeof ENV_VALUE_TYPE[key]:(typeof ENV[key])) { diff --git a/src/workers-ai.js b/src/workers-ai.js index ad2caa93..b029b013 100644 --- a/src/workers-ai.js +++ b/src/workers-ai.js @@ -1,4 +1,4 @@ -import {ENV, AI_LLM} from './env.js'; +import {ENV, AI} from './env.js'; import {Ai} from './vendors/cloudflare-ai.js'; @@ -7,7 +7,7 @@ import {Ai} from './vendors/cloudflare-ai.js'; * @return {boolean} */ export function isWorkersAIEnable(context) { - return AI_LLM !== null; + return AI !== null; // return ENV.WORKERS_AI_MODEL !== null; } @@ -22,7 +22,7 @@ export function isWorkersAIEnable(context) { * @return {Promise} */ export async function requestCompletionsFromWorkersAI(message, history, context, onStream) { - const ai = new Ai(AI_LLM); + const ai = new Ai(AI); const model = ENV.WORKERS_AI_MODEL; const request = { messages: [...history || [], {role: 'user', content: message}], diff --git a/wrangler-example.toml b/wrangler-example.toml index a8e67953..f9493879 100644 --- a/wrangler-example.toml +++ b/wrangler-example.toml @@ -13,7 +13,7 @@ kv_namespaces = [ # 如果使用openai则注释这一段 #[ai] -#binding = "AI_LLM" +#binding = "AI" [vars] From 
8d13bad68a58cdc613c51fdbc1ab99d852cb0304 Mon Sep 17 00:00:00 2001 From: TBXark Date: Sun, 8 Oct 2023 14:13:12 +0800 Subject: [PATCH 08/11] =?UTF-8?q?fix:=20workers=20ai=20=E5=BC=80=E5=90=AF?= =?UTF-8?q?=E5=88=A4=E6=96=AD?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/workers-ai.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/workers-ai.js b/src/workers-ai.js index b029b013..5217a337 100644 --- a/src/workers-ai.js +++ b/src/workers-ai.js @@ -7,8 +7,7 @@ import {Ai} from './vendors/cloudflare-ai.js'; * @return {boolean} */ export function isWorkersAIEnable(context) { - return AI !== null; - // return ENV.WORKERS_AI_MODEL !== null; + return AI && AI.fetch } From 12fa5950b855b8dbd0caf6263e8014424db437c8 Mon Sep 17 00:00:00 2001 From: TBXark Date: Sun, 8 Oct 2023 14:13:47 +0800 Subject: [PATCH 09/11] =?UTF-8?q?perf:=20#216=20=E4=BD=BF=E7=94=A8?= =?UTF-8?q?=E6=9B=B4=E5=8A=A0=E7=B2=BE=E5=87=86=E7=9A=84=E6=B5=81=E8=A7=A3?= =?UTF-8?q?=E6=9E=90=E5=99=A8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dist/buildinfo.json | 2 +- dist/index.js | 247 +++++++++++++++++++++++++++++++++++------- dist/timestamp | 2 +- src/openai.js | 54 ++------- src/vendors/stream.js | 224 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 441 insertions(+), 88 deletions(-) create mode 100644 src/vendors/stream.js diff --git a/dist/buildinfo.json b/dist/buildinfo.json index f385c236..a2c45d01 100644 --- a/dist/buildinfo.json +++ b/dist/buildinfo.json @@ -1 +1 @@ -{"sha": "0ab2830", "timestamp": 1696670444} +{"sha": "9d714ee", "timestamp": 1696745505} diff --git a/dist/index.js b/dist/index.js index 2517ddec..8d2fbdc9 100644 --- a/dist/index.js +++ b/dist/index.js @@ -41,9 +41,9 @@ var ENV = { // 检查更新的分支 UPDATE_BRANCH: "master", // 当前版本 - BUILD_TIMESTAMP: 1696670444, + BUILD_TIMESTAMP: 1696745505, // 当前版本 commit id - BUILD_VERSION: "0ab2830", + BUILD_VERSION: "9d714ee", I18N: null, LANGUAGE: "zh-cn", // 使用流模式 @@ -516,28 +516,208 @@ async function getBot(token) { } } -// src/openai.js -function extractContentFromStreamData(stream) { - const line = stream.split("\n"); - let remainingStr = ""; - let contentStr = ""; - for (const l of line) { +// src/vendors/stream.js +var Stream = class { + constructor(response, controller) { + this.response = response; + this.controller = controller; + this.decoder = new SSEDecoder(); + } + async *iterMessages() { + if (!this.response.body) { + this.controller.abort(); + throw new Error(`Attempted to iterate over a response with no body`); + } + const lineDecoder = new LineDecoder(); + const iter = readableStreamAsyncIterable(this.response.body); + for await (const chunk of iter) { + for (const line of lineDecoder.decode(chunk)) { + const sse = this.decoder.decode(line); + if (sse) + yield sse; + } + } + for (const line of lineDecoder.flush()) { + const sse = this.decoder.decode(line); + if (sse) + yield sse; + } + } + async *[Symbol.asyncIterator]() { + let done = false; try { - if (l.startsWith("data:") && l.endsWith("}")) { - const data = JSON.parse(l.substring(5)); - contentStr += data.choices[0].delta?.content || ""; - } else { - remainingStr = l; + for await (const sse of this.iterMessages()) { + if (done) + continue; + if (sse.data.startsWith("[DONE]")) { + done = true; + continue; + } + if (sse.event === null) { + try { + yield JSON.parse(sse.data); + } catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From 
chunk:`, sse.raw); + throw e; + } + } } + done = true; } catch (e) { - remainingStr = l; + if (e instanceof Error && e.name === "AbortError") + return; + throw e; + } finally { + if (!done) + this.controller.abort(); + } + } +}; +var SSEDecoder = class { + constructor() { + this.event = null; + this.data = []; + this.chunks = []; + } + decode(line) { + if (line.endsWith("\r")) { + line = line.substring(0, line.length - 1); + } + if (!line) { + if (!this.event && !this.data.length) + return null; + const sse = { + event: this.event, + data: this.data.join("\n"), + raw: this.chunks + }; + this.event = null; + this.data = []; + this.chunks = []; + return sse; + } + this.chunks.push(line); + if (line.startsWith(":")) { + return null; + } + let [fieldname, _, value] = partition(line, ":"); + if (value.startsWith(" ")) { + value = value.substring(1); + } + if (fieldname === "event") { + this.event = value; + } else if (fieldname === "data") { + this.data.push(value); + } + return null; + } +}; +var LineDecoder = class { + constructor() { + this.buffer = []; + this.trailingCR = false; + } + decode(chunk) { + let text = this.decodeText(chunk); + if (this.trailingCR) { + text = "\r" + text; + this.trailingCR = false; + } + if (text.endsWith("\r")) { + this.trailingCR = true; + text = text.slice(0, -1); + } + if (!text) { + return []; + } + const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || ""); + let lines = text.split(LineDecoder.NEWLINE_REGEXP); + if (lines.length === 1 && !trailingNewline) { + this.buffer.push(lines[0]); + return []; + } + if (this.buffer.length > 0) { + lines = [this.buffer.join("") + lines[0], ...lines.slice(1)]; + this.buffer = []; + } + if (!trailingNewline) { + this.buffer = [lines.pop() || ""]; + } + return lines; + } + decodeText(bytes) { + var _a; + if (bytes == null) + return ""; + if (typeof bytes === "string") + return bytes; + if (typeof Buffer !== "undefined") { + if (bytes instanceof Buffer) { + return bytes.toString(); + } + if (bytes instanceof Uint8Array) { + return Buffer.from(bytes).toString(); + } + throw new Error(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`); + } + if (typeof TextDecoder !== "undefined") { + if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { + (_a = this.textDecoder) !== null && _a !== void 0 ? _a : this.textDecoder = new TextDecoder("utf8"); + return this.textDecoder.decode(bytes); + } + throw new Error(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. Please report this error.`); + } + throw new Error(`Unexpected: neither Buffer nor TextDecoder are available as globals. 
Please report this error.`); + } + flush() { + if (!this.buffer.length && !this.trailingCR) { + return []; } + const lines = [this.buffer.join("")]; + this.buffer = []; + this.trailingCR = false; + return lines; } +}; +LineDecoder.NEWLINE_CHARS = /* @__PURE__ */ new Set(["\n", "\r", "\v", "\f", "", "", "", "\x85", "\u2028", "\u2029"]); +LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029]/g; +function partition(str, delimiter) { + const index = str.indexOf(delimiter); + if (index !== -1) { + return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)]; + } + return [str, "", ""]; +} +function readableStreamAsyncIterable(stream) { + if (stream[Symbol.asyncIterator]) + return stream; + const reader = stream.getReader(); return { - content: contentStr, - pending: remainingStr + async next() { + try { + const result = await reader.read(); + if (result === null || result === void 0 ? void 0 : result.done) + reader.releaseLock(); + return result; + } catch (e) { + reader.releaseLock(); + throw e; + } + }, + async return() { + const cancelPromise = reader.cancel(); + reader.releaseLock(); + await cancelPromise; + return { done: true, value: void 0 }; + }, + [Symbol.asyncIterator]() { + return this; + } }; } + +// src/openai.js function isOpenAIEnable(context) { const key = context.openAIKeyFromContext(); return key && key.length > 0; @@ -572,34 +752,19 @@ async function requestCompletionsFromOpenAI(message, history, context, onStream) signal }); if (onStream && resp.ok && resp.headers.get("content-type").indexOf("text/event-stream") !== -1) { - const reader = resp.body.getReader({ mode: "byob" }); - const decoder = new TextDecoder("utf-8"); - let data = { done: false }; - let pendingText = ""; + const stream = new Stream(resp, controller); let contentFull = ""; let lengthDelta = 0; let updateStep = 20; - while (data.done === false) { - try { - data = await reader.readAtLeast(4096, new Uint8Array(5e3)); - pendingText += decoder.decode(data.value); - const content = extractContentFromStreamData(pendingText); - pendingText = content.pending; - lengthDelta += content.content.length; - contentFull = contentFull + content.content; - if (lengthDelta > updateStep) { - lengthDelta = 0; - updateStep += 5; - await onStream(`${contentFull} + for await (const data of stream) { + const c = data.choices[0].delta?.content || ""; + lengthDelta += c.length; + contentFull = contentFull + c; + if (lengthDelta > updateStep) { + lengthDelta = 0; + updateStep += 5; + await onStream(`${contentFull} ${ENV.I18N.message.loading}...`); - } - } catch (e) { - contentFull += ` - -[ERROR]: ${e.message} - -`; - break; } } return contentFull; @@ -1341,7 +1506,7 @@ var Ai = class { // src/workers-ai.js function isWorkersAIEnable(context) { - return AI !== null; + return AI && AI.fetch; } async function requestCompletionsFromWorkersAI(message, history, context, onStream) { const ai = new Ai(AI); diff --git a/dist/timestamp b/dist/timestamp index 5a60800f..f1c01cab 100644 --- a/dist/timestamp +++ b/dist/timestamp @@ -1 +1 @@ -1696670444 +1696745505 diff --git a/src/openai.js b/src/openai.js index 6ebf5d52..9ad67fae 100644 --- a/src/openai.js +++ b/src/openai.js @@ -1,33 +1,8 @@ /* eslint-disable no-unused-vars */ import {Context} from './context.js'; import {DATABASE, ENV} from './env.js'; +import {Stream} from "./vendors/stream.js"; -/** - * 从流数据中提取内容 - * @param {string} stream - * @return {{pending: string, content: string}} - */ -function extractContentFromStreamData(stream) { - 
const line = stream.split('\n'); - let remainingStr = ''; - let contentStr = ''; - for (const l of line) { - try { - if (l.startsWith('data:') && l.endsWith('}')) { - const data = JSON.parse(l.substring(5)); - contentStr += data.choices[0].delta?.content || ''; - } else { - remainingStr = l; - } - } catch (e) { - remainingStr = l; - } - } - return { - content: contentStr, - pending: remainingStr, - }; -} /** * @return {boolean} @@ -80,30 +55,19 @@ export async function requestCompletionsFromOpenAI(message, history, context, on signal, }); if (onStream && resp.ok && resp.headers.get('content-type').indexOf('text/event-stream') !== -1) { - const reader = resp.body.getReader({mode: 'byob'}); - const decoder = new TextDecoder('utf-8'); - let data = {done: false}; - let pendingText = ''; + const stream = new Stream(resp, controller) let contentFull = ''; let lengthDelta = 0; let updateStep = 20; - while (data.done === false) { - try { - data = await reader.readAtLeast(4096, new Uint8Array(5000)); - pendingText += decoder.decode(data.value); - const content = extractContentFromStreamData(pendingText); - pendingText = content.pending; - lengthDelta += content.content.length; - contentFull = contentFull + content.content; + for await (const data of stream) { + const c = data.choices[0].delta?.content || '' + lengthDelta += c.length; + contentFull = contentFull + c; if (lengthDelta > updateStep) { - lengthDelta = 0; - updateStep += 5; - await onStream(`${contentFull}\n${ENV.I18N.message.loading}...`); + lengthDelta = 0; + updateStep += 5; + await onStream(`${contentFull}\n${ENV.I18N.message.loading}...`); } - } catch (e) { - contentFull += `\n\n[ERROR]: ${e.message}\n\n`; - break; - } } return contentFull; } diff --git a/src/vendors/stream.js b/src/vendors/stream.js new file mode 100644 index 00000000..b77d9095 --- /dev/null +++ b/src/vendors/stream.js @@ -0,0 +1,224 @@ +/* eslint-disable */ + +export class Stream { + constructor(response, controller) { + this.response = response; + this.controller = controller; + this.decoder = new SSEDecoder(); + } + async *iterMessages() { + if (!this.response.body) { + this.controller.abort(); + throw new Error(`Attempted to iterate over a response with no body`); + } + const lineDecoder = new LineDecoder(); + const iter = readableStreamAsyncIterable(this.response.body); + for await (const chunk of iter) { + for (const line of lineDecoder.decode(chunk)) { + const sse = this.decoder.decode(line); + if (sse) + yield sse; + } + } + for (const line of lineDecoder.flush()) { + const sse = this.decoder.decode(line); + if (sse) + yield sse; + } + } + async *[Symbol.asyncIterator]() { + let done = false; + try { + for await (const sse of this.iterMessages()) { + if (done) + continue; + if (sse.data.startsWith('[DONE]')) { + done = true; + continue; + } + if (sse.event === null) { + try { + yield JSON.parse(sse.data); + } + catch (e) { + console.error(`Could not parse message into JSON:`, sse.data); + console.error(`From chunk:`, sse.raw); + throw e; + } + } + } + done = true; + } + catch (e) { + // If the user calls `stream.controller.abort()`, we should exit without throwing. + if (e instanceof Error && e.name === 'AbortError') + return; + throw e; + } + finally { + // If the user `break`s, abort the ongoing request. 
+ if (!done) + this.controller.abort(); + } + } +} +class SSEDecoder { + constructor() { + this.event = null; + this.data = []; + this.chunks = []; + } + decode(line) { + if (line.endsWith('\r')) { + line = line.substring(0, line.length - 1); + } + if (!line) { + // empty line and we didn't previously encounter any messages + if (!this.event && !this.data.length) + return null; + const sse = { + event: this.event, + data: this.data.join('\n'), + raw: this.chunks, + }; + this.event = null; + this.data = []; + this.chunks = []; + return sse; + } + this.chunks.push(line); + if (line.startsWith(':')) { + return null; + } + let [fieldname, _, value] = partition(line, ':'); + if (value.startsWith(' ')) { + value = value.substring(1); + } + if (fieldname === 'event') { + this.event = value; + } + else if (fieldname === 'data') { + this.data.push(value); + } + return null; + } +} +/** + * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally + * reading lines from text. + * + * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258 + */ +class LineDecoder { + constructor() { + this.buffer = []; + this.trailingCR = false; + } + decode(chunk) { + let text = this.decodeText(chunk); + if (this.trailingCR) { + text = '\r' + text; + this.trailingCR = false; + } + if (text.endsWith('\r')) { + this.trailingCR = true; + text = text.slice(0, -1); + } + if (!text) { + return []; + } + const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || ''); + let lines = text.split(LineDecoder.NEWLINE_REGEXP); + if (lines.length === 1 && !trailingNewline) { + this.buffer.push(lines[0]); + return []; + } + if (this.buffer.length > 0) { + lines = [this.buffer.join('') + lines[0], ...lines.slice(1)]; + this.buffer = []; + } + if (!trailingNewline) { + this.buffer = [lines.pop() || '']; + } + return lines; + } + decodeText(bytes) { + var _a; + if (bytes == null) + return ''; + if (typeof bytes === 'string') + return bytes; + // Node: + if (typeof Buffer !== 'undefined') { + if (bytes instanceof Buffer) { + return bytes.toString(); + } + if (bytes instanceof Uint8Array) { + return Buffer.from(bytes).toString(); + } + throw new Error(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global "Buffer" defined, which this library assumes to be Node. Please report this error.`); + } + // Browser + if (typeof TextDecoder !== 'undefined') { + if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) { + (_a = this.textDecoder) !== null && _a !== void 0 ? _a : (this.textDecoder = new TextDecoder('utf8')); + return this.textDecoder.decode(bytes); + } + throw new Error(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. Please report this error.`); + } + throw new Error(`Unexpected: neither Buffer nor TextDecoder are available as globals. 
Please report this error.`); + } + flush() { + if (!this.buffer.length && !this.trailingCR) { + return []; + } + const lines = [this.buffer.join('')]; + this.buffer = []; + this.trailingCR = false; + return lines; + } +} +// prettier-ignore +LineDecoder.NEWLINE_CHARS = new Set(['\n', '\r', '\x0b', '\x0c', '\x1c', '\x1d', '\x1e', '\x85', '\u2028', '\u2029']); +LineDecoder.NEWLINE_REGEXP = /\r\n|[\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029]/g; +function partition(str, delimiter) { + const index = str.indexOf(delimiter); + if (index !== -1) { + return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)]; + } + return [str, '', '']; +} +/** + * Most browsers don't yet have async iterable support for ReadableStream, + * and Node has a very different way of reading bytes from its "ReadableStream". + * + * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490 + */ +function readableStreamAsyncIterable(stream) { + if (stream[Symbol.asyncIterator]) + return stream; + const reader = stream.getReader(); + return { + async next() { + try { + const result = await reader.read(); + if (result === null || result === void 0 ? void 0 : result.done) + reader.releaseLock(); // release lock when stream becomes closed + return result; + } + catch (e) { + reader.releaseLock(); // release lock when stream becomes errored + throw e; + } + }, + async return() { + const cancelPromise = reader.cancel(); + reader.releaseLock(); + await cancelPromise; + return { done: true, value: undefined }; + }, + [Symbol.asyncIterator]() { + return this; + }, + }; +} From 25774cb1212520be99f3eec41b5afe168126cac6 Mon Sep 17 00:00:00 2001 From: TBXark Date: Sun, 8 Oct 2023 14:18:57 +0800 Subject: [PATCH 10/11] =?UTF-8?q?perf:=20=E6=B7=BB=E5=8A=A0=E6=B5=81?= =?UTF-8?q?=E8=A7=A3=E6=9E=90=E5=A4=B1=E8=B4=A5=E9=94=99=E8=AF=AF=E4=BF=A1?= =?UTF-8?q?=E6=81=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- dist/buildinfo.json | 2 +- dist/index.js | 25 +++++++++++++++---------- dist/timestamp | 2 +- src/openai.js | 22 +++++++++++++--------- src/vendors/stream.js | 1 + src/workers-ai.js | 2 +- 6 files changed, 32 insertions(+), 22 deletions(-) diff --git a/dist/buildinfo.json b/dist/buildinfo.json index a2c45d01..a4459d61 100644 --- a/dist/buildinfo.json +++ b/dist/buildinfo.json @@ -1 +1 @@ -{"sha": "9d714ee", "timestamp": 1696745505} +{"sha": "12fa595", "timestamp": 1696745907} diff --git a/dist/index.js b/dist/index.js index 8d2fbdc9..e9bea65e 100644 --- a/dist/index.js +++ b/dist/index.js @@ -41,9 +41,9 @@ var ENV = { // 检查更新的分支 UPDATE_BRANCH: "master", // 当前版本 - BUILD_TIMESTAMP: 1696745505, + BUILD_TIMESTAMP: 1696745907, // 当前版本 commit id - BUILD_VERSION: "9d714ee", + BUILD_VERSION: "12fa595", I18N: null, LANGUAGE: "zh-cn", // 使用流模式 @@ -756,16 +756,21 @@ async function requestCompletionsFromOpenAI(message, history, context, onStream) let contentFull = ""; let lengthDelta = 0; let updateStep = 20; - for await (const data of stream) { - const c = data.choices[0].delta?.content || ""; - lengthDelta += c.length; - contentFull = contentFull + c; - if (lengthDelta > updateStep) { - lengthDelta = 0; - updateStep += 5; - await onStream(`${contentFull} + try { + for await (const data of stream) { + const c = data.choices[0].delta?.content || ""; + lengthDelta += c.length; + contentFull = contentFull + c; + if (lengthDelta > updateStep) { + lengthDelta = 0; + updateStep += 5; + await onStream(`${contentFull} 
${ENV.I18N.message.loading}...`); + } } + } catch (e) { + contentFull += ` +ERROR: ${e.message}`; } return contentFull; } diff --git a/dist/timestamp b/dist/timestamp index f1c01cab..119faa50 100644 --- a/dist/timestamp +++ b/dist/timestamp @@ -1 +1 @@ -1696745505 +1696745907 diff --git a/src/openai.js b/src/openai.js index 9ad67fae..886ac18a 100644 --- a/src/openai.js +++ b/src/openai.js @@ -1,7 +1,7 @@ /* eslint-disable no-unused-vars */ import {Context} from './context.js'; import {DATABASE, ENV} from './env.js'; -import {Stream} from "./vendors/stream.js"; +import {Stream} from './vendors/stream.js'; /** @@ -55,19 +55,23 @@ export async function requestCompletionsFromOpenAI(message, history, context, on signal, }); if (onStream && resp.ok && resp.headers.get('content-type').indexOf('text/event-stream') !== -1) { - const stream = new Stream(resp, controller) + const stream = new Stream(resp, controller); let contentFull = ''; let lengthDelta = 0; let updateStep = 20; - for await (const data of stream) { - const c = data.choices[0].delta?.content || '' - lengthDelta += c.length; - contentFull = contentFull + c; + try { + for await (const data of stream) { + const c = data.choices[0].delta?.content || ''; + lengthDelta += c.length; + contentFull = contentFull + c; if (lengthDelta > updateStep) { - lengthDelta = 0; - updateStep += 5; - await onStream(`${contentFull}\n${ENV.I18N.message.loading}...`); + lengthDelta = 0; + updateStep += 5; + await onStream(`${contentFull}\n${ENV.I18N.message.loading}...`); } + } + } catch (e) { + contentFull += `\nERROR: ${e.message}`; } return contentFull; } diff --git a/src/vendors/stream.js b/src/vendors/stream.js index b77d9095..942c1784 100644 --- a/src/vendors/stream.js +++ b/src/vendors/stream.js @@ -1,4 +1,5 @@ /* eslint-disable */ +// https://github.com/openai/openai-node/blob/master/src/streaming.ts export class Stream { constructor(response, controller) { diff --git a/src/workers-ai.js b/src/workers-ai.js index 5217a337..ae4e01bf 100644 --- a/src/workers-ai.js +++ b/src/workers-ai.js @@ -7,7 +7,7 @@ import {Ai} from './vendors/cloudflare-ai.js'; * @return {boolean} */ export function isWorkersAIEnable(context) { - return AI && AI.fetch + return AI && AI.fetch; } From 01eb8d956011cef961a615028180576866928f03 Mon Sep 17 00:00:00 2001 From: TBXark Date: Sun, 8 Oct 2023 14:24:09 +0800 Subject: [PATCH 11/11] doc: Update README.md --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index a66320bc..140c1278 100644 --- a/README.md +++ b/README.md @@ -39,7 +39,8 @@ ## 更新日志 - v1.6.0 - - 添加`workers ai`支持,具体配置查看[配置文档](./doc/CONFIG.md) + - 添加[`workers ai`](https://developers.cloudflare.com/workers-ai/)支持,具体配置查看[配置文档](./doc/CONFIG.md) + - 优化openai流模式解析器 其他更新日志见[CHANGELOG.md](./doc/CHANGELOG.md)
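
Illustrative sketch (not from the patches above): one way the vendored `Stream` class introduced in `src/vendors/stream.js` could be consumed against an OpenAI-compatible endpoint that answers with `text/event-stream`. The `url`, `headers`, `body`, and `onChunk` names here are placeholders, not identifiers from the repository.

```js
// Minimal sketch, assuming an OpenAI-compatible chat/completions endpoint
// and the vendored SSE parser from src/vendors/stream.js.
import {Stream} from './vendors/stream.js';

async function streamCompletion(url, headers, body, onChunk) {
  const controller = new AbortController();
  const resp = await fetch(url, {
    method: 'POST',
    headers,
    body: JSON.stringify({...body, stream: true}),
    signal: controller.signal, // aborting the controller stops the SSE iteration
  });
  // Stream(resp, controller) yields each parsed SSE JSON payload until "[DONE]".
  const stream = new Stream(resp, controller);
  let contentFull = '';
  for await (const data of stream) {
    const c = data.choices[0].delta?.content || '';
    contentFull += c;
    if (c && onChunk) {
      await onChunk(contentFull); // e.g. edit a Telegram message in place
    }
  }
  return contentFull;
}
```

In the patches above, `requestCompletionsFromOpenAI` follows this same pattern, but throttles the `onStream` callback with the `lengthDelta`/`updateStep` counters so that a Telegram message edit is not issued for every token.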