Merge pull request #27 from RainEggplant/official_api
Support official ChatGPT API; Support proxy with custom fetch.
RainEggplant authored Mar 2, 2023
2 parents 4b4ecd5 + 758db92 commit 2c47713
Showing 7 changed files with 230 additions and 163 deletions.
14 changes: 5 additions & 9 deletions config/default.json
@@ -15,7 +15,6 @@
"isGoogleLogin": false,
"isProAccount": false,
"executablePath": "",
"proxy": "",
"nopechaKey": "",
"captchaToken": "",
"userDataDir": ""
@@ -24,20 +23,17 @@
// Please refer to "https://github.com/transitive-bullshit/chatgpt-api/blob/main/docs/classes/ChatGPTAPI.md#parameters"
"apiKey": "API_KEY",
"apiBaseUrl": "",
"apiReverseProxyUrl": "",
"completionParams": {},
// You can override the default (make the model behave like ChatGPT) prompt prefix and suffix
// (see https://github.com/RainEggplant/chatgpt-telegram-bot/issues/11)
"promptPrefix": "",
"promptSuffix": "",
"userLabel": "",
"assistantLabel": ""
"systemMessage": "",
"maxModelTokens": 0, // set to 0 to use default
"maxResponseTokens": 0 // set to 0 to use default
},
"unofficial": {
// Please refer to "https://github.com/transitive-bullshit/chatgpt-api#usage---chatgptunofficialproxyapi"
"accessToken": "ACCESS_TOKEN",
"apiReverseProxyUrl": "",
"model": ""
}
}
},
"proxy": "" // You can also specify the proxy using the environment variable "HTTP_PROXY"
}
10 changes: 6 additions & 4 deletions package.json
@@ -32,11 +32,11 @@
"devDependencies": {
"@types/config": "^3.3.0",
"@types/lodash": "^4.14.191",
"@types/node": "^18.14.2",
"@types/node": "^18.14.4",
"@types/node-telegram-bot-api": "^0.57.7",
"@types/promise-queue": "^2.2.0",
"@typescript-eslint/eslint-plugin": "^5.53.0",
"@typescript-eslint/parser": "^5.53.0",
"@typescript-eslint/eslint-plugin": "^5.54.0",
"@typescript-eslint/parser": "^5.54.0",
"eslint": "^8.35.0",
"eslint-config-prettier": "^8.6.0",
"husky": "^8.0.3",
@@ -46,12 +46,14 @@
"typescript": "^4.9.5"
},
"dependencies": {
"chatgpt": "^4.7.2",
"chatgpt": "^5.0.4",
"chatgpt-v3": "npm:[email protected]",
"config": "^3.3.9",
"dotenv": "^16.0.3",
"extensionless": "^1.1.0",
"https-proxy-agent": "^5.0.1",
"lodash": "^4.17.21",
"node-fetch": "^3.3.0",
"node-telegram-bot-api": "^0.60.0",
"promise-queue": "^2.2.5",
"puppeteer": "^19.7.2",
304 changes: 176 additions & 128 deletions pnpm-lock.yaml

Large diffs are not rendered by default.

2 changes: 0 additions & 2 deletions src/api.ts
@@ -78,8 +78,6 @@ class ChatGPT {
res = await this._apiOfficial.sendMessage(text, {
...this._context,
onProgress,
promptPrefix: this._opts.official?.promptPrefix,
promptSuffix: this._opts.official?.promptSuffix,
});
} else {
res = await this._api.sendMessage(text, {
6 changes: 5 additions & 1 deletion src/index.ts
@@ -11,7 +11,11 @@ async function main() {
await api.init();

// Initialize Telegram Bot and message handler.
const bot = new TelegramBot(opts.bot.token, {polling: true});
const bot = new TelegramBot(opts.bot.token, {
polling: true,
// eslint-disable-next-line @typescript-eslint/no-explicit-any
request: {proxy: opts.proxy} as any,
});
const messageHandler = new MessageHandler(bot, api, opts.bot, opts.debug);
await messageHandler.init();

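A note on the bot-side proxy (context added here, not part of the diff): node-telegram-bot-api hands the "request" field of its constructor options to the underlying request HTTP client, which understands a "proxy" URL, so the same proxy setting also covers the bot's own Telegram traffic. The "as any" cast and the eslint-disable are presumably needed because the bundled typings expect a fuller request-options object than the single proxy field supplied here. A minimal standalone sketch of the same idea (the function name is illustrative):

import TelegramBot from 'node-telegram-bot-api';

// Sketch only: create the bot and, when a proxy URL is given, route Telegram
// API traffic through it. Mirrors the cast used in src/index.ts above.
function createBot(token: string, proxy?: string): TelegramBot {
  return new TelegramBot(token, {
    polling: true,
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    ...(proxy ? {request: {proxy} as any} : {}),
  });
}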
17 changes: 10 additions & 7 deletions src/types.d.ts
@@ -1,4 +1,4 @@
import type {openai} from 'chatgpt';
import type {openai, FetchFn} from 'chatgpt';

export interface BotOptions {
token: string;
@@ -16,18 +16,20 @@ export interface APIBrowserOptions {
proxyServer?: string;
nopechaKey?: string;
captchaToken?: string;
userDataDir?: string;
debug?: boolean;
}

export interface APIOfficialOptions {
apiKey: string;
apiBaseUrl?: string;
apiReverseProxyUrl?: string;
completionParams?: Partial<openai.CompletionParams>;
promptPrefix?: string;
promptSuffix?: string;
userLabel?: string;
assistantLabel?: string;
completionParams?: Partial<
Omit<openai.CreateChatCompletionRequest, 'messages' | 'n'>
>;
systemMessage?: string;
maxModelTokens?: number;
maxResponseTokens?: number;
fetch?: FetchFn;
debug?: boolean;
}

@@ -49,4 +51,5 @@ export interface Config {
debug: number;
bot: BotOptions;
api: APIOptions;
proxy?: string;
}
40 changes: 28 additions & 12 deletions src/utils.ts
@@ -1,6 +1,9 @@
import type {openai} from 'chatgpt';
import type {FetchFn, openai} from 'chatgpt';
import config from 'config';
import pkg from 'https-proxy-agent';
import fetch, {type RequestInfo, type RequestInit} from 'node-fetch';
import {Config} from './types';
const {HttpsProxyAgent} = pkg;

function loadConfig(): Config {
function tryGet<T>(key: string): T | undefined {
@@ -11,6 +14,18 @@ function loadConfig() {
}
}

let fetchFn: FetchFn | undefined = undefined;
const proxy = tryGet<string>('proxy') || process.env.http_proxy;
if (proxy) {
console.log('Use proxy: ' + proxy);
const proxyAgent = new HttpsProxyAgent(proxy);
fetchFn = ((url, opts) =>
fetch(
url as RequestInfo,
{...opts, agent: proxyAgent} as RequestInit
)) as FetchFn;
}

const apiType = config.get<'browser' | 'official' | 'unofficial'>('api.type');
let apiBrowserCfg;
let apiOfficialCfg;
@@ -22,7 +37,7 @@ function loadConfig(): Config {
isGoogleLogin: tryGet<boolean>('api.browser.isGoogleLogin') || false,
isProAccount: tryGet<boolean>('api.browser.isProAccount') || false,
executablePath: tryGet<string>('api.browser.executablePath') || undefined,
proxyServer: tryGet<string>('api.browser.proxy') || undefined,
proxyServer: tryGet<string>('proxy') || undefined,
nopechaKey: tryGet<string>('api.browser.nopechaKey') || undefined,
captchaToken: tryGet<string>('api.browser.captchaToken') || undefined,
userDataDir: tryGet<string>('api.browser.userDataDir') || undefined,
@@ -32,17 +47,16 @@ function loadConfig(): Config {
apiOfficialCfg = {
apiKey: config.get<string>('api.official.apiKey'),
apiBaseUrl: tryGet<string>('api.official.apiBaseUrl') || undefined,
apiReverseProxyUrl:
tryGet<string>('api.official.apiReverseProxyUrl') || undefined,
completionParams:
tryGet<Partial<openai.CompletionParams>>(
'api.official.completionParams'
) || undefined,
promptPrefix: tryGet<string>('api.official.promptPrefix') || undefined,
promptSuffix: tryGet<string>('api.official.promptSuffix') || undefined,
userLabel: tryGet<string>('api.official.userLabel') || undefined,
assistantLabel:
tryGet<string>('api.official.assistantLabel') || undefined,
tryGet<
Partial<Omit<openai.CreateChatCompletionRequest, 'messages' | 'n'>>
>('api.official.completionParams') || undefined,
systemMessage: tryGet<string>('api.official.systemMessage') || undefined,
maxModelTokens:
tryGet<number>('api.official.maxModelTokens') || undefined,
maxResponseTokens:
tryGet<number>('api.official.maxResponseTokens') || undefined,
fetch: fetchFn,
debug: config.get<number>('debug') >= 2,
};
} else if (apiType == 'unofficial') {
@@ -51,6 +65,7 @@ function loadConfig(): Config {
apiReverseProxyUrl:
tryGet<string>('api.unofficial.apiReverseProxyUrl') || undefined,
model: tryGet<string>('api.unofficial.model') || undefined,
fetch: fetchFn,
debug: config.get<number>('debug') >= 2,
};
} else {
@@ -71,6 +86,7 @@ function loadConfig(): Config {
official: apiOfficialCfg,
unofficial: apiUnofficialCfg,
},
proxy: proxy,
};

return cfg;
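
The consuming side in src/api.ts is only touched by the small hunk above, so the full wiring is not visible in this diff. A rough sketch of how the options assembled in loadConfig(), including the proxy-aware fetch, would typically be handed to the chatgpt package, assuming the v5 ChatGPTAPI constructor accepts apiKey, apiBaseUrl, completionParams, systemMessage, maxModelTokens, maxResponseTokens, fetch, and debug as its documentation describes (the function name is illustrative):

import {ChatGPTAPI} from 'chatgpt';
import type {APIOfficialOptions} from './types';

// Sketch only, not the repository's actual src/api.ts. Every request the client
// makes goes through opts.fetch, which loadConfig() builds from node-fetch plus
// HttpsProxyAgent whenever a proxy is configured.
function createOfficialApi(opts: APIOfficialOptions): ChatGPTAPI {
  return new ChatGPTAPI({
    apiKey: opts.apiKey,
    apiBaseUrl: opts.apiBaseUrl,
    completionParams: opts.completionParams,
    systemMessage: opts.systemMessage,
    maxModelTokens: opts.maxModelTokens,
    maxResponseTokens: opts.maxResponseTokens,
    fetch: opts.fetch,
    debug: opts.debug,
  });
}

The same fetchFn is also threaded into the unofficial-proxy configuration (the "fetch: fetchFn" line in apiUnofficialCfg above), so both API modes respect the configured proxy.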
