Implement RirikoLLaMA (self-hosted OpenLLaMA for the Ririko AI bot), replace deprecated OpenAI model (text-davinci-003) with text-davinci-002, code cleanup (#236)

* Add a new provider: RirikoLLaMA - allowing you to host your own GPT model at home

* Remove OpenAI text-davinci-003 (deprecated model) and replace it with text-davinci-002

* Clean up code in ririkoStreamChecker

* 0.11.0
earnestangel authored Sep 15, 2023
1 parent f8f71b1 commit f7a5953
Showing 8 changed files with 228 additions and 11 deletions.
11 changes: 8 additions & 3 deletions config.example.js
@@ -66,13 +66,18 @@ module.exports = {
     // Prefix of the AI part of the bot
     Prefix: ".",
 
-    // The provider to use for the bot. Must be one of: NLPCloudProvider or OpenAIProvider
+    // The provider to use for the bot. Must be one of: NLPCloudProvider | OpenAIProvider | RirikoLLaMAProvider
     Provider: "OpenAIProvider",
 
     // Provider Token
     Token: "",
 
-    GPTModel: "davinci", // Must be one of: davinci or gpt35
+    // Must be one of: davinci or gpt35
+    GPTModel: "gpt35",
+
+    // URL of the local server for Ririko AI. Leave this empty if you don't have a local server.
+    // Example: http://localhost:5000/api/v1/ask
+    LocalServerURL: "",
 
     // Enable or disable the Whitelist.
     EnableWhitelist: true,
@@ -277,5 +282,5 @@ module.exports = {
     LogDir: "logs",
   },
 
-  VERSION: "6", // DO NOT TOUCH
+  VERSION: "7", // DO NOT TOUCH
 };
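Taken together, switching a bot to the self-hosted provider only touches the AI block of config.js. A minimal sketch, assuming the surrounding AI section looks roughly like config.example.js (other keys omitted):

module.exports = {
  AI: {
    Prefix: ".",
    // Select the provider added in this commit
    Provider: "RirikoLLaMAProvider",
    // Provider token; for a self-hosted server this may be unused or act as a shared secret (assumption)
    Token: "",
    GPTModel: "gpt35",
    // Endpoint of the locally hosted OpenLLaMA server
    LocalServerURL: "http://localhost:5000/api/v1/ask",
  },
  // ...rest of the config unchanged
};

Alternatively, AI_LOCAL_SERVER_URL can be set in the environment, since the new getconfig helper further down checks it first.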
146 changes: 144 additions & 2 deletions package-lock.json

Some generated files are not rendered by default.

4 changes: 3 additions & 1 deletion package.json
@@ -1,6 +1,6 @@
 {
   "name": "ririko",
-  "version": "0.10.0",
+  "version": "0.11.0",
   "description": "Ririko - A powerful AI-powered general Discord bot that you can call your companion",
   "author": "Earnest Angel",
   "email": "[email protected]",
@@ -91,6 +91,7 @@
     "quick.db": "^9.1.6",
     "redis": "^4.6.7",
     "replicate": "^0.17.0",
+    "ririkollama-bot-client": "^1.0.0",
     "save-dev": "^0.0.1-security",
     "semver": "^7.5.3",
     "source-map-support": "^0.5.21",
@@ -105,6 +106,7 @@
     "@babel/preset-env": "^7.22.5",
     "babel-plugin-module-resolver": "^5.0.0",
     "babel-plugin-source-map-support": "^2.2.0",
+    "babel-preset-env": "^1.7.0",
     "babel-preset-es2015": "^6.24.1",
     "babel-preset-minify": "^0.5.2",
     "nodemon": "^3.0.1"
5 changes: 3 additions & 2 deletions src/app/Providers/AI/OpenAIProvider.js
@@ -63,9 +63,10 @@ class OpenAIProvider extends AIProviderBase {
 
       return response.data.choices[0].message.content; // Uncomment for GPT-3.5-turbo
     } else if (model === "davinci") {
-      // Send request to OpenAI for text-davinci-003
+      // Send request to OpenAI for text-davinci-002
+      // NOTE: text-davinci-003 is now removed from OpenAI API
       const response = await this.openAiClient.createCompletion({
-        model: "text-davinci-003",
+        model: "text-davinci-002",
         prompt,
         temperature: 1,
         max_tokens: 2000,
57 changes: 57 additions & 0 deletions src/app/Providers/AI/RirikoLLaMAProvider.js
@@ -0,0 +1,57 @@
const { RirikoLLaMAClient } = require("ririkollama-bot-client");
const { AIProviderBase } = require("app/Providers/AIProviderBase");
const config = require("config");
const getconfig = require("helpers/getconfig");

class RirikoLLaMAProvider extends AIProviderBase {
  constructor() {
    super();
    this.ririkoLlamaClient = new RirikoLLaMAClient({
      apiUrl: getconfig.localAIServerURL(),
      token: (this.token = getconfig.AIToken()),
      settings: {
        "max_new_tokens": 30,
        "temperature": 1.0,
        "repetition_penalty": 1,
        "top_p": 0.2,
        "start": "",
        "break": "\nHuman:",
      },
    });
  }

  getClient() {
    return this.ririkoLlamaClient;
  }

  /**
   * Send chat to the local RirikoLLaMA server
   * @param {String} messageText
   * @param {String} context
   * @param {Array} history
   * @returns {Promise<*>}
   */
  async sendChat(messageText, context, history) {
    try {
      // Build the prompt from the personality context and the prior chat history.
      let prompt = context + history.join("\n");
      prompt = prompt.replace(/\n$/, "");

      // Append the new message in the Human:/Friend: format the model expects.
      prompt += "\nHuman: " + messageText + "\nFriend:";

      // Send the prompt to the local RirikoLLaMA server.
      const response = await this.ririkoLlamaClient.ask(prompt);

      if (typeof response.data["answer"] !== "undefined") {
        return response.data["answer"];
      } else {
        return "(no response)";
      }
    } catch (e) {
      throw e;
    }
  }
}

module.exports = { RirikoLLaMAProvider };
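For context, a rough sketch of how this provider is exercised once selected in config.js; the personality context and history values below are illustrative only, and a RirikoLLaMA server is assumed to be reachable at the configured LocalServerURL:

const { RirikoLLaMAProvider } = require("app/Providers/AI/RirikoLLaMAProvider");

(async () => {
  const provider = new RirikoLLaMAProvider();

  // Personality context and prior turns, in the same Human:/Friend: format the provider appends to.
  const context = "The following is a conversation with Ririko, a friendly AI companion.\n";
  const history = ["Human: Hi there", "Friend: Hello! How can I help you today?"];

  const answer = await provider.sendChat("What can you do?", context, history);
  console.log(answer); // the "answer" field returned by the local server, or "(no response)"
})();
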
5 changes: 4 additions & 1 deletion src/app/RirikoAI-NLP.js
@@ -3,8 +3,9 @@
  */
 const colors = require("colors");
 
-const { OpenAIProvider } = require("./Providers/AI/OpenAIProvider");
+const { OpenAIProvider } = require("app/Providers/AI/OpenAIProvider");
 const { NLPCloudProvider } = require("app/Providers/AI/NLPCloudProvider");
+const { RirikoLLaMAProvider } = require("app/Providers/AI/RirikoLLaMAProvider");
 
 const getconfig = require("helpers/getconfig");
 const { AIProvider, AIPersonality, AIPrompts } = require("helpers/getconfig");
@@ -56,6 +57,8 @@ class RirikoAINLP {
     } else if (AIProvider() === "OpenAIProvider") {
       // If the provider is OpenAIProvider, initialize the OpenAIProvider
       this.provider = new OpenAIProvider();
+    } else if (AIProvider() === 'RirikoLLaMAProvider') {
+      this.provider = new RirikoLLaMAProvider();
     }
 
     // AI provider has been initialized
9 changes: 9 additions & 0 deletions src/helpers/getconfig.js
@@ -224,6 +224,14 @@ const replicateToken = () => {
   return process.env.REPLICATE_TOKEN || config.StableDiffusion.ReplicateToken;
 };
 
+/**
+ * @version 7
+ * @returns {*|string} Local AI server URL
+ */
+const localAIServerURL = () => {
+  return process.env.AI_LOCAL_SERVER_URL || config.AI.LocalServerURL;
+};
+
 module.exports = {
   port,
   language,
@@ -248,5 +256,6 @@
   geniusEnabled,
   lyristUrl,
   lyristEnabled,
+  localAIServerURL,
   replicateToken,
 };
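A quick illustration of the new helper's precedence, assuming helpers/getconfig resolves via the repository's module aliases as elsewhere in this diff:

// Hypothetical check: the environment variable wins over the config file value.
const { localAIServerURL } = require("helpers/getconfig");

process.env.AI_LOCAL_SERVER_URL = "http://localhost:5000/api/v1/ask";
console.log(localAIServerURL()); // -> "http://localhost:5000/api/v1/ask"
// With AI_LOCAL_SERVER_URL unset, the value of AI.LocalServerURL from config.js is returned instead.
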
2 changes: 0 additions & 2 deletions src/ririkoStreamChecker.js
@@ -188,8 +188,6 @@ async function fetchStreamersInfo(streams) {
   if (streams.onlineStreamers.length === 0) return;
   if (!accessToken) await twitchLogin();
 
-  console.log('accessToken', accessToken)
-
   let onlineStreamers = [],
     streamerDetails = [],
     users;
