diff --git a/src/components/ChatView.jsx b/src/components/ChatView.jsx
index a8e94eb..b29a38d 100644
--- a/src/components/ChatView.jsx
+++ b/src/components/ChatView.jsx
@@ -8,6 +8,7 @@ import { davinci } from '../utils/davinci';
import { dalle } from '../utils/dalle';
import Modal from './Modal';
import Setting from './Setting';
+import { AiPayClient } from 'ai-pay';
const options = ['ChatGPT', 'DALL·E'];
const gptModel = ['gpt-3.5-turbo', 'gpt-4'];
@@ -42,6 +43,8 @@ const ChatView = () => {
const [gpt, setGpt] = useState(gptModel[0]);
const [messages, addMessage] = useContext(ChatContext);
const [modalOpen, setModalOpen] = useState(false);
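+ // Holds the partially streamed LLM response; undefined whenever no stream is in flight.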
+ const [streamedResponse, setStreamedResponse] = useState(undefined);
/**
* Scrolls the chat area to the bottom.
@@ -78,7 +81,9 @@ const ChatView = () => {
e.preventDefault();
const key = window.localStorage.getItem('api-key');
- if (!key) {
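+ // An active AI Pay session can stand in for a locally stored OpenAI API key.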
+ const sessionId = AiPayClient.getInstance().getClientSessionId();
+ if (!key && !sessionId) {
setModalOpen(true);
return;
}
@@ -97,13 +102,16 @@ const ChatView = () => {
console.log(selected);
try {
if (aiModel === options[0]) {
- const LLMresponse = await davinci(cleanPrompt, key, gptVersion);
- //const data = response.data.choices[0].message.content;
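+ // Stream partial output into the UI via the callback, then clear it once the full response arrives.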
+ const LLMresponse = await davinci(cleanPrompt, key, gptVersion, (partialResponse) => {
+   setStreamedResponse(partialResponse);
+ });
+ setStreamedResponse(undefined);
+
LLMresponse && updateMessage(LLMresponse, true, aiModel);
} else {
- const response = await dalle(cleanPrompt, key);
- const data = response.data.data[0].url;
- data && updateMessage(data, true, aiModel);
+ const responseUrl = await dalle(cleanPrompt, key);
+ responseUrl && updateMessage(responseUrl, true, aiModel);
}
} catch (err) {
window.alert(`Error: ${err} please try again later`);
@@ -171,7 +179,14 @@ const ChatView = () => {
)}
- {thinking && <Thinking />}
+ {thinking && !streamedResponse && <Thinking />}
+
+ {streamedResponse && <Message
+   message={{
+     createdAt: undefined,
+     text: streamedResponse,
+     ai: true,
+   }} />}
@@ -198,7 +213,7 @@ const ChatView = () => {
-
+
diff --git a/src/components/Message.jsx b/src/components/Message.jsx
index b5c2e17..ca0b935 100644
--- a/src/components/Message.jsx
+++ b/src/components/Message.jsx
@@ -28,7 +28,8 @@ const Message = (props) => {
- {moment(createdAt).calendar()}
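+ {/* Streamed placeholder messages have no timestamp until the response completes. */}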
+ {createdAt && moment(createdAt).calendar()}
diff --git a/src/components/Setting.jsx b/src/components/Setting.jsx
index c18e340..44ed528 100644
--- a/src/components/Setting.jsx
+++ b/src/components/Setting.jsx
@@ -1,5 +1,6 @@
import { useEffect, useState } from 'react';
import { checkApiKey } from '../utils/checkKeys';
+import { useSessionData } from 'ai-pay-react-hooks';
import PropTypes from 'prop-types';
@@ -9,6 +10,18 @@ const Setting = ({ modalOpen, setModalOpen }) => {
const [errorMsg, setErrorMsg] = useState('');
const [input, setInput] = useState('');
+ const {
+ browserExtensionInstalled,
+ sessionState,
+ } = useSessionData();
+
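+ // Close the settings modal automatically once an AI Pay session becomes active.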
+ useEffect(() => {
+ if (sessionState === "ACTIVE") {
+ setModalOpen(false);
+ }
+ }, [sessionState, setModalOpen]);
+
const saveKey = async (e) => {
e.preventDefault();
setLoading(true);
@@ -44,7 +57,7 @@ const Setting = ({ modalOpen, setModalOpen }) => {
);
};
diff --git a/src/components/SideBar.jsx b/src/components/SideBar.jsx
index 27b2e32..372a122 100644
--- a/src/components/SideBar.jsx
+++ b/src/components/SideBar.jsx
@@ -90,11 +90,11 @@ const SideBar = () => {
onClick={() => setModalOpen(true)}>
- OpenAI Key
+ AI Provider
-
+
diff --git a/src/utils/dalle.js b/src/utils/dalle.js
index aad6edd..53b18db 100644
--- a/src/utils/dalle.js
+++ b/src/utils/dalle.js
@@ -1,6 +1,27 @@
import { Configuration, OpenAIApi } from 'openai';
+import { AiPayClient, imageGeneration } from 'ai-pay';
export const dalle = async (prompt, key) => {
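+ // Prefer an active AI Pay session; otherwise fall through to the user's own OpenAI key below.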
+ const AiPaySessionId = AiPayClient.getInstance().getClientSessionId();
+
+ if (AiPaySessionId) {
+ const {
+ error,
+ data,
+ } = await imageGeneration({
+ prompt,
+ imageModel: "dall-e-2",
+ size: '512x512',
+ });
+
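+ // imageGeneration reports failures through 'error' alongside a missing 'data' payload.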
+ if (!data) {
+ throw new Error(error);
+ }
+ return data.imageUrls[0];
+ }
+
const configuration = new Configuration({
apiKey: key,
});
@@ -12,5 +33,5 @@ export const dalle = async (prompt, key) => {
size: '512x512',
});
- return response;
+ return response.data.data[0].url;
};
diff --git a/src/utils/davinci.js b/src/utils/davinci.js
index c305075..fa89a06 100644
--- a/src/utils/davinci.js
+++ b/src/utils/davinci.js
@@ -7,13 +7,14 @@ import {
MessagesPlaceholder,
} from 'langchain/prompts';
import { BufferMemory } from 'langchain/memory';
+import { AiPayClient } from 'ai-pay';
const memory = new BufferMemory({
returnMessages: true,
memoryKey: 'history',
});
-export const davinci = async (prompt, key, gptVersion) => {
+export const davinci = async (prompt, key, gptVersion, streamCallback) => {
const chatPrompt = ChatPromptTemplate.fromMessages([
SystemMessagePromptTemplate.fromTemplate(
'The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context and always responds in markdown format. If the AI does not know the answer to a question, it truthfully says it does not know.'
@@ -21,11 +22,24 @@ export const davinci = async (prompt, key, gptVersion) => {
new MessagesPlaceholder('history'),
HumanMessagePromptTemplate.fromTemplate('{input}'),
]);
+
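+ // An active AI Pay session id doubles as the API key for AI Pay's OpenAI-compatible endpoint.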
+ const aiPaySessionId = AiPayClient.getInstance().getClientSessionId();
+
const model = new ChatOpenAI({
- openAIApiKey: key,
model: gptVersion,
temperature: 0.3,
- });
+ streaming: true,
+
+ ...(aiPaySessionId ? {
+ openAIApiKey: aiPaySessionId,
+ configuration: {
+ baseURL: 'https://api.joinaipay.com/api/openai-compatible'
+ },
+ } : {
+ openAIApiKey: key,
+ })
+ });
const chain = new ConversationChain({
memory: memory,
@@ -33,8 +47,19 @@ export const davinci = async (prompt, key, gptVersion) => {
llm: model,
});
- const response = await chain.call({ input: prompt });
- console.log(response);
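+ // Accumulate streamed tokens and surface the running text through the caller's callback.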
+ let streamedResponse = '';
+ const response = await chain.call({
+ input: prompt,
+ callbacks: [
+ {
+ handleLLMNewToken(token) {
+ streamedResponse += token;
+ streamCallback(streamedResponse);
+ }
+ }
+ ]
+ });
return response.response;
};
diff --git a/vite.config.js b/vite.config.js
index b3f5337..79514d5 100644
--- a/vite.config.js
+++ b/vite.config.js
@@ -2,9 +2,11 @@ import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react';
// https://vitejs.dev/config/
-export default defineConfig({
+export default defineConfig(({ base }) => ({
plugins: [react()],
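+ // NOTE: Vite's config env only provides { command, mode }, so 'base' is destructured as undefined here and Vite falls back to the default '/'.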
+ base,
build: {
chunkSizeWarningLimit: 2000, // set the limit to 2000 kBs
},
-});
+}));