Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Groq Added #141

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
151 changes: 53 additions & 98 deletions extensions/void/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions extensions/void/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -143,6 +143,8 @@
"eslint-plugin-react": "^7.35.1",
"eslint-plugin-react-hooks": "^4.6.2",
"globals": "^15.9.0",
"groq-sdk": "^0.8.0",

"lodash": "^4.17.21",
"marked": "^14.1.0",
"ollama": "^0.5.9",
Expand Down
49 changes: 49 additions & 0 deletions extensions/void/src/common/sendLLMMessage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,12 @@ import OpenAI from 'openai';
import { Ollama } from 'ollama/browser'
import { Content, GoogleGenerativeAI, GoogleGenerativeAIError, GoogleGenerativeAIFetchError } from '@google/generative-ai';
import { VoidConfig } from '../webviews/common/contextForConfig'

import Groq, { GroqError } from 'groq-sdk';

import { getFIMPrompt, getFIMSystem } from './getPrompt';


export type AbortRef = { current: (() => void) | null }

export type OnText = (newText: string, fullText: string) => void
Expand Down Expand Up @@ -387,6 +391,46 @@ const sendGreptileMsg: SendLLMMessageFnTypeInternal = ({ messages, onText, onFin

}


// Groq
const sendGroqMsg: SendLLMMessageFnTypeInternal = async ({ messages, onText, onFinalMessage, onError, voidConfig, abortRef }) => {
let didAbort = false;
let fullText = '';

abortRef.current = () => {
didAbort = true;
};

const max_tokens = parseMaxTokensStr(voidConfig.default.maxTokens)
const options = { model: voidConfig.groq.model, messages: messages, stream: true, max_tokens: max_tokens, } as const

const groq = new Groq({ apiKey: voidConfig.groq.apikey, dangerouslyAllowBrowser: true });

groq.chat.completions
.create(options)
.then(async response => {
for await (const chunk of response) {
if (didAbort) return;
const newText = chunk.choices[0]?.delta?.content || '';
fullText += newText;
onText(newText, fullText);
}
onFinalMessage(fullText);
})
// when error/fail - this catches errors of both .create() and .then(for await)
.catch(error => {
if (error instanceof GroqError) {
onError(`${error.name}:\n${error.message}`);
}
else {
onError(error);
}
})

};

export const sendLLMMessage: SendLLMMessageFnTypeExternal = ({ messages, onText, onFinalMessage, onError, voidConfig, abortRef }) => {
if (!voidConfig) return;
export const sendLLMMessage: SendLLMMessageFnTypeExternal = ({ mode, messages, fimInfo, onText, onFinalMessage, onError, voidConfig, abortRef }) => {
if (!voidConfig)
return onError('No config file found for LLM.');
Expand Down Expand Up @@ -427,6 +471,11 @@ export const sendLLMMessage: SendLLMMessageFnTypeExternal = ({ mode, messages, f
case 'ollama':
return sendOllamaMsg({ mode, messages, onText, onFinalMessage, onError, voidConfig, abortRef });
case 'greptile':

return sendGreptileMsg({ messages, onText, onFinalMessage, onError, voidConfig, abortRef });
case 'groq':
return sendGroqMsg({ messages, onText, onFinalMessage, onError, voidConfig, abortRef });

return sendGreptileMsg({ mode, messages, onText, onFinalMessage, onError, voidConfig, abortRef });
default:
onError(`Error: whichApi was ${voidConfig.default.whichApi}, which is not recognized!`)
Expand Down
13 changes: 13 additions & 0 deletions extensions/void/src/webviews/common/contextForConfig.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ const configString = (description: string, defaultVal: string) => {
export const configFields = [
'anthropic',
'openAI',
'groq',
'gemini',
'greptile',
'ollama',
Expand Down Expand Up @@ -137,6 +138,18 @@ const voidConfigInfo: Record<
model: configString('The name of the model to use.', 'gpt-4o'),
apikey: configString('Your API key.', ''),
},
groq: {
	// Groq cloud API credentials — stored per-user, empty by default.
	apikey: configString('Groq API key.', ''),
	model: configEnum(
		'Groq model to use.',
		'mixtral-8x7b-32768',
		// NOTE(review): these model IDs look dated — Groq periodically
		// decommissions hosted models (e.g. llama2-70b-4096). Verify this
		// list against the current Groq models endpoint before release.
		[
			"mixtral-8x7b-32768",
			"llama2-70b-4096",
			"gemma-7b-it"
		] as const
	),
},
azure: {
// "void.azure.apiKey": {
// "type": "string",
Expand Down