Skip to content

Commit

Permalink
change default model to GPT-3.5-turbo (#374)
Browse files — browse the repository at this point in the history
* change default model to GPT-3.5-turbo

* eqeqeq eslint
Loading branch information…
heatherlogan-scottlogic authored Oct 17, 2023
1 parent f4cee29 commit 4f87975
Show file tree
Hide file tree
Showing 5 changed files with 8 additions and 6 deletions.
2 changes: 2 additions & 0 deletions backend/.eslintrc.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ module.exports = {
ignorePatterns: ["build", "coverage", "node_modules", "jest.config.js"],
rules: {
"@typescript-eslint/init-declarations": "error",

"@typescript-eslint/no-misused-promises": [
"error",
{
Expand All @@ -25,5 +26,6 @@ module.exports = {
"@typescript-eslint/unbound-method": ["error", { ignoreStatic: true }],
"func-style": ["error", "declaration"],
"prefer-template": "error",
eqeqeq: "error",
},
};
6 changes: 3 additions & 3 deletions backend/src/langchain.ts
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ function initQAModel(

// initialise model
const model = new ChatOpenAI({
modelName: CHAT_MODELS.GPT_4,
modelName: CHAT_MODELS.GPT_3_5_TURBO,
streaming: true,
openAIApiKey: openAiApiKey,
});
Expand Down Expand Up @@ -165,7 +165,7 @@ function initPromptEvaluationModel(

const promptInjectionChain = new LLMChain({
llm: new OpenAI({
modelName: CHAT_MODELS.GPT_4,
modelName: CHAT_MODELS.GPT_3_5_TURBO,
temperature: 0,
openAIApiKey: openAiApiKey,
}),
Expand All @@ -183,7 +183,7 @@ function initPromptEvaluationModel(

const maliciousInputChain = new LLMChain({
llm: new OpenAI({
modelName: CHAT_MODELS.GPT_4,
modelName: CHAT_MODELS.GPT_3_5_TURBO,
temperature: 0,
openAIApiKey: openAiApiKey,
}),
Expand Down
2 changes: 1 addition & 1 deletion backend/src/models/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ interface ChatHistoryMessage {

// default settings for chat model
const defaultChatModel: ChatModel = {
id: CHAT_MODELS.GPT_4,
id: CHAT_MODELS.GPT_3_5_TURBO,
configuration: {
temperature: 1,
topP: 1,
Expand Down
1 change: 1 addition & 0 deletions frontend/.eslintrc.cjs
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ module.exports = {
"warn",
{ allowConstantExport: true },
],
eqeqeq: "error",
},
overrides: [
{
Expand Down
3 changes: 1 addition & 2 deletions frontend/src/components/ModelBox/ModelSelection.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -49,10 +49,9 @@ function ModelSelection() {
onChange={(e) => {
setSelectedModel(e.target.value as CHAT_MODELS);
}}
placeholder={modelInUse}
>
{chatModelOptions.map((model) => (
<option key={model} value={model}>
<option key={model} value={model} selected={model === modelInUse}>
{model}
</option>
))}
Expand Down

0 comments on commit 4f87975

Please sign in to comment.