Skip to content

Commit

Permalink
Merge pull request #4059 from janhq/fix/remote-models-error-handling
Browse files — browse the repository at this point in the history
chore: remote models error handling
  • Loading branch information
louis-jan authored Nov 20, 2024
2 parents 4f398b8 + e0a4caf commit 3602483
Show file tree
Hide file tree
Showing 8 changed files with 15 additions and 13 deletions.
2 changes: 1 addition & 1 deletion core/src/browser/extensions/engines/helpers/sse.ts
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ export function requestInference(
errorCode = ErrorCode.InvalidApiKey
}
const error = {
message: data.error?.message ?? 'Error occurred.',
message: data.error?.message ?? data.message ?? 'Error occurred.',
code: errorCode,
}
subscriber.error(error)
Expand Down
4 changes: 3 additions & 1 deletion extensions/inference-cohere-extension/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,8 @@ export default class JanInferenceCohereExtension extends RemoteOAIEngine {
}

transformResponse = (data: any) => {
return typeof data === 'object' ? data.text : JSON.parse(data).text ?? ''
return typeof data === 'object'
? data.text
: (JSON.parse(data.replace('data: ', '').trim()).text ?? '')
}
}
2 changes: 1 addition & 1 deletion extensions/inference-openai-extension/package.json
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
{
"name": "@janhq/inference-openai-extension",
"productName": "OpenAI Inference Engine",
"version": "1.0.3",
"version": "1.0.4",
"description": "This extension enables OpenAI chat completion API calls",
"main": "dist/index.js",
"module": "dist/module.js",
Expand Down
2 changes: 2 additions & 0 deletions extensions/inference-openai-extension/resources/models.json
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@
"parameters": {
"temperature": 1,
"top_p": 1,
"stream": true,
"max_tokens": 32768,
"frequency_penalty": 0,
"presence_penalty": 0
Expand Down Expand Up @@ -126,6 +127,7 @@
"temperature": 1,
"top_p": 1,
"max_tokens": 65536,
"stream": true,
"frequency_penalty": 0,
"presence_penalty": 0
},
Expand Down
1 change: 0 additions & 1 deletion extensions/inference-openai-extension/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,6 @@ export default class JanInferenceOpenAIExtension extends RemoteOAIEngine {
return {
...params,
max_completion_tokens: max_tokens,
stream: false, // o1 only support stream = false
}
}
// Pass through for non-preview models
Expand Down
3 changes: 0 additions & 3 deletions web/containers/ErrorMessage/index.test.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -63,9 +63,6 @@ describe('ErrorMessage Component', () => {

render(<ErrorMessage message={message} />)

expect(
screen.getByText('Apologies, something’s amiss!')
).toBeInTheDocument()
expect(screen.getByText('troubleshooting assistance')).toBeInTheDocument()
})

Expand Down
8 changes: 3 additions & 5 deletions web/containers/ErrorMessage/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,6 @@ const ErrorMessage = ({ message }: { message: ThreadMessage }) => {

const getErrorTitle = () => {
switch (message.error_code) {
case ErrorCode.Unknown:
return 'Apologies, something’s amiss!'
case ErrorCode.InvalidApiKey:
case ErrorCode.AuthenticationError:
case ErrorCode.InvalidRequestError:
Expand All @@ -55,17 +53,17 @@ const ErrorMessage = ({ message }: { message: ThreadMessage }) => {
)
default:
return (
<>
<p>
{message.content[0]?.text?.value && (
<AutoLink text={message.content[0].text.value} />
)}
</>
</p>
)
}
}

return (
<div className="mt-10">
<div className="mx-auto mt-10 max-w-[700px]">
{message.status === MessageStatus.Error && (
<div
key={message.id}
Expand Down
6 changes: 5 additions & 1 deletion web/containers/Providers/EventHandler.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,11 @@ export default function EventHandler({ children }: { children: ReactNode }) {
setIsGeneratingResponse(false)
}
return
} else if (message.status === MessageStatus.Error) {
} else if (
message.status === MessageStatus.Error &&
activeModelRef.current?.engine &&
isLocalEngine(activeModelRef.current.engine)
) {
;(async () => {
if (
!(await extensionManager
Expand Down

0 comments on commit 3602483

Please sign in to comment.