diff --git a/src/common/components/icons/vendors/DeepseekIcon.tsx b/src/common/components/icons/vendors/DeepseekIcon.tsx
new file mode 100644
index 000000000..811410b75
--- /dev/null
+++ b/src/common/components/icons/vendors/DeepseekIcon.tsx
@@ -0,0 +1,9 @@
+import * as React from 'react';
+
+import { SvgIcon, SvgIconProps } from '@mui/joy';
+
+export function DeepseekIcon(props: SvgIconProps) {
+  return <SvgIcon viewBox='0 0 24 24' width='24' height='24' {...props}>
+    {/* NOTE(review): the SVG <path> element was lost in text extraction — restore the Deepseek logo path data */}
+  </SvgIcon>;
+}
diff --git a/src/modules/backend/backend.router.ts b/src/modules/backend/backend.router.ts
index 2d39c1030..65759a1cc 100644
--- a/src/modules/backend/backend.router.ts
+++ b/src/modules/backend/backend.router.ts
@@ -49,6 +49,7 @@ export const backendRouter = createTRPCRouter({
hasImagingProdia: !!env.PRODIA_API_KEY,
hasLlmAnthropic: !!env.ANTHROPIC_API_KEY,
hasLlmAzureOpenAI: !!env.AZURE_OPENAI_API_KEY && !!env.AZURE_OPENAI_API_ENDPOINT,
+ hasLlmDeepseek: !!env.DEEPSEEK_API_KEY,
hasLlmGemini: !!env.GEMINI_API_KEY,
hasLlmGroq: !!env.GROQ_API_KEY,
hasLlmLocalAIHost: !!env.LOCALAI_API_HOST,
diff --git a/src/modules/backend/store-backend-capabilities.ts b/src/modules/backend/store-backend-capabilities.ts
index 0f6ee1f3a..2824eb7ff 100644
--- a/src/modules/backend/store-backend-capabilities.ts
+++ b/src/modules/backend/store-backend-capabilities.ts
@@ -13,6 +13,7 @@ export interface BackendCapabilities {
hasImagingProdia: boolean;
hasLlmAnthropic: boolean;
hasLlmAzureOpenAI: boolean;
+ hasLlmDeepseek: boolean;
hasLlmGemini: boolean;
hasLlmGroq: boolean;
hasLlmLocalAIHost: boolean;
@@ -42,6 +43,7 @@ const useBackendCapabilitiesStore = create()(
hasImagingProdia: false,
hasLlmAnthropic: false,
hasLlmAzureOpenAI: false,
+ hasLlmDeepseek: false,
hasLlmGemini: false,
hasLlmGroq: false,
hasLlmLocalAIHost: false,
diff --git a/src/modules/llms/server/llm.server.streaming.ts b/src/modules/llms/server/llm.server.streaming.ts
index a85aa1a6f..7daec37e1 100644
--- a/src/modules/llms/server/llm.server.streaming.ts
+++ b/src/modules/llms/server/llm.server.streaming.ts
@@ -526,6 +526,7 @@ function _prepareRequestData({ access, model, history, context: _context }: Chat
};
case 'azure':
+ case 'deepseek':
case 'groq':
case 'lmstudio':
case 'localai':
diff --git a/src/modules/llms/server/openai/models.data.ts b/src/modules/llms/server/openai/models.data.ts
index 0cf234174..a8a5d57ee 100644
--- a/src/modules/llms/server/openai/models.data.ts
+++ b/src/modules/llms/server/openai/models.data.ts
@@ -321,6 +321,49 @@ export function azureModelToModelDescription(azureDeploymentRef: string, openAIM
}
+// [Deepseek AI]
+const _knownDeepseekChatModels: ManualMappings = [
+ // [Models and Pricing](https://platform.deepseek.com/api-docs/pricing)
+ // [List Models](https://platform.deepseek.com/api-docs/api/list-models)
+ {
+ idPrefix: 'deepseek-chat',
+ label: 'Deepseek Chat V2',
+ description: 'Good at general tasks, 128K context length',
+ contextWindow: 128000,
+ interfaces: [LLM_IF_OAI_Chat],
+ maxCompletionTokens: 4096,
+ pricing: {
+ chatIn: 0.14,
+ chatOut: 0.28,
+ },
+ },
+ {
+ idPrefix: 'deepseek-coder',
+ label: 'Deepseek Coder V2',
+ description: 'Good at coding and math tasks, 128K context length',
+ contextWindow: 128000,
+ interfaces: [LLM_IF_OAI_Chat],
+ maxCompletionTokens: 4096,
+ pricing: {
+ chatIn: 0.14,
+ chatOut: 0.28,
+ },
+ },
+];
+
+export function deepseekModelToModelDescription(deepseekModelId: string): ModelDescriptionSchema {
+ return fromManualMapping(_knownDeepseekChatModels, deepseekModelId, undefined, undefined, {
+ idPrefix: deepseekModelId,
+ label: deepseekModelId.replaceAll(/[_-]/g, ' '),
+ description: 'New Deepseek Model',
+ contextWindow: 128000,
+ maxCompletionTokens: 4096,
+ interfaces: [LLM_IF_OAI_Chat], // assume..
+ hidden: true,
+ });
+}
+
+
// [LM Studio]
export function lmStudioModelToModelDescription(modelId: string): ModelDescriptionSchema {
diff --git a/src/modules/llms/server/openai/openai.router.ts b/src/modules/llms/server/openai/openai.router.ts
index 84ff7f30c..3467faa86 100644
--- a/src/modules/llms/server/openai/openai.router.ts
+++ b/src/modules/llms/server/openai/openai.router.ts
@@ -11,7 +11,7 @@ import { Brand } from '~/common/app.config';
import { fixupHost } from '~/common/util/urlUtils';
import { OpenAIWire, WireOpenAICreateImageOutput, wireOpenAICreateImageOutputSchema, WireOpenAICreateImageRequest } from './openai.wiretypes';
-import { azureModelToModelDescription, groqModelSortFn, groqModelToModelDescription, lmStudioModelToModelDescription, localAIModelToModelDescription, mistralModelsSort, mistralModelToModelDescription, oobaboogaModelToModelDescription, openAIModelFilter, openAIModelToModelDescription, openRouterModelFamilySortFn, openRouterModelToModelDescription, perplexityAIModelDescriptions, perplexityAIModelSort, togetherAIModelsToModelDescriptions } from './models.data';
+import { azureModelToModelDescription, deepseekModelToModelDescription, groqModelSortFn, groqModelToModelDescription, lmStudioModelToModelDescription, localAIModelToModelDescription, mistralModelsSort, mistralModelToModelDescription, oobaboogaModelToModelDescription, openAIModelFilter, openAIModelToModelDescription, openRouterModelFamilySortFn, openRouterModelToModelDescription, perplexityAIModelDescriptions, perplexityAIModelSort, togetherAIModelsToModelDescriptions } from './models.data';
import { llmsChatGenerateWithFunctionsOutputSchema, llmsGenerateContextSchema, llmsListModelsOutputSchema, ModelDescriptionSchema } from '../llm.server.types';
import { wilreLocalAIModelsApplyOutputSchema, wireLocalAIModelsAvailableOutputSchema, wireLocalAIModelsListOutputSchema } from './localai.wiretypes';
@@ -21,7 +21,7 @@ const ABERRATION_FIXUP_SQUASH = '\n\n\n---\n\n\n';
const openAIDialects = z.enum([
- 'azure', 'groq', 'lmstudio', 'localai', 'mistral', 'oobabooga', 'openai', 'openrouter', 'perplexity', 'togetherai',
+ 'azure', 'deepseek', 'groq', 'lmstudio', 'localai', 'mistral', 'oobabooga', 'openai', 'openrouter', 'perplexity', 'togetherai',
]);
type OpenAIDialects = z.infer<typeof openAIDialects>;
@@ -169,6 +169,11 @@ export const llmOpenAIRouter = createTRPCRouter({
// every dialect has a different way to enumerate models - we execute the mapping on the server side
switch (access.dialect) {
+ case 'deepseek':
+ models = openAIModels
+ .map(({ id }) => deepseekModelToModelDescription(id));
+ break;
+
case 'groq':
models = openAIModels
.map(groqModelToModelDescription)
@@ -401,6 +406,7 @@ export const llmOpenAIRouter = createTRPCRouter({
const DEFAULT_HELICONE_OPENAI_HOST = 'oai.hconeai.com';
+const DEFAULT_DEEPSEEK_HOST = 'https://api.deepseek.com';
const DEFAULT_GROQ_HOST = 'https://api.groq.com/openai';
const DEFAULT_LOCALAI_HOST = 'http://127.0.0.1:8080';
const DEFAULT_MISTRAL_HOST = 'https://api.mistral.ai';
@@ -437,6 +443,22 @@ export function openAIAccess(access: OpenAIAccessSchema, modelRefId: string | nu
};
+ case 'deepseek':
+ // https://platform.deepseek.com/api-docs/
+ const deepseekKey = access.oaiKey || env.DEEPSEEK_API_KEY || '';
+ const deepseekHost = fixupHost(access.oaiHost || DEFAULT_DEEPSEEK_HOST, apiPath);
+ if (!deepseekKey || !deepseekHost)
+ throw new Error('Missing Deepseek API Key or Host. Add it on the UI (Models Setup) or server side (your deployment).');
+
+ return {
+ headers: {
+ 'Authorization': `Bearer ${deepseekKey}`,
+ 'Content-Type': 'application/json',
+ },
+ url: deepseekHost + apiPath,
+ };
+
+
case 'lmstudio':
case 'oobabooga':
case 'openai':
diff --git a/src/modules/llms/vendors/deepseek/DeepseekAISourceSetup.tsx b/src/modules/llms/vendors/deepseek/DeepseekAISourceSetup.tsx
new file mode 100644
index 000000000..a117d4d6e
--- /dev/null
+++ b/src/modules/llms/vendors/deepseek/DeepseekAISourceSetup.tsx
@@ -0,0 +1,61 @@
+import * as React from 'react';
+
+import { AlreadySet } from '~/common/components/AlreadySet';
+import { FormInputKey } from '~/common/components/forms/FormInputKey';
+import { InlineError } from '~/common/components/InlineError';
+import { Link } from '~/common/components/Link';
+import { SetupFormRefetchButton } from '~/common/components/forms/SetupFormRefetchButton';
+import { useToggleableBoolean } from '~/common/util/useToggleableBoolean';
+
+import { DModelSourceId } from '../../store-llms';
+import { useLlmUpdateModels } from '../../llm.client.hooks';
+import { useSourceSetup } from '../useSourceSetup';
+
+import { ModelVendorDeepseek } from './deepseekai.vendor';
+
+
+const DEEPSEEK_REG_LINK = 'https://platform.deepseek.com/api_keys';
+
+
+export function DeepseekAISourceSetup(props: { sourceId: DModelSourceId }) {
+
+ // state
+ const advanced = useToggleableBoolean();
+
+ // external state
+ const {
+ source, sourceHasLLMs, access,
+ sourceSetupValid, hasNoBackendCap: needsUserKey, updateSetup,
+ } = useSourceSetup(props.sourceId, ModelVendorDeepseek);
+
+ // derived state
+ const { oaiKey: deepseekKey } = access;
+
+ // validate if url is a well formed proper url with zod
+ const shallFetchSucceed = !needsUserKey || (!!deepseekKey && sourceSetupValid);
+ const showKeyError = !!deepseekKey && !sourceSetupValid;
+
+ // fetch models
+ const { isFetching, refetch, isError, error } =
+ useLlmUpdateModels(!sourceHasLLMs && shallFetchSucceed, source);
+
+
+  return <>
+
+    <FormInputKey
+      autoCompleteId='deepseek-key' label='Deepseek API Key'
+      rightLabel={<>{needsUserKey
+        ? !deepseekKey && <Link level='body-sm' href={DEEPSEEK_REG_LINK} target='_blank'>request Key</Link>
+        : <AlreadySet />}
+      </>}
+      value={deepseekKey} onChange={value => updateSetup({ deepseekKey: value })}
+      required={needsUserKey} isError={showKeyError}
+      placeholder='...'
+    />
+
+    <SetupFormRefetchButton refetch={refetch} disabled={!shallFetchSucceed || isFetching} loading={isFetching} error={isError} advanced={advanced} />
+
+    {isError && <InlineError error={error} />}
+
+  </>;
+}
diff --git a/src/modules/llms/vendors/deepseek/deepseekai.vendor.ts b/src/modules/llms/vendors/deepseek/deepseekai.vendor.ts
new file mode 100644
index 000000000..779e61fce
--- /dev/null
+++ b/src/modules/llms/vendors/deepseek/deepseekai.vendor.ts
@@ -0,0 +1,49 @@
+import { DeepseekIcon } from '~/common/components/icons/vendors/DeepseekIcon';
+
+import type { IModelVendor } from '../IModelVendor';
+import type { OpenAIAccessSchema } from '../../server/openai/openai.router';
+
+import { LLMOptionsOpenAI, ModelVendorOpenAI } from '../openai/openai.vendor';
+import { OpenAILLMOptions } from '../openai/OpenAILLMOptions';
+
+import { DeepseekAISourceSetup } from './DeepseekAISourceSetup';
+
+
+export interface SourceSetupDeepseek {
+ deepseekKey: string;
+}
+
+export const ModelVendorDeepseek: IModelVendor<SourceSetupDeepseek, OpenAIAccessSchema, LLMOptionsOpenAI> = {
+ id: 'deepseek',
+ name: 'Deepseek',
+ rank: 19,
+ location: 'cloud',
+ instanceLimit: 1,
+ hasBackendCapKey: 'hasLlmDeepseek',
+
+ // components
+ Icon: DeepseekIcon,
+ SourceSetupComponent: DeepseekAISourceSetup,
+ LLMOptionsComponent: OpenAILLMOptions,
+
+ // functions
+ initializeSetup: () => ({
+ deepseekKey: '',
+ }),
+ validateSetup: (setup) => {
+ return setup.deepseekKey?.length >= 35;
+ },
+ getTransportAccess: (partialSetup) => ({
+ dialect: 'deepseek',
+ oaiKey: partialSetup?.deepseekKey || '',
+ oaiOrg: '',
+ oaiHost: '',
+ heliKey: '',
+ moderationCheck: false,
+ }),
+
+ // OpenAI transport ('Deepseek' dialect in 'access')
+ rpcUpdateModelsOrThrow: ModelVendorOpenAI.rpcUpdateModelsOrThrow,
+ rpcChatGenerateOrThrow: ModelVendorOpenAI.rpcChatGenerateOrThrow,
+ streamingChatGenerateOrThrow: ModelVendorOpenAI.streamingChatGenerateOrThrow,
+};
diff --git a/src/modules/llms/vendors/vendors.registry.ts b/src/modules/llms/vendors/vendors.registry.ts
index 0193718e4..f4353919f 100644
--- a/src/modules/llms/vendors/vendors.registry.ts
+++ b/src/modules/llms/vendors/vendors.registry.ts
@@ -14,6 +14,7 @@ import { ModelVendorTogetherAI } from './togetherai/togetherai.vendor';
import type { IModelVendor } from './IModelVendor';
import { DLLMId, DModelSource, DModelSourceId, findLLMOrThrow, findSourceOrThrow } from '../store-llms';
+import { ModelVendorDeepseek } from './deepseek/deepseekai.vendor';
export type ModelVendorId =
| 'anthropic'
@@ -28,7 +29,8 @@ export type ModelVendorId =
| 'openai'
| 'openrouter'
| 'perplexity'
- | 'togetherai';
+ | 'togetherai'
+ | 'deepseek';
/** Global: Vendor Instances Registry **/
const MODEL_VENDOR_REGISTRY: Record<ModelVendorId, IModelVendor> = {
@@ -45,6 +47,7 @@ const MODEL_VENDOR_REGISTRY: Record = {
openrouter: ModelVendorOpenRouter,
perplexity: ModelVendorPerplexity,
togetherai: ModelVendorTogetherAI,
+ deepseek: ModelVendorDeepseek,
} as Record<ModelVendorId, IModelVendor>;
const MODEL_VENDOR_DEFAULT: ModelVendorId = 'openai';
diff --git a/src/server/env.mjs b/src/server/env.mjs
index 8c75b7ec2..00fd86132 100644
--- a/src/server/env.mjs
+++ b/src/server/env.mjs
@@ -53,6 +53,9 @@ export const env = createEnv({
// LLM: Toghether AI
TOGETHERAI_API_KEY: z.string().optional(),
+ // LLM: Deepseek AI
+ DEEPSEEK_API_KEY: z.string().optional(),
+
// Helicone - works on both OpenAI and Anthropic vendors
HELICONE_API_KEY: z.string().optional(),