From e8fb613bcdd968f0b5d42d5dc897a817e9a879ab Mon Sep 17 00:00:00 2001 From: Peter B Smith Date: Sun, 27 Aug 2023 18:34:28 -0400 Subject: [PATCH] Enable User ID in environment and advanced model configuration --- .env.example | 3 +++ src/common/types/env.d.ts | 2 ++ src/modules/llms/openai/OpenAISourceSetup.tsx | 25 ++++++++++++++++--- src/modules/llms/openai/openai.router.ts | 13 ++++++++-- src/modules/llms/openai/openai.vendor.ts | 4 ++- 5 files changed, 40 insertions(+), 7 deletions(-) diff --git a/.env.example b/.env.example index 328413b209..0257d33e31 100644 --- a/.env.example +++ b/.env.example @@ -1,3 +1,6 @@ +# [Optional] Set the user for OpenAI and Helicone to track usage +USER_ID= + # [Recommended for local deployments] Backend API key for OpenAI, so that users don't need one (UI > this > '') OPENAI_API_KEY= # [Optional] Sets the "OpenAI-Organization" header field to support organization users (UI > this > '') diff --git a/src/common/types/env.d.ts b/src/common/types/env.d.ts index c7fe31c01a..3843dbd197 100644 --- a/src/common/types/env.d.ts +++ b/src/common/types/env.d.ts @@ -4,6 +4,8 @@ declare namespace NodeJS { // available to the server-side interface ProcessEnv { + // OpenAI and Helicone + USER_ID: string; // LLM: OpenAI OPENAI_API_KEY: string; diff --git a/src/modules/llms/openai/OpenAISourceSetup.tsx b/src/modules/llms/openai/OpenAISourceSetup.tsx index 9bbbd6e34d..5603bf3088 100644 --- a/src/modules/llms/openai/OpenAISourceSetup.tsx +++ b/src/modules/llms/openai/OpenAISourceSetup.tsx @@ -1,7 +1,7 @@ import * as React from 'react'; -import { Box, Button, FormControl, FormHelperText, FormLabel, Input, Switch } from '@mui/joy'; import SyncIcon from '@mui/icons-material/Sync'; +import { Box, Button, FormControl, FormHelperText, FormLabel, Input, Switch } from '@mui/joy'; import { apiQuery } from '~/modules/trpc/trpc.client'; @@ -12,9 +12,9 @@ import { Link } from '~/common/components/Link'; import { settingsCol1Width, settingsGap } from 
'~/common/theme'; import { DLLM, DModelSource, DModelSourceId } from '../llm.types'; -import { OpenAI } from './openai.types'; -import { hasServerKeyOpenAI, isValidOpenAIApiKey, LLMOptionsOpenAI, ModelVendorOpenAI } from './openai.vendor'; import { useModelsStore, useSourceSetup } from '../store-llms'; +import { OpenAI } from './openai.types'; +import { LLMOptionsOpenAI, ModelVendorOpenAI, hasServerKeyOpenAI, isValidOpenAIApiKey } from './openai.vendor'; export function OpenAISourceSetup(props: { sourceId: DModelSourceId }) { @@ -25,7 +25,7 @@ export function OpenAISourceSetup(props: { sourceId: DModelSourceId }) { // external state const { source, sourceLLMs, updateSetup, - normSetup: { heliKey, oaiHost, oaiKey, oaiOrg, moderationCheck }, + normSetup: { heliKey, oaiHost, oaiKey, oaiOrg, moderationCheck, userId }, } = useSourceSetup(props.sourceId, ModelVendorOpenAI.normalizeSetup); const hasModels = !!sourceLLMs.length; @@ -110,6 +110,23 @@ export function OpenAISourceSetup(props: { sourceId: DModelSourceId }) { /> } + {showAdvanced && + + + User ID + + + helicone, + OpenAI + + + updateSetup({ userId: event.target.value })} + sx={{ flexGrow: 1 }} + /> + } + {showAdvanced && diff --git a/src/modules/llms/openai/openai.router.ts b/src/modules/llms/openai/openai.router.ts index 04ea7749c3..3ecc402e82 100644 --- a/src/modules/llms/openai/openai.router.ts +++ b/src/modules/llms/openai/openai.router.ts @@ -1,5 +1,5 @@ -import { z } from 'zod'; import { TRPCError } from '@trpc/server'; +import { z } from 'zod'; import { createTRPCRouter, publicProcedure } from '~/modules/trpc/trpc.server'; import { fetchJsonOrTRPCError } from '~/modules/trpc/trpc.serverutils'; @@ -19,12 +19,14 @@ const accessSchema = z.object({ oaiHost: z.string().trim(), heliKey: z.string().trim(), moderationCheck: z.boolean(), + userId: z.string().trim().optional(), }); export const modelSchema = z.object({ id: z.string(), temperature: z.number().min(0).max(1).optional(), maxTokens: 
z.number().min(1).max(1000000), + userId: z.string().optional() }); export const historySchema = z.array(z.object({ @@ -202,6 +204,9 @@ export function openAIAccess(access: AccessSchema, apiPath: string): { headers: // Organization ID const oaiOrg = access.oaiOrg || process.env.OPENAI_API_ORG_ID || ''; + // User ID + const userId = access.userId || process.env.USER_ID || ''; + // API host let oaiHost = access.oaiHost || process.env.OPENAI_API_HOST || DEFAULT_OPENAI_HOST; if (!oaiHost.startsWith('http')) @@ -221,19 +226,23 @@ export function openAIAccess(access: AccessSchema, apiPath: string): { headers: ...(oaiKey && { Authorization: `Bearer ${oaiKey}` }), 'Content-Type': 'application/json', ...(oaiOrg && { 'OpenAI-Organization': oaiOrg }), - ...(heliKey && { 'Helicone-Auth': `Bearer ${heliKey}` }), + ...(heliKey && { 'Helicone-Auth': `Bearer ${heliKey}`, 'Helicone-User-Id': userId }), }, url: oaiHost + apiPath, }; } export function openAIChatCompletionPayload(model: ModelSchema, history: HistorySchema, functions: FunctionsSchema | null, n: number, stream: boolean): OpenAI.Wire.ChatCompletion.Request { + // User ID + const userId = model.userId || process.env.USER_ID || ''; + return { model: model.id, messages: history, ...(functions && { functions: functions, function_call: 'auto' }), ...(model.temperature && { temperature: model.temperature }), ...(model.maxTokens && { max_tokens: model.maxTokens }), + ...(userId && { user: userId }), n, stream, }; diff --git a/src/modules/llms/openai/openai.vendor.ts b/src/modules/llms/openai/openai.vendor.ts index d917b5c29c..ee1c48b1f2 100644 --- a/src/modules/llms/openai/openai.vendor.ts +++ b/src/modules/llms/openai/openai.vendor.ts @@ -1,7 +1,7 @@ import { apiAsync } from '~/modules/trpc/trpc.client'; -import { DLLM, ModelVendor } from '../llm.types'; import { VChatFunctionIn, VChatMessageIn, VChatMessageOrFunctionCallOut, VChatMessageOut } from '../llm.client'; +import { DLLM, ModelVendor } from '../llm.types'; import { 
OpenAIIcon } from './OpenAIIcon'; import { OpenAILLMOptions } from './OpenAILLMOptions'; @@ -19,6 +19,7 @@ export interface SourceSetupOpenAI { oaiHost: string; // use OpenAI-compatible non-default hosts (full origin path) heliKey: string; // helicone key (works in conjunction with oaiHost) moderationCheck: boolean; + userId: string; // user id for OpenAI and Helicone if heliKey is present } export interface LLMOptionsOpenAI { @@ -45,6 +46,7 @@ export const ModelVendorOpenAI: ModelVendor oaiOrg: '', oaiHost: '', heliKey: '', + userId: '', moderationCheck: false, ...partialSetup, }),