From ce91bf04c4f982581a801c0a6728650192009db8 Mon Sep 17 00:00:00 2001 From: Karthik Kalyanaraman <105607645+karthikscale3@users.noreply.github.com> Date: Thu, 12 Sep 2024 13:39:06 -0700 Subject: [PATCH] support for o1-preview and o1-mini (#275) --- lib/constants.ts | 20 ++++++++++++++++++++ lib/types/playground_types.ts | 12 ++++++++++++ lib/utils.ts | 15 +++++++++++++-- 3 files changed, 45 insertions(+), 2 deletions(-) diff --git a/lib/constants.ts b/lib/constants.ts index 89802fb0..0298f0a4 100644 --- a/lib/constants.ts +++ b/lib/constants.ts @@ -34,6 +34,10 @@ export const TIKTOKEN_MODEL_MAPPING: Record = { "gpt-4o-2024-05-13": "o200k_base", "gpt-4o-mini": "o200k_base", "gpt-4o-mini-2024-07-18": "o200k_base", + "o1-preview": "o200k_base", + "o1-mini": "o200k_base", + "o1-preview-2024-09-12": "o200k_base", + "o1-mini-2024-09-12": "o200k_base", }; export type LangTraceAttributes = LLMSpanAttributes & @@ -53,6 +57,14 @@ export interface CostTableEntry { // cost per 1000 tokens export const OPENAI_PRICING: Record = { + "o1-preview": { + input: 0.015, + output: 0.06, + }, + "o1-mini": { + input: 0.003, + output: 0.012, + }, "gpt-4o-mini": { input: 0.00015, output: 0.0006, @@ -244,6 +256,14 @@ export const AZURE_PRICING: Record = { input: 0.03, output: 0.06, }, + "o1-preview": { + input: 0.015, + output: 0.06, + }, + "o1-mini": { + input: 0.003, + output: 0.012, + }, }; export const PAGE_SIZE = 15; diff --git a/lib/types/playground_types.ts b/lib/types/playground_types.ts index 1ec2da3c..f9d1da8d 100644 --- a/lib/types/playground_types.ts +++ b/lib/types/playground_types.ts @@ -6,6 +6,10 @@ export interface Conversation { } export enum OpenAIModel { + "o1-preview" = "o1-preview", + "o1-preview-2024-09-12" = "o1-preview-2024-09-12", + "o1-mini" = "o1-mini", + "o1-mini-2024-09-12" = "o1-mini-2024-09-12", "gpt-4o-mini" = "gpt-4o-mini", "gpt-4o-mini-2024-07-18" = "gpt-4o-mini-2024-07-18", "gpt-4o" = "gpt-4o", @@ -34,6 +38,14 @@ export enum AnthropicModel { } export const 
openAIModels = [ + { + value: "o1-preview", + label: "O1 Preview", + }, + { + value: "o1-mini", + label: "O1 Mini", + }, { value: "gpt-4o-mini", label: "GPT-4 Omni Mini", diff --git a/lib/utils.ts b/lib/utils.ts index e794bf55..a1ee9de5 100644 --- a/lib/utils.ts +++ b/lib/utils.ts @@ -528,6 +528,10 @@ export function calculatePriceFromUsage( correctModel = "gpt-4o"; } else if (model.includes("gpt-4")) { correctModel = "gpt-4"; + } else if (model.includes("o1-preview")) { + correctModel = "o1-preview"; + } else if (model.includes("o1-mini")) { + correctModel = "o1-mini"; } } costTable = OPENAI_PRICING[correctModel]; @@ -565,6 +569,10 @@ export function calculatePriceFromUsage( correctModel = "gpt-4o"; } else if (model.includes("gpt-4")) { correctModel = "gpt-4"; + } else if (model.includes("o1-preview")) { + correctModel = "o1-preview"; + } else if (model.includes("o1-mini")) { + correctModel = "o1-mini"; } } costTable = AZURE_PRICING[correctModel]; @@ -647,7 +655,7 @@ export function estimateTokens(prompt: string): number { export function calculateTokens(content: string, model: string): number { try { let tiktokenModel: TiktokenEncoding = "cl100k_base"; - if (model.startsWith("gpt-4o")) { + if (model.startsWith("gpt-4o") || model.startsWith("o1-")) { tiktokenModel = "o200k_base"; } return estimateTokensUsingTikToken(content, tiktokenModel); @@ -744,7 +752,10 @@ export function getVendorFromSpan(span: Span): string { vendor = "gemini"; } else if (span.name.includes("vercel") || serviceName.includes("vercel")) { vendor = "vercel"; - } else if (span.name.includes("embedchain") || serviceName.includes("embedchain")) { + } else if ( + span.name.includes("embedchain") || + serviceName.includes("embedchain") + ) { vendor = "embedchain"; } return vendor;