chore: update benchmark using cortex client (cortex-node)
louis-jan committed Jun 27, 2024
1 parent a5f04e1 commit bfaa064
Showing 7 changed files with 12 additions and 16 deletions.
2 changes: 1 addition & 1 deletion cortex-js/package.json
@@ -53,7 +53,6 @@
"decompress": "^4.2.1",
"js-yaml": "^4.1.0",
"nest-commander": "^3.13.0",
"openai": "^4.50.0",
"readline": "^1.3.0",
"reflect-metadata": "^0.2.0",
"rxjs": "^7.8.1",
@@ -81,6 +80,7 @@
"@typescript-eslint/eslint-plugin": "^6.0.0",
"@typescript-eslint/parser": "^6.0.0",
"bun": "^1.1.15",
"cortexso-node": "^0.0.4",
"cpx": "^1.5.0",
"eslint": "^8.42.0",
"eslint-config-prettier": "^9.0.0",
4 changes: 2 additions & 2 deletions cortex-js/src/domain/models/assistant.interface.ts
@@ -1,5 +1,5 @@
- import { Assistant as OpenAiAssistant } from 'openai/resources/beta/assistants';
- import { AssistantResponseFormatOption as OpenAIAssistantResponseFormatOption } from 'openai/resources/beta/threads/threads';
+ import { Assistant as OpenAiAssistant } from 'cortexso-node/resources/beta/assistants';
+ import { AssistantResponseFormatOption as OpenAIAssistantResponseFormatOption } from 'cortexso-node/resources/beta/threads/threads';

export interface Assistant extends OpenAiAssistant {
avatar?: string;
2 changes: 1 addition & 1 deletion cortex-js/src/domain/models/message.interface.ts
@@ -2,7 +2,7 @@ import {
Message as OpenAiMessage,
MessageContent as OpenAiMessageContent,
TextContentBlock as OpenAiTextContentBlock,
- } from 'openai/resources/beta/threads/messages';
+ } from 'cortexso-node/resources/beta/threads/messages';

export interface Message extends OpenAiMessage {}

2 changes: 1 addition & 1 deletion cortex-js/src/domain/models/model.interface.ts
@@ -1,4 +1,4 @@
- import { Model as OpenAiModel } from 'openai/resources/models';
+ import { Model as OpenAiModel } from 'cortexso-node/resources/models';

export interface Model
extends OpenAiModel,
2 changes: 1 addition & 1 deletion cortex-js/src/domain/models/thread.interface.ts
@@ -1,4 +1,4 @@
- import { Thread as OpenAiThread } from 'openai/resources/beta/threads/threads';
+ import { Thread as OpenAiThread } from 'cortexso-node/resources/beta/threads/threads';
import { Assistant } from './assistant.interface';

export interface ThreadToolResources extends OpenAiThread.ToolResources {}
@@ -1,4 +1,4 @@
- import { ChatCompletionMessageParam } from 'openai/resources';
+ import { ChatCompletionMessageParam } from 'cortexso-node/resources';

export interface ApiConfig {
base_url: string;
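
Every change up to this point is the same mechanical substitution: the type-only imports keep their module paths, and only the package name moves from openai to cortexso-node. A minimal sketch of the resulting pattern, assuming cortexso-node re-exports the same resource type modules as the openai SDK (as the swapped imports suggest); the ApiConfig fields beyond base_url are illustrative:

// Sketch only: assumes cortexso-node mirrors the openai SDK's resource modules.
import { Message as OpenAiMessage } from 'cortexso-node/resources/beta/threads/messages';
import { Thread as OpenAiThread } from 'cortexso-node/resources/beta/threads/threads';
import { ChatCompletionMessageParam } from 'cortexso-node/resources';

// Domain interfaces extend the upstream types unchanged, as in
// message.interface.ts and thread.interface.ts above.
export interface Message extends OpenAiMessage {}
export interface ThreadToolResources extends OpenAiThread.ToolResources {}

// ApiConfig keeps referencing the chat message param type; only its
// package of origin changed. Fields beyond base_url are illustrative.
export interface ApiConfig {
  base_url: string;
  api_key?: string;
  parameters?: { messages?: ChatCompletionMessageParam[] };
}
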
@@ -1,7 +1,7 @@
import { Injectable } from '@nestjs/common';
import si from 'systeminformation';
import fs, { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
- import OpenAI from 'openai';
+ import Cortex from 'cortexso-node';
import { Presets, SingleBar } from 'cli-progress';
import yaml from 'js-yaml';
import { FileManagerService } from '@/infrastructure/services/file-manager/file-manager.service';
@@ -27,7 +27,7 @@ export class BenchmarkCliUsecases {
) {}

config: BenchmarkConfig;
- openai?: OpenAI;
+ cortexClient?: Cortex;
/**
* Benchmark and analyze the performance of a specific AI model using a variety of system resources
*/
@@ -43,7 +43,7 @@

const model = params?.model ?? this.config.api.parameters.model;
// TODO: Using OpenAI client or Cortex client to benchmark?
- this.openai = new OpenAI({
+ this.cortexClient = new Cortex({
apiKey: this.config.api.api_key,
baseURL: this.config.api.base_url,
timeout: 20 * 1000,
@@ -60,11 +60,7 @@
.then(() =>
this.psUsecases
.getModels()
- .then((models) =>
-   models.find(
-     (e) => e.modelId === model,
-   ),
- ),
+ .then((models) => models.find((e) => e.modelId === model)),
)
.then((model) => {
if (!model)
@@ -147,7 +143,7 @@
let firstTokenTime = null;

try {
- const stream = await this.openai!.chat.completions.create({
+ const stream = await this.cortexClient!.chat.completions.create({
model: this.config.api.parameters.model,
messages: this.config.api.parameters.messages,
max_tokens: this.config.api.parameters.max_tokens,
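
The remaining edits swap the benchmark's HTTP client wholesale: the Cortex constructor takes the same apiKey, baseURL, and timeout options the OpenAI client did, and the streamed chat completion call is unchanged apart from the receiver. A hedged sketch of one benchmark round under that assumption (the stream flag, chunk handling, and timing fields are illustrative and not shown in the diff):

import Cortex from 'cortexso-node';
import { ChatCompletionMessageParam } from 'cortexso-node/resources';

// Illustrative shape of the config the benchmark reads (config.api in the diff).
interface BenchmarkApiConfig {
  api_key: string;
  base_url: string;
  parameters: {
    model: string;
    messages: ChatCompletionMessageParam[];
    max_tokens: number;
  };
}

// One benchmark round: stream a completion and record time to first token.
// Assumes cortexso-node follows the openai SDK's async-iterable streaming API.
async function benchmarkOnce(api: BenchmarkApiConfig) {
  const cortexClient = new Cortex({
    apiKey: api.api_key,
    baseURL: api.base_url,
    timeout: 20 * 1000, // same 20s timeout used in the diff
  });

  const start = Date.now();
  let firstTokenTime: number | null = null;

  const stream = await cortexClient.chat.completions.create({
    model: api.parameters.model,
    messages: api.parameters.messages,
    max_tokens: api.parameters.max_tokens,
    stream: true, // assumption: the benchmark streams to measure firstTokenTime
  });

  for await (const chunk of stream) {
    if (firstTokenTime === null && chunk.choices?.[0]?.delta?.content) {
      firstTokenTime = Date.now() - start;
    }
  }

  return { totalTimeMs: Date.now() - start, firstTokenTimeMs: firstTokenTime };
}
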
