Skip to content

Commit

Permalink
fix: more fixes related to model mapping
Browse files Browse the repository at this point in the history
  • Loading branch information
dosco committed Jul 9, 2024
1 parent 176128f commit b01fcb7
Show file tree
Hide file tree
Showing 8 changed files with 44 additions and 29 deletions.
5 changes: 3 additions & 2 deletions src/ax/ai/anthropic/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import type {
AxChatRequest,
AxChatResponse,
AxChatResponseResult,
AxInternalChatRequest,
AxModelConfig
} from '../types.js';

Expand Down Expand Up @@ -88,9 +89,9 @@ export class AxAIAnthropic extends AxBaseAI<
}

override generateChatReq = (
req: Readonly<AxChatRequest>
req: Readonly<AxInternalChatRequest>
): [API, AxAIAnthropicChatRequest] => {
const model = this.config.model;
const model = req.model;

const apiConfig = {
name: '/messages'
Expand Down
14 changes: 9 additions & 5 deletions src/ax/ai/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,8 @@ import type {
AxChatResponse,
AxEmbedRequest,
AxEmbedResponse,
AxInternalChatRequest,
AxInternalEmbedRequest,
AxModelConfig,
AxModelInfo,
AxModelInfoWithProvider,
Expand Down Expand Up @@ -65,10 +67,12 @@ export class AxBaseAI<
> implements AxAIService
{
generateChatReq?: (
req: Readonly<AxChatRequest>,
req: Readonly<AxInternalChatRequest>,
config: Readonly<AxAIPromptConfig>
) => [API, TChatRequest];
generateEmbedReq?: (req: Readonly<AxEmbedRequest>) => [API, TEmbedRequest];
generateEmbedReq?: (
req: Readonly<AxInternalEmbedRequest>
) => [API, TEmbedRequest];
generateChatResp?: (resp: Readonly<TChatResponse>) => AxChatResponse;
generateChatStreamResp?: (
resp: Readonly<TChatResponseDelta>,
Expand All @@ -86,10 +90,10 @@ export class AxBaseAI<
private modelInfo: readonly AxModelInfo[];
private modelUsage?: AxTokenUsage;
private embedModelUsage?: AxTokenUsage;
private models: AxBaseAIArgs['models'];

protected apiURL: string;
protected name: string;
protected models: AxBaseAIArgs['models'];
protected headers: Record<string, string>;
protected supportFor: AxBaseAIFeatures;

Expand Down Expand Up @@ -266,7 +270,7 @@ export class AxBaseAI<
model,
functions,
modelConfig: { ...chatReq.modelConfig, stream }
} as Readonly<AxChatRequest>;
};

const fn = async () => {
const [apiConfig, reqValue] = reqFn(req, options as AxAIPromptConfig);
Expand Down Expand Up @@ -406,7 +410,7 @@ export class AxBaseAI<
const req = {
...embedReq,
embedModel
} as Readonly<AxEmbedRequest>;
};

const fn = async () => {
const [apiConfig, reqValue] = this.generateEmbedReq!(req);
Expand Down
11 changes: 6 additions & 5 deletions src/ax/ai/cohere/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,9 @@ import type {
AxAIServiceOptions,
AxChatRequest,
AxChatResponse,
AxEmbedRequest,
AxEmbedResponse,
AxInternalChatRequest,
AxInternalEmbedRequest,
AxModelConfig
} from '../types.js';

Expand Down Expand Up @@ -98,11 +99,11 @@ export class AxAICohere extends AxBaseAI<
}

override generateChatReq = (
req: Readonly<AxChatRequest>,
req: Readonly<AxInternalChatRequest>,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_config: Readonly<AxAIPromptConfig>
): [API, AxAICohereChatRequest] => {
const model = this.config.model;
const model = req.model;

const lastChatMsg = req.chatPrompt.at(-1);
const restOfChat = req.chatPrompt.slice(0, -1);
Expand Down Expand Up @@ -187,9 +188,9 @@ export class AxAICohere extends AxBaseAI<
};

override generateEmbedReq = (
req: Readonly<AxEmbedRequest>
req: Readonly<AxInternalEmbedRequest>
): [API, AxAICohereEmbedRequest] => {
const model = this.config.embedModel;
const model = req.embedModel;

if (!model) {
throw new Error('Embed model not set');
Expand Down
12 changes: 6 additions & 6 deletions src/ax/ai/google-gemini/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,11 @@ import {
} from '../base.js';
import type {
AxAIServiceOptions,
AxChatRequest,
AxChatResponse,
AxChatResponseResult,
AxEmbedRequest,
AxEmbedResponse,
AxInternalChatRequest,
AxInternalEmbedRequest,
AxModelConfig,
AxTokenUsage
} from '../types.js';
Expand Down Expand Up @@ -150,9 +150,9 @@ export class AxAIGoogleGemini extends AxBaseAI<
}

override generateChatReq = (
req: Readonly<AxChatRequest>
req: Readonly<AxInternalChatRequest>
): [API, AxAIGoogleGeminiChatRequest] => {
const model = this.config.model;
const model = req.model;
const stream = req.modelConfig?.stream ?? this.config.stream;

if (!req.chatPrompt || req.chatPrompt.length === 0) {
Expand Down Expand Up @@ -332,9 +332,9 @@ export class AxAIGoogleGemini extends AxBaseAI<
};

override generateEmbedReq = (
req: Readonly<AxEmbedRequest>
req: Readonly<AxInternalEmbedRequest>
): [API, AxAIGoogleGeminiBatchEmbedRequest] => {
const model = this.config.embedModel;
const model = req.embedModel;

if (!model) {
throw new Error('Embed model not set');
Expand Down
6 changes: 3 additions & 3 deletions src/ax/ai/huggingface/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@ import {
import type {
AxAIPromptConfig,
AxAIServiceOptions,
AxChatRequest,
AxChatResponse,
AxInternalChatRequest,
AxModelConfig
} from '../types.js';

Expand Down Expand Up @@ -88,11 +88,11 @@ export class AxAIHuggingFace extends AxBaseAI<
}

override generateChatReq = (
req: Readonly<AxChatRequest>,
req: Readonly<AxInternalChatRequest>,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_config: Readonly<AxAIPromptConfig>
): [API, AxAIHuggingFaceRequest] => {
const model = this.config.model;
const model = req.model;

const functionsList = req.functions
? `Functions:\n${JSON.stringify(req.functions, null, 2)}\n`
Expand Down
11 changes: 6 additions & 5 deletions src/ax/ai/openai/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,8 +10,9 @@ import type {
AxChatRequest,
AxChatResponse,
AxChatResponseResult,
AxEmbedRequest,
AxEmbedResponse,
AxInternalChatRequest,
AxInternalEmbedRequest,
AxModelConfig,
AxModelInfo
} from '../types.js';
Expand Down Expand Up @@ -121,11 +122,11 @@ export class AxAIOpenAI extends AxBaseAI<
}

override generateChatReq = (
req: Readonly<AxChatRequest>,
req: Readonly<AxInternalChatRequest>,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
_config: Readonly<AxAIPromptConfig>
): [API, AxAIOpenAIChatRequest] => {
const model = this.config.model;
const model = req.model;

if (!req.chatPrompt || req.chatPrompt.length === 0) {
throw new Error('Chat prompt is empty');
Expand Down Expand Up @@ -182,9 +183,9 @@ export class AxAIOpenAI extends AxBaseAI<
};

override generateEmbedReq = (
req: Readonly<AxEmbedRequest>
req: Readonly<AxInternalEmbedRequest>
): [API, AxAIOpenAIEmbedRequest] => {
const model = this.config.embedModel;
const model = req.embedModel;

if (!model) {
throw new Error('Embed model not set');
Expand Down
6 changes: 6 additions & 0 deletions src/ax/ai/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -143,11 +143,17 @@ export type AxChatRequest = {
model?: string;
};

/**
 * Internal variant of {@link AxChatRequest} in which `model` is required.
 * Built from a public AxChatRequest once a concrete model value has been
 * resolved (the public type leaves `model` optional), so provider
 * `generateChatReq` implementations can read `req.model` without a
 * null check. NOTE(review): presumably the resolution goes through the
 * model-map mentioned in this commit — confirm against AxBaseAI.
 */
export type AxInternalChatRequest = Omit<AxChatRequest, 'model'> &
  Required<Pick<AxChatRequest, 'model'>>;

/**
 * Public embedding request shape.
 * Both fields are optional at the call site; `embedModel` may be filled in
 * later (see {@link AxInternalEmbedRequest}, where it becomes required).
 */
export type AxEmbedRequest = {
  // Input texts to embed; readonly to signal the callee must not mutate them.
  texts?: readonly string[];
  // Optional model identifier/label for the embedding call.
  embedModel?: string;
};

/**
 * Internal variant of {@link AxEmbedRequest} in which `embedModel` is
 * required, so provider `generateEmbedReq` implementations can read
 * `req.embedModel` directly instead of falling back to their own config.
 */
export type AxInternalEmbedRequest = Omit<AxEmbedRequest, 'embedModel'> &
  Required<Pick<AxEmbedRequest, 'embedModel'>>;

export type AxRateLimiterFunction = <T = unknown>(
reqFunc: () => Promise<T | ReadableStream<T>>,
info: Readonly<{ modelUsage?: AxTokenUsage; embedModelUsage?: AxTokenUsage }>
Expand Down
8 changes: 5 additions & 3 deletions src/examples/summarize.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,14 +2,16 @@ import { AxAI, AxAIOpenAIModel, AxChainOfThought } from '@ax-llm/ax';

const noteText = `The technological singularity—or simply the singularity[1]—is a hypothetical future point in time at which technological growth becomes uncontrollable and irreversible, resulting in unforeseeable changes to human civilization.[2][3] According to the most popular version of the singularity hypothesis, I.J. Good's intelligence explosion model, an upgradable intelligent agent will eventually enter a "runaway reaction" of self-improvement cycles, each new and more intelligent generation appearing more and more rapidly, causing an "explosion" in intelligence and resulting in a powerful superintelligence that qualitatively far surpasses all human intelligence.[4]`;

// Example with OpenAI using custom labels in place of model names
const ai = new AxAI({
  name: 'openai',
  // Read from the environment; the example assumes OPENAI_APIKEY is set.
  apiKey: process.env.OPENAI_APIKEY as string,
  // 'model-a' is an arbitrary label, not a real OpenAI model name…
  config: { model: 'model-a' },
  // …it is resolved to a concrete model through this map.
  modelMap: {
    'model-a': AxAIOpenAIModel.GPT35Turbo
  }
});

// Enable request/response debug logging for the example run.
ai.setOptions({ debug: true });

// const ai = new AxAI({ name: 'ollama', model: 'nous-hermes2' });

const gen = new AxChainOfThought(
Expand Down

0 comments on commit b01fcb7

Please sign in to comment.