Skip to content

Commit

Permalink
feat: support for google vertex
Browse files Browse the repository at this point in the history
fix: bun support
  • Loading branch information
dosco committed Jun 15, 2024
1 parent f7edeb8 commit 49ee383
Show file tree
Hide file tree
Showing 9 changed files with 80 additions and 11 deletions.
5 changes: 2 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
# LLMClient - Typescript/JS Library to build with LLMs
# LLMClient - Build LLMs Powered Agents (Typescript)

JS/TS library to make to easy to build with LLMs. Full support for various LLMs and VectorDBs, Agents, Function Calling, Chain-of-Thought, RAG, Semantic Router and more. Based on the popular Stanford DSP paper. Create and compose efficient prompts using prompt signatures. 🌵 🦙 🔥 ❤️ 🖖🏼
JS/TS library that makes it easy to build agents and agentic workflows with LLMs. Full support for various LLMs and VectorDBs, Function Calling, Chain-of-Thought, RAG, Semantic Router and more. Based on the popular Stanford DSP paper. Build agents or teams of agents to solve complex problems 🌵 🦙 🔥 ❤️ 🖖🏼

[![NPM Package](https://img.shields.io/npm/v/llmclient?style=for-the-badge&color=green)](https://www.npmjs.com/package/llmclient)
[![Twitter](https://img.shields.io/twitter/follow/dosco?style=for-the-badge&color=red)](https://twitter.com/dosco)
Expand Down Expand Up @@ -422,7 +422,6 @@ OPENAI_APIKEY=openai_key npm run tsx ./src/examples/marketing.ts
| streaming1.ts | Output fields validation while streaming |
| streaming2.ts | Per output field validation while streaming |


## Built-in Functions

| Function | Description |
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "llmclient",
"version": "8.1.21",
"version": "8.1.22",
"type": "module",
"description": "The best library to work with LLMs",
"typings": "build/module/src/index.d.ts",
Expand Down
3 changes: 2 additions & 1 deletion src/ai/base.ts
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,8 @@ export class BaseAI<
throw new Error('No model defined');
}

this.modelInfo = modelInfo.filter((v) => v.name === models.model).at(0) ?? {
const mname = models.model.replace(/-0\d+$|-\d{2,}$/, '');
this.modelInfo = modelInfo.filter((v) => v.name === mname).at(0) ?? {
name: models.model,
currency: 'usd',
promptTokenCostPer1M: 0,
Expand Down
12 changes: 11 additions & 1 deletion src/ai/google-gemini/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,8 @@ export const GoogleGeminiDefaultCreativeConfig = (): GoogleGeminiConfig =>

export interface GoogleGeminiArgs {
apiKey: string;
projectId?: string;
region?: string;
config: Readonly<GoogleGeminiConfig>;
options?: Readonly<AIServiceOptions>;
}
Expand All @@ -93,16 +95,24 @@ export class GoogleGemini extends BaseAI<

constructor({
apiKey,
projectId,
region,
config = GoogleGeminiDefaultConfig(),
options
}: Readonly<GoogleGeminiArgs>) {
if (!apiKey || apiKey === '') {
throw new Error('GoogleGemini AI API key not set');
}

// Default: the public Generative Language API endpoint (API-key auth).
let apiURL = 'https://generativelanguage.googleapis.com/v1beta';

// When both a GCP project and a region are supplied, target Google Vertex AI
// instead. Note the base URL must not embed an HTTP verb, and the region must
// be interpolated twice (the original left a stray `POST ` prefix and a
// literal `{REGION}` placeholder, yielding an unusable URL).
if (projectId && region) {
  apiURL = `https://${region}-aiplatform.googleapis.com/v1/projects/${projectId}/locations/${region}/publishers/google/`;
}

super({
name: 'GoogleGeminiAI',
apiURL: 'https://generativelanguage.googleapis.com/v1beta',
apiURL,
headers: {},
modelInfo: modelInfoGoogleGemini,
models: { model: config.model, embedModel: config.embedModel },
Expand Down
2 changes: 1 addition & 1 deletion src/ai/google-gemini/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -149,7 +149,7 @@ export type GoogleGeminiChatResponseDelta = GoogleGeminiChatResponse;
* @export
*/
export type GoogleGeminiConfig = TextModelConfig & {
model: GoogleGeminiModel;
model: GoogleGeminiModel | string;
embedModel: GoogleGeminiEmbedModels;
safetySettings?: GoogleGeminiSafetySettings;
};
Expand Down
2 changes: 1 addition & 1 deletion src/ai/groq/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ export class Groq extends OpenAI {
super({
apiKey,
config,
options,
options: { ...options, streamingUsage: false },
apiURL: 'https://api.groq.com/openai/v1'
});

Expand Down
6 changes: 4 additions & 2 deletions src/ai/openai/api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ export interface OpenAIArgs {
apiKey: string;
apiURL?: string;
config?: Readonly<OpenAIConfig>;
options?: Readonly<AIServiceOptions>;
options?: Readonly<AIServiceOptions & { streamingUsage: boolean }>;
}

/**
Expand All @@ -91,6 +91,7 @@ export class OpenAI extends BaseAI<
OpenAIEmbedResponse
> {
private config: OpenAIConfig;
private streamingUsage: boolean;

constructor({
apiKey,
Expand All @@ -111,6 +112,7 @@ export class OpenAI extends BaseAI<
supportFor: { functions: true, streaming: true }
});
this.config = config;
this.streamingUsage = options?.streamingUsage ?? true;
}

override getModelConfig(): TextModelConfig {
Expand Down Expand Up @@ -219,7 +221,7 @@ export class OpenAI extends BaseAI<
user: req.identity?.user ?? this.config.user,
organization: req.identity?.organization,
...(frequencyPenalty ? { frequency_penalty: frequencyPenalty } : {}),
...(stream
...(stream && this.streamingUsage
? { stream: true, stream_options: { include_usage: true } }
: {})
};
Expand Down
9 changes: 8 additions & 1 deletion src/util/apicall.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@
import path from 'path';
import { type ReadableStream, TextDecoderStream } from 'stream/web';
import {
type ReadableStream,
TextDecoderStream as TextDecoderStreamNative
} from 'stream/web';

import type { Span } from '../trace/index.js';

import { TextDecoderStreamPolyfill } from './stream.js';
import { JSONStringifyStream } from './transform.js';

/**
* Util: API details
* @export
Expand All @@ -14,6 +19,8 @@ export type API = {
put?: boolean;
};

// Prefer the runtime's native `stream/web` TextDecoderStream; fall back to the
// local polyfill when the export is missing (the commit targets Bun support —
// NOTE(review): confirm Bun resolves this import to `undefined` rather than
// failing at module-link time).
const TextDecoderStream = TextDecoderStreamNative ?? TextDecoderStreamPolyfill;

export const apiCall = async <TRequest = unknown, TResponse = unknown>(
api: Readonly<
API & {
Expand Down
50 changes: 50 additions & 0 deletions src/util/stream.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
import {
type Transformer,
TransformStream,
type TransformStreamDefaultController
} from 'stream/web';

/**
 * Read-only configuration surface shared by text decoders; mirrors the
 * corresponding readonly properties of the WHATWG `TextDecoder`.
 * NOTE(review): not referenced by the code visible in this file — confirm it
 * is consumed elsewhere before removing.
 */
export interface TextDecoderCommon {
  // Label of the decoder's encoding (e.g. 'utf-8').
  readonly encoding: string;
  // When true, malformed input throws instead of emitting U+FFFD.
  readonly fatal: boolean;
  // When true, a leading byte-order mark is preserved rather than stripped.
  readonly ignoreBOM: boolean;
}

/**
 * Transformer that incrementally decodes binary chunks into UTF-8 text.
 * Multi-byte sequences split across chunk boundaries are buffered by the
 * underlying `TextDecoder` (via `stream: true`) and emitted once complete.
 */
class TextDecodeTransformer
  implements Transformer<ArrayBuffer | Uint8Array, string>
{
  // Stateful decoder: carries partial code points between transform() calls.
  private readonly decoder = new TextDecoder();

  transform(
    chunk: ArrayBuffer | Uint8Array,
    controller: TransformStreamDefaultController<string>
  ) {
    const isBufferSource =
      chunk instanceof ArrayBuffer || ArrayBuffer.isView(chunk);
    if (!isBufferSource) {
      throw new TypeError('Input data must be a BufferSource');
    }
    // stream: true keeps incomplete trailing byte sequences buffered.
    const decoded = this.decoder.decode(chunk, { stream: true });
    if (decoded.length > 0) {
      controller.enqueue(decoded);
    }
  }

  flush(controller: TransformStreamDefaultController<string>) {
    // Final decode with no argument drains any buffered partial sequence.
    const tail = this.decoder.decode();
    if (tail.length > 0) {
      controller.enqueue(tail);
    }
  }
}

/**
 * Drop-in stand-in for the native `TextDecoderStream` for runtimes whose
 * `stream/web` module lacks that export: a TransformStream that pipes binary
 * chunks through `TextDecodeTransformer` and yields decoded strings.
 */
export class TextDecoderStreamPolyfill extends TransformStream<
  ArrayBuffer | Uint8Array,
  string
> {
  constructor() {
    super(new TextDecodeTransformer());
  }
}

0 comments on commit 49ee383

Please sign in to comment.