diff --git a/README.md b/README.md index c9687bf..e002d8d 100644 --- a/README.md +++ b/README.md @@ -72,7 +72,7 @@ OPENAI_API_KEY=... #### Using Upstash-hosted Open-Source Models -To use an OpenAI model, first initialize RAGChat: +To use an Upstash model, first initialize RAGChat: ```typescript import { RAGChat, upstashModel } from "@upstash/rag-chat"; export const ragChat = new RAGChat({ }); ``` +#### Using Custom Providers - TogetherAi, Replicate + +To use a custom model, first initialize RAGChat: + +```typescript +import { RAGChat, customModel } from "@upstash/rag-chat"; + +export const ragChat = new RAGChat({ + model: customModel("codellama/CodeLlama-70b-Instruct-hf", { + apiKey: "TOGETHER_AI_TOKEN", + baseUrl: "https://api.together.xyz/v1", + }), +}); +``` + And set your Upstash QStash API key environment variable: ```bash QSTASH_TOKEN=... ```
Where do I find my Upstash API key?
diff --git a/examples/nodejs/package.json b/examples/nodejs/package.json index 3fa8035..a07cc26 100644 --- a/examples/nodejs/package.json +++ b/examples/nodejs/package.json @@ -7,7 +7,7 @@ "@ai-sdk/google": "latest", "@ai-sdk/mistral": "latest", "@ai-sdk/openai": "latest", - "@upstash/rag-chat": "0.0.4-2.alpha", + "@upstash/rag-chat": "1.0.1", "ai": "latest", "dotenv": "16.4.5", "zod": "3.23.8", diff --git a/examples/nuxt/package.json b/examples/nuxt/package.json index 4d6f5cb..f23d9f0 100644 --- a/examples/nuxt/package.json +++ b/examples/nuxt/package.json @@ -29,6 +29,6 @@ }, "version": "0.0.0", "dependencies": { - "@upstash/rag-chat": "0.0.4-2.alpha" + "@upstash/rag-chat": "1.0.1" } } diff --git a/examples/solidjs/package.json b/examples/solidjs/package.json index 761329c..e478987 100644 --- a/examples/solidjs/package.json +++ b/examples/solidjs/package.json @@ -19,7 +19,7 @@ "@solidjs/meta": "0.29.4", "@solidjs/router": "^0.13.6", "@solidjs/start": "^1.0.2", - "@upstash/rag-chat": "0.0.4-2.alpha", + "@upstash/rag-chat": "1.0.1", "ai": "latest", "solid-js": "^1.8.17", "zod": "^3.23.8" diff --git a/examples/sveltekit/package.json b/examples/sveltekit/package.json index a23e002..156f2c1 100644 --- a/examples/sveltekit/package.json +++ b/examples/sveltekit/package.json @@ -12,7 +12,7 @@ "dependencies": { "@ai-sdk/openai": "latest", "@ai-sdk/svelte": "latest", - "@upstash/rag-chat": "0.0.4-2.alpha", + "@upstash/rag-chat": "1.0.1", "ai": "latest", "openai": "4.47.1" }, diff --git a/src/rag-chat-base.ts b/src/rag-chat-base.ts index e601fe5..4fdce8c 100644 --- a/src/rag-chat-base.ts +++ b/src/rag-chat-base.ts @@ -106,9 +106,9 @@ export class RAGChatBase { onChunk?.({ content: message, inputTokens: value?.usage_metadata?.input_tokens ?? 0, - outputTokens: value?.usage_metadata?.output_tokens ?? 0, + chunkTokens: value?.usage_metadata?.output_tokens ?? 0, totalTokens: value?.usage_metadata?.total_tokens ?? 
0, - // This actually streamed output from LLM, but cast it to UpstashMessage above to make everything type.But, in this case its not needed + // This is actually the streamed output from the LLM, but it is cast to UpstashMessage above to keep everything typed. In this case, however, the cast is not needed. rawContent: value as unknown as string, }); concatenatedOutput += message; diff --git a/src/types.ts b/src/types.ts index 114931c..7b903ef 100644 --- a/src/types.ts +++ b/src/types.ts @@ -65,12 +65,12 @@ export type ChatOptions = { onChunk?: ({ content, inputTokens, - outputTokens, + chunkTokens, totalTokens, rawContent, }: { inputTokens: number; - outputTokens: number; + chunkTokens: number; totalTokens: number; content: string; rawContent: string;