diff --git a/.github/workflows/compatibility.yml b/.github/workflows/compatibility.yml index d8ab26254f31..cfe5447fa7d3 100644 --- a/.github/workflows/compatibility.yml +++ b/.github/workflows/compatibility.yml @@ -152,8 +152,8 @@ jobs: - name: Test `@langchain/anthropic` with lowest deps run: docker compose -f dependency_range_tests/docker-compose.yml run anthropic-lowest-deps - # VertexAI - vertexai-latest-deps: + # Google VertexAI + google-vertexai-latest-deps: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -162,10 +162,14 @@ jobs: with: node-version: ${{ env.NODE_VERSION }} cache: "yarn" + - name: Install dependencies + run: yarn install --immutable + - name: Build `@langchain/standard-tests` + run: yarn build --filter=@langchain/standard-tests - name: Test `@langchain/google-vertexai` with latest deps - run: docker compose -f dependency_range_tests/docker-compose.yml run vertexai-latest-deps + run: docker compose -f dependency_range_tests/docker-compose.yml run google-vertexai-latest-deps - vertexai-lowest-deps: + google-vertexai-lowest-deps: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -174,5 +178,9 @@ jobs: with: node-version: ${{ env.NODE_VERSION }} cache: "yarn" + - name: Install dependencies + run: yarn install --immutable + - name: Build `@langchain/standard-tests` + run: yarn build --filter=@langchain/standard-tests - name: Test `@langchain/google-vertexai` with lowest deps - run: docker compose -f dependency_range_tests/docker-compose.yml run vertexai-lowest-deps + run: docker compose -f dependency_range_tests/docker-compose.yml run google-vertexai-lowest-deps \ No newline at end of file diff --git a/dependency_range_tests/docker-compose.yml b/dependency_range_tests/docker-compose.yml index 9c2f4ebdbc28..fbc7315d19fa 100644 --- a/dependency_range_tests/docker-compose.yml +++ b/dependency_range_tests/docker-compose.yml @@ -114,25 +114,32 @@ services: - ./scripts:/scripts command: bash /scripts/with_standard_tests/anthropic/test-with-lowest-deps.sh - # VertexAI - vertexai-latest-deps: + # Google VertexAI + google-vertexai-latest-deps: image: node:18 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" + COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: + - ../turbo.json:/turbo.json + - ../package.json:/package.json + - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-google-vertexai:/libs/langchain-google-vertexai - ./scripts:/scripts - command: bash /scripts/vertexai/test-with-latest-deps.sh - vertexai-lowest-deps: + command: bash /scripts/with_standard_tests/google-vertexai/test-with-latest-deps.sh + google-vertexai-lowest-deps: image: node:18 environment: PUPPETEER_SKIP_DOWNLOAD: "true" PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "true" + COHERE_API_KEY: ${COHERE_API_KEY} working_dir: /app volumes: + - ../turbo.json:/turbo.json + - ../package.json:/package.json + - ../libs/langchain-standard-tests:/libs/langchain-standard-tests - ../libs/langchain-google-vertexai:/libs/langchain-google-vertexai - ./scripts:/scripts - command: bash /scripts/vertexai/test-with-lowest-deps.sh - \ No newline at end of file + command: bash /scripts/with_standard_tests/google-vertexai/test-with-lowest-deps.sh \ No newline at end of file diff --git a/dependency_range_tests/scripts/vertexai/test-with-latest-deps.sh b/dependency_range_tests/scripts/vertexai/test-with-latest-deps.sh deleted file mode 100644 index e5eb224b71e1..000000000000 --- 
a/dependency_range_tests/scripts/vertexai/test-with-latest-deps.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/env bash - -set -euxo pipefail - -export CI=true - -# enable extended globbing for omitting build artifacts -shopt -s extglob - -# avoid copying build artifacts from the host -cp -r ../libs/langchain-google-vertexai/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) ./ - -yarn - -# Check the test command completes successfully -NODE_OPTIONS=--experimental-vm-modules yarn run jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50% diff --git a/dependency_range_tests/scripts/vertexai/test-with-lowest-deps.sh b/dependency_range_tests/scripts/vertexai/test-with-lowest-deps.sh deleted file mode 100644 index 9a95587c845c..000000000000 --- a/dependency_range_tests/scripts/vertexai/test-with-lowest-deps.sh +++ /dev/null @@ -1,27 +0,0 @@ -#!/usr/bin/env bash - -set -euxo pipefail - -export CI=true - -# enable extended globbing for omitting build artifacts -shopt -s extglob - -# avoid copying build artifacts from the host -cp -r ../libs/langchain-google-vertexai/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) ./ - -mkdir -p /updater_script -cp -r /scripts/vertexai/node/!(node_modules|dist|dist-cjs|dist-esm|build|.next|.turbo) /updater_script/ - -cd /updater_script - -yarn - -cd /app - -node /updater_script/update_resolutions_lowest.js - -yarn - -# Check the test command completes successfully -NODE_OPTIONS=--experimental-vm-modules yarn run jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50% diff --git a/dependency_range_tests/scripts/vertexai/node/package.json b/dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/package.json similarity index 100% rename from dependency_range_tests/scripts/vertexai/node/package.json rename to dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/package.json diff --git a/dependency_range_tests/scripts/vertexai/node/update_resolutions_lowest.js b/dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/update_resolutions_lowest.js similarity index 92% rename from dependency_range_tests/scripts/vertexai/node/update_resolutions_lowest.js rename to dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/update_resolutions_lowest.js index b8768d8a7156..a600da592199 100644 --- a/dependency_range_tests/scripts/vertexai/node/update_resolutions_lowest.js +++ b/dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/update_resolutions_lowest.js @@ -1,7 +1,7 @@ const fs = require("fs"); const semver = require("semver"); -const communityPackageJsonPath = "package.json"; +const communityPackageJsonPath = "/app/monorepo/libs/langchain-google-vertexai/package.json"; const currentPackageJson = JSON.parse(fs.readFileSync(communityPackageJsonPath)); diff --git a/dependency_range_tests/scripts/vertexai/node/yarn.lock b/dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/yarn.lock similarity index 100% rename from dependency_range_tests/scripts/vertexai/node/yarn.lock rename to dependency_range_tests/scripts/with_standard_tests/google-vertexai/node/yarn.lock diff --git a/dependency_range_tests/scripts/with_standard_tests/google-vertexai/test-with-latest-deps.sh b/dependency_range_tests/scripts/with_standard_tests/google-vertexai/test-with-latest-deps.sh new file mode 100644 index 000000000000..3d43d870e321 --- /dev/null +++ 
b/dependency_range_tests/scripts/with_standard_tests/google-vertexai/test-with-latest-deps.sh @@ -0,0 +1,22 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +export CI=true + +# New monorepo directory paths +monorepo_dir="/app/monorepo" +monorepo_vertexai_dir="/app/monorepo/libs/langchain-google-vertexai" + +# Run the shared script to copy all necessary folders/files +bash /scripts/with_standard_tests/shared.sh google-vertexai + +# Navigate back to monorepo root and install dependencies +cd "$monorepo_dir" +yarn + +# Navigate into `@langchain/google-vertexai` to build and run tests +# We need to run inside the google-vertexai directory so Turborepo does +# not try to build the package/its workspace dependencies. +cd "$monorepo_vertexai_dir" +yarn test diff --git a/dependency_range_tests/scripts/with_standard_tests/google-vertexai/test-with-lowest-deps.sh b/dependency_range_tests/scripts/with_standard_tests/google-vertexai/test-with-lowest-deps.sh new file mode 100644 index 000000000000..a8de92d9b983 --- /dev/null +++ b/dependency_range_tests/scripts/with_standard_tests/google-vertexai/test-with-lowest-deps.sh @@ -0,0 +1,34 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +export CI=true + +monorepo_dir="/app/monorepo" +monorepo_vertexai_dir="/app/monorepo/libs/langchain-google-vertexai" +updater_script_dir="/app/updater_script" +original_updater_script_dir="/scripts/with_standard_tests/google-vertexai/node" + +# Run the shared script to copy all necessary folders/files +bash /scripts/with_standard_tests/shared.sh google-vertexai + +# Copy the updater script to the monorepo +mkdir -p "$updater_script_dir" +cp "$original_updater_script_dir"/* "$updater_script_dir/" + +# Install deps (e.g. semver) for the updater script +cd "$updater_script_dir" +yarn +# Run the updater script +node "update_resolutions_lowest.js" + + +# Navigate back to monorepo root and install dependencies +cd "$monorepo_dir" +yarn + +# Navigate into `@langchain/google-vertexai` to build and run tests +# We need to run inside the package directory so Turborepo does +# not try to build the package/its workspace dependencies.
+cd "$monorepo_vertexai_dir" +yarn test diff --git a/libs/langchain-google-gauth/package.json b/libs/langchain-google-gauth/package.json index e2338094c357..03de6da38f79 100644 --- a/libs/langchain-google-gauth/package.json +++ b/libs/langchain-google-gauth/package.json @@ -42,7 +42,6 @@ "devDependencies": { "@jest/globals": "^29.5.0", "@langchain/scripts": "~0.0.14", - "@langchain/standard-tests": "0.0.0", "@swc/core": "^1.3.90", "@swc/jest": "^0.2.29", "@tsconfig/recommended": "^1.0.3", diff --git a/libs/langchain-google-vertexai-web/package.json b/libs/langchain-google-vertexai-web/package.json index 1111efe30682..bca35887ed7e 100644 --- a/libs/langchain-google-vertexai-web/package.json +++ b/libs/langchain-google-vertexai-web/package.json @@ -27,10 +27,10 @@ "lint:fix": "yarn lint:eslint --fix && yarn lint:dpdm", "clean": "rm -rf .turbo dist/", "prepack": "yarn build", - "test": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%", - "test:watch": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts", - "test:single": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000", - "test:integration": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", + "test": "NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%", + "test:watch": "NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts", + "test:single": "NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000", + "test:int": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", "format": "prettier --config .prettierrc --write \"src\"", "format:check": "prettier --config .prettierrc --check \"src\"", "move-cjs-to-dist": "yarn lc-build --config ./langchain.config.js --move-cjs-dist", diff --git a/libs/langchain-google-vertexai/package.json b/libs/langchain-google-vertexai/package.json index f6f9a40effc8..ec6a2eef3e5c 100644 --- a/libs/langchain-google-vertexai/package.json +++ b/libs/langchain-google-vertexai/package.json @@ -27,10 +27,13 @@ "lint:fix": "yarn lint:eslint --fix && yarn lint:dpdm", "clean": "rm -rf .turbo dist/", "prepack": "yarn build", - "test": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%", - "test:watch": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts", - "test:single": "yarn run build:deps && NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000", - "test:integration": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", + "test": "NODE_OPTIONS=--experimental-vm-modules jest --testPathIgnorePatterns=\\.int\\.test.ts --testTimeout 30000 --maxWorkers=50%", + "test:watch": "NODE_OPTIONS=--experimental-vm-modules jest --watch --testPathIgnorePatterns=\\.int\\.test.ts", + "test:single": "NODE_OPTIONS=--experimental-vm-modules yarn run jest --config jest.config.cjs --testTimeout 100000", + "test:int": "NODE_OPTIONS=--experimental-vm-modules jest
--testPathPattern=\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", + "test:standard:unit": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.standard\\.test.ts --testTimeout 100000 --maxWorkers=50%", + "test:standard:int": "NODE_OPTIONS=--experimental-vm-modules jest --testPathPattern=\\.standard\\.int\\.test.ts --testTimeout 100000 --maxWorkers=50%", + "test:standard": "yarn test:standard:unit && yarn test:standard:int", "format": "prettier --config .prettierrc --write \"src\"", "format:check": "prettier --config .prettierrc --check \"src\"", "move-cjs-to-dist": "yarn lc-build --config ./langchain.config.js --move-cjs-dist", @@ -45,7 +48,9 @@ }, "devDependencies": { "@jest/globals": "^29.5.0", + "@langchain/google-common": "latest", "@langchain/scripts": "~0.0.14", + "@langchain/standard-tests": "0.0.0", "@swc/core": "^1.3.90", "@swc/jest": "^0.2.29", "@tsconfig/recommended": "^1.0.3", diff --git a/libs/langchain-google-vertexai/src/tests/chat_models.standard.int.test.ts b/libs/langchain-google-vertexai/src/tests/chat_models.standard.int.test.ts new file mode 100644 index 000000000000..0b62d2f3ae9b --- /dev/null +++ b/libs/langchain-google-vertexai/src/tests/chat_models.standard.int.test.ts @@ -0,0 +1,58 @@ +/* eslint-disable no-process-env */ +import { test, expect } from "@jest/globals"; +import { ChatModelIntegrationTests } from "@langchain/standard-tests"; +import { AIMessageChunk } from "@langchain/core/messages"; +import { GoogleAIBaseLanguageModelCallOptions } from "@langchain/google-common"; +import { ChatVertexAI } from "../chat_models.js"; + +class ChatVertexAIStandardIntegrationTests extends ChatModelIntegrationTests< + GoogleAIBaseLanguageModelCallOptions, + AIMessageChunk +> { + constructor() { + if (!process.env.GOOGLE_APPLICATION_CREDENTIALS) { + throw new Error( + "GOOGLE_APPLICATION_CREDENTIALS must be set to run standard integration tests." + ); + } + super({ + Cls: ChatVertexAI, + chatModelHasToolCalling: true, + chatModelHasStructuredOutput: true, + constructorArgs: { + model: "gemini-1.5-pro", + }, + }); + } + + async testUsageMetadataStreaming() { + this.skipTestMessage( + "testUsageMetadataStreaming", + "ChatVertexAI", + "Streaming usage metadata is not currently supported." + ); + } + + async testUsageMetadata() { + this.skipTestMessage( + "testUsageMetadata", + "ChatVertexAI", + "Usage metadata is not currently supported." + ); + } + + async testToolMessageHistoriesListContent() { + this.skipTestMessage( + "testToolMessageHistoriesListContent", + "ChatVertexAI", + "Not implemented."
+ ); + } +} + +const testClass = new ChatVertexAIStandardIntegrationTests(); + +test("ChatVertexAIStandardIntegrationTests", async () => { + const testResults = await testClass.runTests(); + expect(testResults).toBe(true); +}); diff --git a/libs/langchain-google-vertexai/src/tests/chat_models.standard.test.ts b/libs/langchain-google-vertexai/src/tests/chat_models.standard.test.ts new file mode 100644 index 000000000000..ca7ad3a58e11 --- /dev/null +++ b/libs/langchain-google-vertexai/src/tests/chat_models.standard.test.ts @@ -0,0 +1,39 @@ +/* eslint-disable no-process-env */ +import { test, expect } from "@jest/globals"; +import { ChatModelUnitTests } from "@langchain/standard-tests"; +import { AIMessageChunk } from "@langchain/core/messages"; +import { GoogleAIBaseLanguageModelCallOptions } from "@langchain/google-common"; +import { ChatVertexAI } from "../chat_models.js"; + +class ChatVertexAIStandardUnitTests extends ChatModelUnitTests< + GoogleAIBaseLanguageModelCallOptions, + AIMessageChunk +> { + constructor() { + super({ + Cls: ChatVertexAI, + chatModelHasToolCalling: true, + chatModelHasStructuredOutput: true, + constructorArgs: {}, + }); + // This must be set so methods like `.bindTools` or `.withStructuredOutput`, + // which we call after instantiating the model, will work. + // (the constructor will throw if credentials are not set) + process.env.GOOGLE_APPLICATION_CREDENTIALS = "test"; + } + + testChatModelInitApiKey() { + this.skipTestMessage( + "testChatModelInitApiKey", + "ChatVertexAI (gauth)", + this.multipleApiKeysRequiredMessage + ); + } +} + +const testClass = new ChatVertexAIStandardUnitTests(); + +test("ChatVertexAIStandardUnitTests", () => { + const testResults = testClass.runTests(); + expect(testResults).toBe(true); +}); diff --git a/libs/langchain-google-webauth/src/tests/chat_models.int.test.ts b/libs/langchain-google-webauth/src/tests/chat_models.int.test.ts deleted file mode 100644 index fac8c6fed811..000000000000 --- a/libs/langchain-google-webauth/src/tests/chat_models.int.test.ts +++ /dev/null @@ -1,446 +0,0 @@ -import { expect, test } from "@jest/globals"; -import { - AIMessage, - AIMessageChunk, - BaseMessage, - BaseMessageChunk, - BaseMessageLike, - HumanMessage, - // MessageContentComplex, - // MessageContentText, - SystemMessage, - ToolMessage, -} from "@langchain/core/messages"; -import { BaseLanguageModelInput } from "@langchain/core/language_models/base"; -import { ChatPromptValue } from "@langchain/core/prompt_values"; -import { GeminiTool, GoogleAISafetySetting } from "@langchain/google-common"; -import { ChatGoogle } from "../chat_models.js"; - -describe("Google APIKey Chat", () => { - test("invoke", async () => { - const model = new ChatGoogle(); - try { - const res = await model.invoke( - "What is the answer to life the universe and everything? Answer briefly."
- ); - expect(res).toBeDefined(); - expect(res._getType()).toEqual("ai"); - - const aiMessage = res as AIMessageChunk; - expect(aiMessage.content).toBeDefined(); - expect(aiMessage.content).toBe("42"); - } catch (e) { - console.error(e); - throw e; - } - }); - - test("generate", async () => { - const safetySettings: GoogleAISafetySetting[] = [ - { - category: "HARM_CATEGORY_HARASSMENT", - threshold: "BLOCK_ONLY_HIGH", - }, - { - category: "HARM_CATEGORY_HATE_SPEECH", - threshold: "BLOCK_ONLY_HIGH", - }, - { - category: "HARM_CATEGORY_SEXUALLY_EXPLICIT", - threshold: "BLOCK_ONLY_HIGH", - }, - { - category: "HARM_CATEGORY_DANGEROUS_CONTENT", - threshold: "BLOCK_ONLY_HIGH", - }, - ]; - const model = new ChatGoogle({ safetySettings }); - try { - const messages: BaseMessage[] = [ - new SystemMessage( - "You will reply to all requests to toss a coin with either H, indicating heads, or T, indicating tails." - ), - new HumanMessage("Toss the coin"), - new AIMessage("T"), - new HumanMessage("Toss the coin again"), - ]; - const res = await model.predictMessages(messages); - expect(res).toBeDefined(); - expect(res._getType()).toEqual("ai"); - - const aiMessage = res as AIMessageChunk; - expect(aiMessage.content).toBeDefined(); - expect(["H", "T"]).toContainEqual(aiMessage.content); - } catch (e) { - console.error(JSON.stringify(e, null, 1)); - throw e; - } - }); - - test("stream", async () => { - const safetySettings: GoogleAISafetySetting[] = [ - { - category: "HARM_CATEGORY_HARASSMENT", - threshold: "BLOCK_ONLY_HIGH", - }, - { - category: "HARM_CATEGORY_HATE_SPEECH", - threshold: "BLOCK_ONLY_HIGH", - }, - { - category: "HARM_CATEGORY_SEXUALLY_EXPLICIT", - threshold: "BLOCK_ONLY_HIGH", - }, - { - category: "HARM_CATEGORY_DANGEROUS_CONTENT", - threshold: "BLOCK_ONLY_HIGH", - }, - ]; - const model = new ChatGoogle({ safetySettings }); - try { - const input: BaseLanguageModelInput = new ChatPromptValue([ - new SystemMessage( - "You will reply to all requests to flip a coin with either H, indicating heads, or T, indicating tails." 
- ), - new HumanMessage("Flip the coin"), - new AIMessage("T"), - new HumanMessage("Flip the coin again"), - ]); - const res = await model.stream(input); - const resArray: BaseMessageChunk[] = []; - for await (const chunk of res) { - resArray.push(chunk); - } - expect(resArray).toBeDefined(); - expect(resArray.length).toBeGreaterThanOrEqual(1); - - const lastChunk = resArray[resArray.length - 1]; - expect(lastChunk).toBeDefined(); - expect(lastChunk._getType()).toEqual("ai"); - const aiChunk = lastChunk as AIMessageChunk; - console.log(aiChunk); - - console.log(JSON.stringify(resArray, null, 2)); - } catch (e) { - console.error(JSON.stringify(e, null, 1)); - throw e; - } - }); - - test("function", async () => { - const tools: GeminiTool[] = [ - { - functionDeclarations: [ - { - name: "test", - description: - "Run a test with a specific name and get if it passed or failed", - parameters: { - type: "object", - properties: { - testName: { - type: "string", - description: "The name of the test that should be run.", - }, - }, - required: ["testName"], - }, - }, - ], - }, - ]; - const model = new ChatGoogle().bind({ tools }); - const result = await model.invoke("Run a test on the cobalt project"); - expect(result).toHaveProperty("content"); - expect(result.content).toBe(""); - const args = result?.lc_kwargs?.additional_kwargs; - expect(args).toBeDefined(); - expect(args).toHaveProperty("tool_calls"); - expect(Array.isArray(args.tool_calls)).toBeTruthy(); - expect(args.tool_calls).toHaveLength(1); - const call = args.tool_calls[0]; - expect(call).toHaveProperty("type"); - expect(call.type).toBe("function"); - expect(call).toHaveProperty("function"); - const func = call.function; - expect(func).toBeDefined(); - expect(func).toHaveProperty("name"); - expect(func.name).toBe("test"); - expect(func).toHaveProperty("arguments"); - expect(typeof func.arguments).toBe("string"); - expect(func.arguments.replaceAll("\n", "")).toBe('{"testName":"cobalt"}'); - }); - - test("function reply", async () => { - const tools: GeminiTool[] = [ - { - functionDeclarations: [ - { - name: "test", - description: - "Run a test with a specific name and get if it passed or failed", - parameters: { - type: "object", - properties: { - testName: { - type: "string", - description: "The name of the test that should be run.", - }, - }, - required: ["testName"], - }, - }, - ], - }, - ]; - const model = new ChatGoogle().bind({ tools }); - const toolResult = { - testPassed: true, - }; - const messages: BaseMessageLike[] = [ - new HumanMessage("Run a test on the cobalt project."), - new AIMessage("", { - tool_calls: [ - { - id: "test", - type: "function", - function: { - name: "test", - arguments: '{"testName":"cobalt"}', - }, - }, - ], - }), - new ToolMessage(JSON.stringify(toolResult), "test"), - ]; - const res = await model.stream(messages); - const resArray: BaseMessageChunk[] = []; - for await (const chunk of res) { - resArray.push(chunk); - } - console.log(JSON.stringify(resArray, null, 2)); - }); - - test("withStructuredOutput", async () => { - const tool = { - name: "get_weather", - description: - "Get the weather of a specific location and return the temperature in Celsius.", - parameters: { - type: "object", - properties: { - location: { - type: "string", - description: "The name of city to get the weather for.", - }, - }, - required: ["location"], - }, - }; - const model = new ChatGoogle().withStructuredOutput(tool); - const result = await model.invoke("What is the weather in Paris?"); - 
expect(result).toHaveProperty("location"); - }); -}); - -describe("Google Webauth Chat", () => { - test("invoke", async () => { - const model = new ChatGoogle(); - try { - const res = await model.invoke("What is 1 + 1?"); - expect(res).toBeDefined(); - expect(res._getType()).toEqual("ai"); - - const aiMessage = res as AIMessageChunk; - expect(aiMessage.content).toBeDefined(); - - expect(typeof aiMessage.content).toBe("string"); - const text = aiMessage.content as string; - expect(text).toMatch(/(1 + 1 (equals|is|=) )?2.? ?/); - - /* - expect(aiMessage.content.length).toBeGreaterThan(0); - expect(aiMessage.content[0]).toBeDefined(); - const content = aiMessage.content[0] as MessageContentComplex; - expect(content).toHaveProperty("type"); - expect(content.type).toEqual("text"); - - const textContent = content as MessageContentText; - expect(textContent.text).toBeDefined(); - expect(textContent.text).toEqual("2"); - */ - } catch (e) { - console.error(e); - throw e; - } - }); - - test("generate", async () => { - const model = new ChatGoogle(); - try { - const messages: BaseMessage[] = [ - new SystemMessage( - "You will reply to all requests to flip a coin with either H, indicating heads, or T, indicating tails." - ), - new HumanMessage("Flip it"), - new AIMessage("T"), - new HumanMessage("Flip the coin again"), - ]; - const res = await model.predictMessages(messages); - expect(res).toBeDefined(); - expect(res._getType()).toEqual("ai"); - - const aiMessage = res as AIMessageChunk; - expect(aiMessage.content).toBeDefined(); - - expect(typeof aiMessage.content).toBe("string"); - const text = aiMessage.content as string; - expect(["H", "T"]).toContainEqual(text); - - /* - expect(aiMessage.content.length).toBeGreaterThan(0); - expect(aiMessage.content[0]).toBeDefined(); - - const content = aiMessage.content[0] as MessageContentComplex; - expect(content).toHaveProperty("type"); - expect(content.type).toEqual("text"); - - const textContent = content as MessageContentText; - expect(textContent.text).toBeDefined(); - expect(["H", "T"]).toContainEqual(textContent.text); - */ - } catch (e) { - console.error(e); - throw e; - } - }); - - test("stream", async () => { - const model = new ChatGoogle(); - try { - const input: BaseLanguageModelInput = new ChatPromptValue([ - new SystemMessage( - "You will reply to all requests to flip a coin with either H, indicating heads, or T, indicating tails." 
- ), - new HumanMessage("Flip it"), - new AIMessage("T"), - new HumanMessage("Flip the coin again"), - ]); - const res = await model.stream(input); - const resArray: BaseMessageChunk[] = []; - for await (const chunk of res) { - resArray.push(chunk); - } - expect(resArray).toBeDefined(); - expect(resArray.length).toBeGreaterThanOrEqual(1); - - const lastChunk = resArray[resArray.length - 1]; - expect(lastChunk).toBeDefined(); - expect(lastChunk._getType()).toEqual("ai"); - const aiChunk = lastChunk as AIMessageChunk; - console.log(aiChunk); - - console.log(JSON.stringify(resArray, null, 2)); - } catch (e) { - console.error(e); - throw e; - } - }); - - test("function", async () => { - const tools: GeminiTool[] = [ - { - functionDeclarations: [ - { - name: "test", - description: - "Run a test with a specific name and get if it passed or failed", - parameters: { - type: "object", - properties: { - testName: { - type: "string", - description: "The name of the test that should be run.", - }, - }, - required: ["testName"], - }, - }, - ], - }, - ]; - const model = new ChatGoogle().bind({ - tools, - }); - const result = await model.invoke("Run a test on the cobalt project"); - expect(result).toHaveProperty("content"); - const args = result?.lc_kwargs?.additional_kwargs; - expect(args).toBeDefined(); - expect(args).toHaveProperty("tool_calls"); - expect(Array.isArray(args.tool_calls)).toBeTruthy(); - expect(args.tool_calls).toHaveLength(1); - const call = args.tool_calls[0]; - expect(call).toHaveProperty("type"); - expect(call.type).toBe("function"); - expect(call).toHaveProperty("function"); - const func = call.function; - expect(func).toBeDefined(); - expect(func).toHaveProperty("name"); - expect(func.name).toBe("test"); - expect(func).toHaveProperty("arguments"); - expect(typeof func.arguments).toBe("string"); - expect(func.arguments.replaceAll("\n", "")).toBe('{"testName":"cobalt"}'); - }); - - test("function reply", async () => { - const tools: GeminiTool[] = [ - { - functionDeclarations: [ - { - name: "test", - description: - "Run a test with a specific name and get if it passed or failed", - parameters: { - type: "object", - properties: { - testName: { - type: "string", - description: "The name of the test that should be run.", - }, - }, - required: ["testName"], - }, - }, - ], - }, - ]; - const model = new ChatGoogle().bind({ - tools, - }); - const toolResult = { - testPassed: true, - }; - const messages: BaseMessageLike[] = [ - new HumanMessage("Run a test on the cobalt project."), - new AIMessage("", { - tool_calls: [ - { - id: "test", - type: "function", - function: { - name: "test", - arguments: '{"testName":"cobalt"}', - }, - }, - ], - }), - new ToolMessage(JSON.stringify(toolResult), "test"), - ]; - const res = await model.stream(messages); - const resArray: BaseMessageChunk[] = []; - for await (const chunk of res) { - resArray.push(chunk); - } - console.log(JSON.stringify(resArray, null, 2)); - }); -}); diff --git a/libs/langchain-google-webauth/src/tests/llms.int.test.ts b/libs/langchain-google-webauth/src/tests/llms.int.test.ts deleted file mode 100644 index 47cdbd8ee9dc..000000000000 --- a/libs/langchain-google-webauth/src/tests/llms.int.test.ts +++ /dev/null @@ -1,294 +0,0 @@ -import { test } from "@jest/globals"; -import { - AIMessage, - BaseMessage, - HumanMessageChunk, - MessageContentComplex, -} from "@langchain/core/messages"; -import { ChatPromptValue } from "@langchain/core/prompt_values"; -import { GoogleLLM } from "../llms.js"; - -const imgData = { - blueSquare: - 
"iVBORw0KGgoAAAANSUhEUgAAAAoAAAAKCAIAAAACUFjqAAAACXBIWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH6AIbFwQSRaexCAAAAB1pVFh0Q29tbWVudAAAAAAAQ3JlYXRlZCB3aXRoIEdJTVBkLmUHAAAAJklEQVQY02P8//8/A27AxIAXsEAor31f0CS2OfEQ1j2Q0owU+RsAGNUJD2/04PgAAAAASUVORK5CYII=", -}; - -describe.skip("Google APIKey LLM", () => { - test("platform", async () => { - const model = new GoogleLLM(); - expect(model.platform).toEqual("gai"); - }); - - /* - * This test currently fails in AI Studio due to zealous safety systems - */ - test.skip("call", async () => { - const model = new GoogleLLM(); - const res = await model.invoke("1 + 1 = "); - if (res.length === 1) { - expect(res).toBe("2"); - } else { - expect(res.length).toBeGreaterThan(0); - console.log("call result:", res); - } - }); - - test("call", async () => { - const model = new GoogleLLM(); - try { - const res = await model.invoke("If the time is 1:00, what time is it?"); - expect(res.length).toBeGreaterThan(0); - expect(res.substring(0, 4)).toEqual("1:00"); - } catch (xx) { - console.error(xx); - throw xx; - } - }); - - test("stream", async () => { - const model = new GoogleLLM(); - const stream = await model.stream( - "What is the answer to live, the universe, and everything? Be verbose." - ); - const chunks = []; - for await (const chunk of stream) { - chunks.push(chunk); - } - expect(chunks.length).toBeGreaterThan(1); - }); - - test("predictMessage image", async () => { - const model = new GoogleLLM({ - modelName: "gemini-pro-vision", - }); - const message: MessageContentComplex[] = [ - { - type: "text", - text: "What is in this image?", - }, - { - type: "image_url", - image_url: `data:image/png;base64,${imgData.blueSquare}`, - }, - ]; - - const messages: BaseMessage[] = [ - new HumanMessageChunk({ content: message }), - ]; - const res = await model.predictMessages(messages); - expect(res).toBeInstanceOf(AIMessage); - expect(Array.isArray(res.content)).toEqual(true); - expect(res.content[0]).toHaveProperty("text"); - console.log("res", res); - }); - - test("invoke image", async () => { - const model = new GoogleLLM({ - modelName: "gemini-pro-vision", - }); - const message: MessageContentComplex[] = [ - { - type: "text", - text: "What is in this image?", - }, - { - type: "image_url", - image_url: `data:image/png;base64,${imgData.blueSquare}`, - }, - ]; - - const messages: BaseMessage[] = [ - new HumanMessageChunk({ content: message }), - ]; - const input = new ChatPromptValue(messages); - const res = await model.invoke(input); - expect(res).toBeDefined(); - expect(res.length).toBeGreaterThan(0); - console.log("res", res); - }); -}); - -describe.skip("Google WebAuth LLM", () => { - test("platform", async () => { - const model = new GoogleLLM(); - expect(model.platform).toEqual("gcp"); - }); - - test("call", async () => { - const model = new GoogleLLM(); - const res = await model.invoke("1 + 1 = "); - if (res.length === 1) { - expect(res).toBe("2"); - } else { - expect(res.length).toBeGreaterThan(0); - console.log("call result:", res); - } - }); - - test("stream", async () => { - const model = new GoogleLLM(); - const stream = await model.stream( - "What is the answer to live, the universe, and everything? Be verbose." 
- ); - const chunks = []; - for await (const chunk of stream) { - chunks.push(chunk); - } - expect(chunks.length).toBeGreaterThan(1); - }); - - test("predictMessage image", async () => { - const model = new GoogleLLM({ - modelName: "gemini-pro-vision", - }); - const message: MessageContentComplex[] = [ - { - type: "text", - text: "What is in this image?", - }, - { - type: "image_url", - image_url: `data:image/png;base64,${imgData.blueSquare}`, - }, - ]; - - const messages: BaseMessage[] = [ - new HumanMessageChunk({ content: message }), - ]; - const res = await model.predictMessages(messages); - expect(res).toBeInstanceOf(AIMessage); - expect(Array.isArray(res.content)).toEqual(true); - expect(res.content[0]).toHaveProperty("text"); - console.log("res", res); - }); - - test("invoke image", async () => { - const model = new GoogleLLM({ - modelName: "gemini-pro-vision", - }); - const message: MessageContentComplex[] = [ - { - type: "text", - text: "What is in this image?", - }, - { - type: "image_url", - image_url: `data:image/png;base64,${imgData.blueSquare}`, - }, - ]; - - const messages: BaseMessage[] = [ - new HumanMessageChunk({ content: message }), - ]; - const input = new ChatPromptValue(messages); - const res = await model.invoke(input); - expect(res).toBeDefined(); - expect(res.length).toBeGreaterThan(0); - console.log("res", res); - }); -}); - -describe.skip("Google WebAuth gai LLM", () => { - test("platform", async () => { - const model = new GoogleLLM({ - platformType: "gai", - }); - expect(model.platform).toEqual("gai"); - }); - - /* - * This test currently fails in AI Studio due to zealous safety systems - */ - test.skip("call", async () => { - const model = new GoogleLLM({ - platformType: "gai", - }); - const res = await model.invoke("1 + 1 = "); - if (res.length === 1) { - expect(res).toBe("2"); - } else { - expect(res.length).toBeGreaterThan(0); - console.log("call result:", res); - } - }); - - test("call", async () => { - const model = new GoogleLLM({ - platformType: "gai", - }); - try { - const res = await model.invoke("If the time is 1:00, what time is it?"); - expect(res.length).toBeGreaterThan(0); - expect(res.substring(0, 4)).toEqual("1:00"); - } catch (xx) { - console.error(xx); - throw xx; - } - }); - - test("stream", async () => { - const model = new GoogleLLM({ - platformType: "gai", - }); - const stream = await model.stream( - "What is the answer to live, the universe, and everything? Be verbose." 
- ); - const chunks = []; - for await (const chunk of stream) { - chunks.push(chunk); - } - expect(chunks.length).toBeGreaterThan(1); - }); - - test("predictMessage image", async () => { - const model = new GoogleLLM({ - platformType: "gai", - modelName: "gemini-pro-vision", - }); - const message: MessageContentComplex[] = [ - { - type: "text", - text: "What is in this image?", - }, - { - type: "image_url", - image_url: `data:image/png;base64,${imgData.blueSquare}`, - }, - ]; - - const messages: BaseMessage[] = [ - new HumanMessageChunk({ content: message }), - ]; - const res = await model.predictMessages(messages); - expect(res).toBeInstanceOf(AIMessage); - expect(Array.isArray(res.content)).toEqual(true); - expect(res.content[0]).toHaveProperty("text"); - console.log("res", res); - }); - - test("invoke image", async () => { - const model = new GoogleLLM({ - platformType: "gai", - modelName: "gemini-pro-vision", - }); - const message: MessageContentComplex[] = [ - { - type: "text", - text: "What is in this image?", - }, - { - type: "image_url", - image_url: `data:image/png;base64,${imgData.blueSquare}`, - }, - ]; - - const messages: BaseMessage[] = [ - new HumanMessageChunk({ content: message }), - ]; - const input = new ChatPromptValue(messages); - const res = await model.invoke(input); - expect(res).toBeDefined(); - expect(res.length).toBeGreaterThan(0); - console.log("res", res); - }); -}); diff --git a/libs/langchain-standard-tests/src/integration_tests/chat_models.ts b/libs/langchain-standard-tests/src/integration_tests/chat_models.ts index bb249bb30364..d8416b743aea 100644 --- a/libs/langchain-standard-tests/src/integration_tests/chat_models.ts +++ b/libs/langchain-standard-tests/src/integration_tests/chat_models.ts @@ -36,6 +36,29 @@ class AdderTool extends StructuredTool { } } +interface ChatModelIntegrationTestsFields< + CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions, + OutputMessageType extends BaseMessageChunk = BaseMessageChunk, + ConstructorArgs extends RecordStringAny = RecordStringAny +> extends BaseChatModelsTestsFields< + CallOptions, + OutputMessageType, + ConstructorArgs + > { + /** + * Override the default AIMessage response type + * to check for. + * @default AIMessage + */ + invokeResponseType?: typeof AIMessage | typeof AIMessageChunk; + /** + * The ID to set for function calls. + * Set this field to override the default function ID. + * @default "abc123" + */ + functionId?: string; +} + export abstract class ChatModelIntegrationTests< CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions, OutputMessageType extends BaseMessageChunk = BaseMessageChunk, @@ -43,22 +66,19 @@ export abstract class ChatModelIntegrationTests< > extends BaseChatModelsTests { functionId = "abc123"; + invokeResponseType: typeof AIMessage | typeof AIMessageChunk = AIMessage; + constructor( - fields: BaseChatModelsTestsFields< + fields: ChatModelIntegrationTestsFields< CallOptions, OutputMessageType, ConstructorArgs - > & { - /** - * The ID to set for function calls. - * Set this field to override the default function ID. - * @default "abc123" - */ - functionId?: string; - } + > ) { super(fields); this.functionId = fields.functionId ?? this.functionId; + this.invokeResponseType = + fields.invokeResponseType ?? 
this.invokeResponseType; } async testInvoke( @@ -67,7 +87,7 @@ export abstract class ChatModelIntegrationTests< const chatModel = new this.Cls(this.constructorArgs); const result = await chatModel.invoke("Hello", callOptions); expect(result).toBeDefined(); - expect(result).toBeInstanceOf(AIMessage); + expect(result).toBeInstanceOf(this.invokeResponseType); expect(typeof result.content).toBe("string"); expect(result.content.length).toBeGreaterThan(0); } @@ -98,7 +118,7 @@ export abstract class ChatModelIntegrationTests< expect(batchResults.length).toBe(2); for (const result of batchResults) { expect(result).toBeDefined(); - expect(result).toBeInstanceOf(AIMessage); + expect(result).toBeInstanceOf(this.invokeResponseType); expect(typeof result.content).toBe("string"); expect(result.content.length).toBeGreaterThan(0); } @@ -115,7 +135,7 @@ export abstract class ChatModelIntegrationTests< ]; const result = await chatModel.invoke(messages, callOptions); expect(result).toBeDefined(); - expect(result).toBeInstanceOf(AIMessage); + expect(result).toBeInstanceOf(this.invokeResponseType); expect(typeof result.content).toBe("string"); expect(result.content.length).toBeGreaterThan(0); } @@ -126,7 +146,7 @@ export abstract class ChatModelIntegrationTests< const chatModel = new this.Cls(this.constructorArgs); const result = await chatModel.invoke("Hello", callOptions); expect(result).toBeDefined(); - expect(result).toBeInstanceOf(AIMessage); + expect(result).toBeInstanceOf(this.invokeResponseType); if (!("usage_metadata" in result)) { throw new Error("result is not an instance of AIMessage"); } @@ -211,7 +231,7 @@ export abstract class ChatModelIntegrationTests< messagesStringContent, callOptions ); - expect(resultStringContent).toBeInstanceOf(AIMessage); + expect(resultStringContent).toBeInstanceOf(this.invokeResponseType); } /** @@ -269,7 +289,7 @@ export abstract class ChatModelIntegrationTests< messagesListContent, callOptions ); - expect(resultListContent).toBeInstanceOf(AIMessage); + expect(resultListContent).toBeInstanceOf(this.invokeResponseType); } /** @@ -317,7 +337,7 @@ export abstract class ChatModelIntegrationTests< messagesStringContent, callOptions ); - expect(resultStringContent).toBeInstanceOf(AIMessage); + expect(resultStringContent).toBeInstanceOf(this.invokeResponseType); } async testWithStructuredOutput() { @@ -358,7 +378,7 @@ export abstract class ChatModelIntegrationTests< }); const resultStringContent = await modelWithTools.invoke("What is 1 + 2"); - expect(resultStringContent.raw).toBeInstanceOf(AIMessage); + expect(resultStringContent.raw).toBeInstanceOf(this.invokeResponseType); expect(resultStringContent.parsed.a).toBeDefined(); expect([1, 2].includes(resultStringContent.parsed.a)).toBeTruthy(); expect(resultStringContent.parsed.b).toBeDefined(); diff --git a/yarn.lock b/yarn.lock index 8cffe69926ff..cd629e4535e1 100644 --- a/yarn.lock +++ b/yarn.lock @@ -9680,6 +9680,17 @@ __metadata: languageName: unknown linkType: soft +"@langchain/google-common@npm:latest": + version: 0.0.18 + resolution: "@langchain/google-common@npm:0.0.18" + dependencies: + "@langchain/core": ">0.1.56 <0.3.0" + uuid: ^9.0.0 + zod-to-json-schema: ^3.22.4 + checksum: afd120d664f7c3da35cef4323a9c5c50063021e1b64b8e259b6e9ff7fdfb4b270d7393b3b4e78a280f35578005ff8198c6f1283d8e1ccd651db10a15ef035534 + languageName: node + linkType: hard + "@langchain/google-common@workspace:*, @langchain/google-common@workspace:libs/langchain-google-common, @langchain/google-common@~0.0.18": version: 0.0.0-use.local resolution: 
"@langchain/google-common@workspace:libs/langchain-google-common" @@ -9721,7 +9732,6 @@ __metadata: "@langchain/core": ">0.1.56 <0.3.0" "@langchain/google-common": ~0.0.18 "@langchain/scripts": ~0.0.14 - "@langchain/standard-tests": 0.0.0 "@swc/core": ^1.3.90 "@swc/jest": ^0.2.29 "@tsconfig/recommended": ^1.0.3 @@ -9820,8 +9830,10 @@ __metadata: dependencies: "@jest/globals": ^29.5.0 "@langchain/core": ">0.1.56 <0.3.0" + "@langchain/google-common": latest "@langchain/google-gauth": ~0.0.18 "@langchain/scripts": ~0.0.14 + "@langchain/standard-tests": 0.0.0 "@swc/core": ^1.3.90 "@swc/jest": ^0.2.29 "@tsconfig/recommended": ^1.0.3