From 5a89f4a35d9ad58d3a8fcbba53bb4ebf4bef6d23 Mon Sep 17 00:00:00 2001 From: bracesproul Date: Wed, 31 Jul 2024 11:57:16 -0700 Subject: [PATCH] tmp --- libs/langchain-google-common/src/chat_models.ts | 1 + libs/langchain-google-common/src/connection.ts | 2 +- libs/langchain-google-common/src/llms.ts | 2 ++ .../langchain-google-vertexai/src/tests/llms.int.test.ts | 9 +++++++++ 4 files changed, 13 insertions(+), 1 deletion(-) diff --git a/libs/langchain-google-common/src/chat_models.ts b/libs/langchain-google-common/src/chat_models.ts index 8d3884e94295..ecd13288c0bd 100644 --- a/libs/langchain-google-common/src/chat_models.ts +++ b/libs/langchain-google-common/src/chat_models.ts @@ -357,6 +357,7 @@ export abstract class ChatGoogleBase ): AsyncGenerator { // Make the call as a streaming request const parameters = this.invocationParams(options); + console.log("IN CHAT MODEL", this.model) const response = await this.streamedConnection.request( _messages, parameters, diff --git a/libs/langchain-google-common/src/connection.ts b/libs/langchain-google-common/src/connection.ts index cdc923922b07..75fdeb51a30b 100644 --- a/libs/langchain-google-common/src/connection.ts +++ b/libs/langchain-google-common/src/connection.ts @@ -106,7 +106,7 @@ export abstract class GoogleConnection< } else { opts.responseType = "json"; } - + console.dir(opts, { depth: null }); const callResponse = await this.caller.callWithOptions( { signal: options?.signal }, async () => this.client.request(opts) diff --git a/libs/langchain-google-common/src/llms.ts b/libs/langchain-google-common/src/llms.ts index 347098177186..50cad905fff7 100644 --- a/libs/langchain-google-common/src/llms.ts +++ b/libs/langchain-google-common/src/llms.ts @@ -187,6 +187,7 @@ export abstract class GoogleBaseLLM prompt: string, options: this["ParsedCallOptions"] ): Promise { + console.log("this.model", this.model) const parameters = copyAIModelParams(this, options); const result = await this.connection.request(prompt, 
parameters, options); const ret = safeResponseToString(result, this.safetyHandler); @@ -290,6 +291,7 @@ export abstract class GoogleBaseLLM input: BaseLanguageModelInput, options?: BaseLanguageModelCallOptions ): Promise { + console.log("this.model", this.model); const stream = await this._streamIterator(input, options); let generatedOutput = ""; for await (const chunk of stream) { diff --git a/libs/langchain-google-vertexai/src/tests/llms.int.test.ts b/libs/langchain-google-vertexai/src/tests/llms.int.test.ts index a09dd7c880d5..2ec506a64b73 100644 --- a/libs/langchain-google-vertexai/src/tests/llms.int.test.ts +++ b/libs/langchain-google-vertexai/src/tests/llms.int.test.ts @@ -219,3 +219,12 @@ describe("GAuth LLM gai", () => { // console.log("res", res); }); }); + +test("invoke", async () => { + const model = new VertexAI({ + model: "gemini-1.5-pro", + }); + + const res = await model.invoke("1 + 1 = "); + console.log("res", res); +}); \ No newline at end of file