Skip to content

Commit

Permalink
mistralai[patch]: Fix flaky test using callbacks (#6001)
Browse files Browse the repository at this point in the history
* mistralai[patch]: Fix flaky test using callbacks

* chore: lint files
Loading branch information
bracesproul authored Jul 8, 2024
1 parent 8f93bc5 commit cd54246
Showing 1 changed file with 34 additions and 19 deletions.
53 changes: 34 additions & 19 deletions libs/langchain-mistralai/src/tests/llms.int.test.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,12 @@
/* eslint-disable no-process-env */

import { test, expect } from "@jest/globals";
import { CallbackManager } from "@langchain/core/callbacks/manager";
import { MistralAI } from "../llms.js";

// Save the original value of the 'LANGCHAIN_CALLBACKS_BACKGROUND' environment variable
const originalBackground = process.env.LANGCHAIN_CALLBACKS_BACKGROUND;

test("Test MistralAI", async () => {
const model = new MistralAI({
maxTokens: 5,
Expand Down Expand Up @@ -75,27 +80,37 @@ test("Test MistralAI with signal in call options", async () => {
}, 5000);

test("Test MistralAI in streaming mode", async () => {
  // Running LangChain callbacks in the background will sometimes cause the callbackManager to execute
  // after the test/llm call has already finished & returned. Set that environment variable to false
  // to prevent that from happening.
  process.env.LANGCHAIN_CALLBACKS_BACKGROUND = "false";

  try {
    // Accumulators populated by the streaming callback below; asserted on
    // after the invoke() call completes.
    let nrNewTokens = 0;
    let streamedCompletion = "";

    const model = new MistralAI({
      maxTokens: 5,
      model: "codestral-latest",
      streaming: true,
      callbacks: CallbackManager.fromHandlers({
        // Fires once per streamed token; counts tokens and rebuilds the
        // full completion so it can be compared to the returned value.
        async handleLLMNewToken(token: string) {
          nrNewTokens += 1;
          streamedCompletion += token;
        },
      }),
    });
    const res = await model.invoke(
      "Log 'Hello world' to the console in javascript: "
    );
    console.log({ res }, "Test MistralAI in streaming mode");

    // At least one token must have streamed, and the concatenation of the
    // streamed tokens must equal the final returned completion.
    expect(nrNewTokens > 0).toBe(true);
    expect(res).toBe(streamedCompletion);
  } finally {
    // Reset the environment variable so later tests see the original value.
    process.env.LANGCHAIN_CALLBACKS_BACKGROUND = originalBackground;
  }
});

test("Test MistralAI stream method", async () => {
Expand Down

0 comments on commit cd54246

Please sign in to comment.