From fdadbbcc40aff91bc33617e82c6b77a93ccb6b6b Mon Sep 17 00:00:00 2001
From: "C. J. Tantay"
Date: Mon, 16 Oct 2023 01:09:51 -0700
Subject: [PATCH] Update llm_streaming_stream_method.ts

Remove an unneeded comment and unnecessary code:
- `.stream()` works without passing `streaming: true` to the LLM constructor

---
 examples/src/models/llm/llm_streaming_stream_method.ts | 2 --
 1 file changed, 2 deletions(-)

diff --git a/examples/src/models/llm/llm_streaming_stream_method.ts b/examples/src/models/llm/llm_streaming_stream_method.ts
index 2dec6326cbfb..574e87127f47 100644
--- a/examples/src/models/llm/llm_streaming_stream_method.ts
+++ b/examples/src/models/llm/llm_streaming_stream_method.ts
@@ -1,9 +1,7 @@
 import { OpenAI } from "langchain/llms/openai";
 
-// To enable streaming, we pass in `streaming: true` to the LLM constructor.
 const model = new OpenAI({
   maxTokens: 25,
-  streaming: true,
 });
 
 const stream = await model.stream("Tell me a joke.");
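
For context, a minimal sketch of how the simplified example can be consumed end to end. The import, constructor options, and `.stream()` call come straight from the patch; the `for await` loop is assumed typical usage and is not part of this change.

```ts
import { OpenAI } from "langchain/llms/openai";

// No `streaming: true` needed; `.stream()` streams on its own.
const model = new OpenAI({
  maxTokens: 25,
});

const stream = await model.stream("Tell me a joke.");

// Assumed usage: the returned stream is an async iterable of chunks.
for await (const chunk of stream) {
  console.log(chunk);
}
```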