diff --git a/examples/src/models/llm/llm_streaming_stream_method.ts b/examples/src/models/llm/llm_streaming_stream_method.ts
index 04310c190d23..574e87127f47 100644
--- a/examples/src/models/llm/llm_streaming_stream_method.ts
+++ b/examples/src/models/llm/llm_streaming_stream_method.ts
@@ -1,7 +1,5 @@
 import { OpenAI } from "langchain/llms/openai";
-// To enable streaming, we pass in `streaming: true` to the LLM constructor.
-// Additionally, we pass in a handler for the `handleLLMNewToken` event.
 const model = new OpenAI({
   maxTokens: 25,
 });
 