From 2cea61e6237d44e56e32150cb55ece811ac0deb3 Mon Sep 17 00:00:00 2001
From: Chester Curme
Date: Fri, 16 Aug 2024 10:28:37 -0400
Subject: [PATCH] add fix

---
 .../langchain_openai/chat_models/base.py | 22 ++++++++++++++++------
 1 file changed, 16 insertions(+), 6 deletions(-)

diff --git a/libs/partners/openai/langchain_openai/chat_models/base.py b/libs/partners/openai/langchain_openai/chat_models/base.py
index d35ff75ecb65e..5028c218c2400 100644
--- a/libs/partners/openai/langchain_openai/chat_models/base.py
+++ b/libs/partners/openai/langchain_openai/chat_models/base.py
@@ -625,8 +625,12 @@ def _generate(
                     "Cannot currently include response headers when response_format is "
                     "specified."
                 )
-            payload.pop("stream")
-            response = self.root_client.beta.chat.completions.parse(**payload)
+            if self.root_client:
+                payload.pop("stream")
+                response = self.root_client.beta.chat.completions.parse(**payload)
+            else:
+                raw_response = self.client.with_raw_response.create(**payload)
+                response = raw_response.parse()
         elif self.include_response_headers:
             raw_response = self.client.with_raw_response.create(**payload)
             response = raw_response.parse()
@@ -813,10 +817,16 @@ async def _agenerate(
                     "Cannot currently include response headers when response_format is "
                     "specified."
                 )
-            payload.pop("stream")
-            response = await self.root_async_client.beta.chat.completions.parse(
-                **payload
-            )
+            if self.root_async_client:
+                payload.pop("stream")
+                response = await self.root_async_client.beta.chat.completions.parse(
+                    **payload
+                )
+            else:
+                raw_response = await self.async_client.with_raw_response.create(
+                    **payload
+                )
+                response = raw_response.parse()
         elif self.include_response_headers:
             raw_response = await self.async_client.with_raw_response.create(**payload)
             response = raw_response.parse()
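
Reviewer note: for anyone unfamiliar with the two client attributes, the sketch below exercises the same fallback directly against the `openai` Python SDK. In `ChatOpenAI`, `root_client` is the full `OpenAI` client and `client` is its `chat.completions` resource; the `make_request` helper, the model name, and the payload contents here are illustrative assumptions, not code from this patch.

```python
# A minimal sketch, assuming openai>=1.40 and OPENAI_API_KEY in the
# environment. `make_request` and the payload below are illustrative;
# they are not part of the patch or of ChatOpenAI.
from typing import Any, Optional

import openai


def make_request(
    root_client: Optional[openai.OpenAI],
    client: Any,  # a `chat.completions` resource, as `self.client` is in ChatOpenAI
    payload: dict,
) -> Any:
    """Mirror the sync hunk: prefer the beta parse endpoint when a full
    client is available, otherwise fall back to a raw request."""
    if root_client:
        # The beta parse endpoint takes no `stream` argument, hence the pop.
        payload.pop("stream", None)
        return root_client.beta.chat.completions.parse(**payload)
    # Fallback path: `create` still honors `response_format`, and
    # `.parse()` deserializes the raw HTTP response into a ChatCompletion.
    raw_response = client.with_raw_response.create(**payload)
    return raw_response.parse()


root = openai.OpenAI()
payload = {
    "model": "gpt-4o-mini",
    "messages": [{"role": "user", "content": "Greet me in JSON."}],
    "response_format": {"type": "json_object"},
    "stream": False,
}
response = make_request(root, root.chat.completions, payload)
print(response.choices[0].message.content)
```

The fallback branch is the same call already used in the `include_response_headers` branch below it, so behavior stays consistent whichever client attribute is populated.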