diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py b/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
index 3436f165d7cc1..f5787aea65c1c 100644
--- a/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
+++ b/libs/partners/openai/tests/integration_tests/chat_models/test_azure.py
@@ -1,5 +1,6 @@
 """Test AzureChatOpenAI wrapper."""
 
+import json
 import os
 from typing import Any, Optional
 
@@ -225,3 +226,51 @@ def test_openai_invoke(llm: AzureChatOpenAI) -> None:
     result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
     assert isinstance(result.content, str)
     assert result.response_metadata.get("model_name") is not None
+
+
+def test_json_mode() -> None:
+    llm = AzureChatOpenAI(  # type: ignore[call-arg, call-arg, call-arg]
+        deployment_name="gpt-4o-mini",
+        openai_api_version=OPENAI_API_VERSION,
+        azure_endpoint=OPENAI_API_BASE,
+        openai_api_key=OPENAI_API_KEY,
+    )
+    response = llm.invoke(
+        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
+    )
+    assert isinstance(response.content, str)
+    assert json.loads(response.content) == {"a": 1}
+
+    # Test streaming
+    full: Optional[BaseMessageChunk] = None
+    for chunk in llm.stream(
+        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
+    ):
+        full = chunk if full is None else full + chunk
+    assert isinstance(full, AIMessageChunk)
+    assert isinstance(full.content, str)
+    assert json.loads(full.content) == {"a": 1}
+
+
+async def test_json_mode_async() -> None:
+    llm = AzureChatOpenAI(  # type: ignore[call-arg, call-arg, call-arg]
+        deployment_name="gpt-4o-mini",
+        openai_api_version=OPENAI_API_VERSION,
+        azure_endpoint=OPENAI_API_BASE,
+        openai_api_key=OPENAI_API_KEY,
+    )
+    response = await llm.ainvoke(
+        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
+    )
+    assert isinstance(response.content, str)
+    assert json.loads(response.content) == {"a": 1}
+
+    # Test streaming
+    full: Optional[BaseMessageChunk] = None
+    async for chunk in llm.astream(
+        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
+    ):
+        full = chunk if full is None else full + chunk
+    assert isinstance(full, AIMessageChunk)
+    assert isinstance(full.content, str)
+    assert json.loads(full.content) == {"a": 1}
diff --git a/libs/partners/openai/tests/integration_tests/chat_models/test_base.py b/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
index 3e03755b765bd..f1eb6c39a5f5c 100644
--- a/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
+++ b/libs/partners/openai/tests/integration_tests/chat_models/test_base.py
@@ -1,6 +1,7 @@
 """Test ChatOpenAI chat model."""
 
 import base64
+import json
 from typing import Any, AsyncIterator, List, Literal, Optional, cast
 
 import httpx
@@ -865,3 +866,41 @@ class InvalidJoke(BaseModelProper):
     chat.invoke("Tell me a joke about cats.")
     with pytest.raises(openai.BadRequestError):
         next(chat.stream("Tell me a joke about cats."))
+
+
+def test_json_mode() -> None:
+    llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
+    response = llm.invoke(
+        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
+    )
+    assert isinstance(response.content, str)
+    assert json.loads(response.content) == {"a": 1}
+
+    # Test streaming
+    full: Optional[BaseMessageChunk] = None
+    for chunk in llm.stream(
+        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
+    ):
+        full = chunk if full is None else full + chunk
+    assert isinstance(full, AIMessageChunk)
+    assert isinstance(full.content, str)
+    assert json.loads(full.content) == {"a": 1}
+
+
+async def test_json_mode_async() -> None:
+    llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
+    response = await llm.ainvoke(
+        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
+    )
+    assert isinstance(response.content, str)
+    assert json.loads(response.content) == {"a": 1}
+
+    # Test streaming
+    full: Optional[BaseMessageChunk] = None
+    async for chunk in llm.astream(
+        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
+    ):
+        full = chunk if full is None else full + chunk
+    assert isinstance(full, AIMessageChunk)
+    assert isinstance(full.content, str)
+    assert json.loads(full.content) == {"a": 1}