Skip to content

Commit

Permalink
Add tests for JSON mode
Browse files — browse the repository at this point in the history
  • Loading branch information
ccurme committed Aug 16, 2024
1 parent 253ceca commit bdfa370
Show file tree
Hide file tree
Showing 2 changed files with 88 additions and 0 deletions.
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
"""Test AzureChatOpenAI wrapper."""

import json
import os
from typing import Any, Optional

Expand Down Expand Up @@ -225,3 +226,51 @@ def test_openai_invoke(llm: AzureChatOpenAI) -> None:
result = llm.invoke("I'm Pickle Rick", config=dict(tags=["foo"]))
assert isinstance(result.content, str)
assert result.response_metadata.get("model_name") is not None


def test_json_mode() -> None:
    """Check AzureChatOpenAI JSON mode for both invoke and stream.

    The model is asked to echo a small object with
    ``response_format={"type": "json_object"}``; the response content must
    parse as JSON and equal ``{"a": 1}``.
    """
    model = AzureChatOpenAI(  # type: ignore[call-arg]
        deployment_name="gpt-4o-mini",
        openai_api_version=OPENAI_API_VERSION,
        azure_endpoint=OPENAI_API_BASE,
        openai_api_key=OPENAI_API_KEY,
    )
    message = model.invoke(
        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
    )
    assert isinstance(message.content, str)
    assert json.loads(message.content) == {"a": 1}

    # Streaming: accumulate chunks, then verify the joined content is valid JSON.
    aggregated: Optional[BaseMessageChunk] = None
    for piece in model.stream(
        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
    ):
        if aggregated is None:
            aggregated = piece
        else:
            aggregated = aggregated + piece
    assert isinstance(aggregated, AIMessageChunk)
    assert isinstance(aggregated.content, str)
    assert json.loads(aggregated.content) == {"a": 1}


async def test_json_mode_async() -> None:
    """Async variant: AzureChatOpenAI JSON mode via ainvoke and astream.

    Mirrors ``test_json_mode`` — the content returned under
    ``response_format={"type": "json_object"}`` must parse to ``{"a": 1}``.
    """
    model = AzureChatOpenAI(  # type: ignore[call-arg]
        deployment_name="gpt-4o-mini",
        openai_api_version=OPENAI_API_VERSION,
        azure_endpoint=OPENAI_API_BASE,
        openai_api_key=OPENAI_API_KEY,
    )
    message = await model.ainvoke(
        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
    )
    assert isinstance(message.content, str)
    assert json.loads(message.content) == {"a": 1}

    # Streaming: accumulate chunks, then verify the joined content is valid JSON.
    aggregated: Optional[BaseMessageChunk] = None
    async for piece in model.astream(
        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
    ):
        if aggregated is None:
            aggregated = piece
        else:
            aggregated = aggregated + piece
    assert isinstance(aggregated, AIMessageChunk)
    assert isinstance(aggregated.content, str)
    assert json.loads(aggregated.content) == {"a": 1}
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""Test ChatOpenAI chat model."""

import base64
import json
from typing import Any, AsyncIterator, List, Literal, Optional, cast

import httpx
Expand Down Expand Up @@ -865,3 +866,41 @@ class InvalidJoke(BaseModelProper):
chat.invoke("Tell me a joke about cats.")
with pytest.raises(openai.BadRequestError):
next(chat.stream("Tell me a joke about cats."))


def test_json_mode() -> None:
    """Check ChatOpenAI JSON mode for both invoke and stream.

    With ``response_format={"type": "json_object"}`` the response content
    must parse as JSON and equal ``{"a": 1}``.
    """
    model = ChatOpenAI(model="gpt-4o-mini", temperature=0)
    message = model.invoke(
        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
    )
    assert isinstance(message.content, str)
    assert json.loads(message.content) == {"a": 1}

    # Streaming: accumulate chunks, then verify the joined content is valid JSON.
    aggregated: Optional[BaseMessageChunk] = None
    for piece in model.stream(
        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
    ):
        if aggregated is None:
            aggregated = piece
        else:
            aggregated = aggregated + piece
    assert isinstance(aggregated, AIMessageChunk)
    assert isinstance(aggregated.content, str)
    assert json.loads(aggregated.content) == {"a": 1}


async def test_json_mode_async() -> None:
    """Async variant: ChatOpenAI JSON mode via ainvoke and astream.

    Mirrors ``test_json_mode`` — the content returned under
    ``response_format={"type": "json_object"}`` must parse to ``{"a": 1}``.
    """
    model = ChatOpenAI(model="gpt-4o-mini", temperature=0)
    message = await model.ainvoke(
        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
    )
    assert isinstance(message.content, str)
    assert json.loads(message.content) == {"a": 1}

    # Streaming: accumulate chunks, then verify the joined content is valid JSON.
    aggregated: Optional[BaseMessageChunk] = None
    async for piece in model.astream(
        "Return this as json: {'a': 1}", response_format={"type": "json_object"}
    ):
        if aggregated is None:
            aggregated = piece
        else:
            aggregated = aggregated + piece
    assert isinstance(aggregated, AIMessageChunk)
    assert isinstance(aggregated.content, str)
    assert json.loads(aggregated.content) == {"a": 1}

0 comments on commit bdfa370

Please sign in to comment.