
Commit

fmt
baskaryan committed Sep 18, 2024
1 parent c1c24c8 commit 5a968e7
Showing 2 changed files with 38 additions and 0 deletions.
22 changes: 22 additions & 0 deletions libs/partners/openai/langchain_openai/chat_models/azure.py
@@ -527,6 +527,28 @@ class Joke(BaseModel):
Used for tracing and token counting. Does NOT affect completion.
"""

disabled_params: Optional[Dict[str, Any]] = Field(default=None)
"""Parameters of the OpenAI client or chat.completions endpoint that should be
disabled for the given model.
Should be specified as ``{"param": None | ['val1', 'val2']}`` where the key is the
parameter and the value is either None, meaning that the parameter should never be
used, or a list of disabled values for the parameter.
For example, older models may not support the 'parallel_tool_calls' parameter at
all, in which case ``disabled_params={"parallel_tool_calls": None}`` can be passed
in.
If a parameter is disabled then it will not be used by default in any methods, e.g.
in
:meth:`~langchain_openai.chat_models.azure.AzureChatOpenAI.with_structured_output`.
However, this does not prevent a user from directly passing in the parameter during
invocation.
By default, unless ``model_name="gpt-4o"`` is specified, 'parallel_tool_calls' is
disabled.
"""

@classmethod
def get_lc_namespace(cls) -> List[str]:
"""Get the namespace of the langchain object."""
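A minimal usage sketch for the new ``disabled_params`` field on ``AzureChatOpenAI``; the endpoint, deployment name, and API version below are placeholders, not values from this commit:

from langchain_openai import AzureChatOpenAI

llm = AzureChatOpenAI(
    azure_endpoint="https://example.openai.azure.com/",  # placeholder endpoint
    azure_deployment="my-deployment",  # placeholder deployment name
    api_version="2024-06-01",  # placeholder API version
    # Never send 'parallel_tool_calls', e.g. for an older model that rejects it.
    disabled_params={"parallel_tool_calls": None},
)

Per the docstring, the list form, e.g. ``disabled_params={"some_param": ["val1"]}`` (hypothetical parameter name), disables only specific values of a parameter rather than the parameter as a whole.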
16 changes: 16 additions & 0 deletions libs/partners/openai/langchain_openai/chat_models/base.py
@@ -455,6 +455,22 @@ class BaseChatOpenAI(BaseChatModel):
include_response_headers: bool = False
"""Whether to include response headers in the output message response_metadata."""
disabled_params: Optional[Dict[str, Any]] = Field(default=None)
"""Parameters of the OpenAI client or chat.completions endpoint that should be
disabled for the given model.
Should be specified as ``{"param": None | ['val1', 'val2']}`` where the key is the
parameter and the value is either None, meaning that the parameter should never be
used, or a list of disabled values for the parameter.
For example, older models may not support the 'parallel_tool_calls' parameter at
all, in which case ``disabled_params={"parallel_tool_calls": None}`` can be passed
in.
If a parameter is disabled then it will not be used by default in any methods, e.g.
in :meth:`~langchain_openai.chat_models.base.ChatOpenAI.with_structured_output`.
However, this does not prevent a user from directly passing in the parameter during
invocation.
"""

model_config = ConfigDict(populate_by_name=True)

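A minimal sketch of how ``disabled_params`` interacts with ``with_structured_output`` on ``ChatOpenAI``, per the docstring above; the model name is a placeholder for an older model without parallel tool call support:

from pydantic import BaseModel
from langchain_openai import ChatOpenAI


class Joke(BaseModel):
    setup: str
    punchline: str


llm = ChatOpenAI(
    model="gpt-3.5-turbo",  # placeholder model name
    disabled_params={"parallel_tool_calls": None},
)

# The disabled parameter is omitted by default in helper methods like
# with_structured_output; a caller can still pass it explicitly at invocation.
structured_llm = llm.with_structured_output(Joke)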
