
feat!: new ChatMessage #8640

Merged · 15 commits · Dec 17, 2024
6 changes: 3 additions & 3 deletions haystack/components/builders/chat_prompt_builder.py
@@ -9,7 +9,7 @@
 from jinja2.sandbox import SandboxedEnvironment

 from haystack import component, default_from_dict, default_to_dict, logging
-from haystack.dataclasses.chat_message import ChatMessage, ChatRole
+from haystack.dataclasses.chat_message import ChatMessage, ChatRole, TextContent

 logger = logging.getLogger(__name__)

@@ -197,10 +197,10 @@ def run(
                 if message.text is None:
                     raise ValueError(f"The provided ChatMessage has no text. ChatMessage: {message}")
                 compiled_template = self._env.from_string(message.text)
-                rendered_content = compiled_template.render(template_variables_combined)
+                rendered_text = compiled_template.render(template_variables_combined)
                 # deep copy the message to avoid modifying the original message
                 rendered_message: ChatMessage = deepcopy(message)
-                rendered_message.content = rendered_content
+                rendered_message._content = [TextContent(text=rendered_text)]
                 processed_messages.append(rendered_message)
             else:
                 processed_messages.append(message)
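To make the new assignment concrete, here is a minimal sketch of what the rendered message looks like after this change. It assumes the post-PR `ChatMessage` API (a `.text` property that reads a single `TextContent` part and a `_content` list); the plain string replace below only stands in for the Jinja rendering done in the hunk above.

```python
from copy import deepcopy

from haystack.dataclasses import ChatMessage, TextContent

message = ChatMessage.from_user("Hello, {{ name }}!")
assert message.text == "Hello, {{ name }}!"

# Stand-in for compiled_template.render(...) in the hunk above.
rendered_text = message.text.replace("{{ name }}", "Ada")

# Deep copy so the original template message stays untouched, then swap the content parts.
rendered_message = deepcopy(message)
rendered_message._content = [TextContent(text=rendered_text)]

assert rendered_message.text == "Hello, Ada!"
assert message.text == "Hello, {{ name }}!"
```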
7 changes: 1 addition & 6 deletions haystack/components/generators/chat/hugging_face_api.py
@@ -25,13 +25,8 @@ def _convert_message_to_hfapi_format(message: ChatMessage) -> Dict[str, str]:
     :returns: A dictionary with the following keys:
         - `role`
         - `content`
-        - `name` (optional)
     """
-    formatted_msg = {"role": message.role.value, "content": message.content}
-    if message.name:
-        formatted_msg["name"] = message.name
-
-    return formatted_msg
+    return {"role": message.role.value, "content": message.text or ""}
Member Author:
These changes to the HF API Chat Generator are only temporary: we will override them soon when porting support for Tools.
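For context, a small sketch of what the simplified helper now produces. The `ChatMessage.from_user` constructor is assumed from the new dataclass API this PR introduces, and the import of the private helper is only for illustration.

```python
from haystack.components.generators.chat.hugging_face_api import _convert_message_to_hfapi_format
from haystack.dataclasses import ChatMessage

# A plain text message maps to the minimal {"role", "content"} shape;
# the optional "name" key is no longer forwarded.
msg = ChatMessage.from_user("What is the capital of France?")
assert _convert_message_to_hfapi_format(msg) == {
    "role": "user",
    "content": "What is the capital of France?",
}

# A message without a text part falls back to an empty string via `message.text or ""`.
```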



9 changes: 2 additions & 7 deletions haystack/components/generators/openai_utils.py
@@ -13,16 +13,11 @@ def _convert_message_to_openai_format(message: ChatMessage) -> Dict[str, str]:

     See the [API reference](https://platform.openai.com/docs/api-reference/chat/create) for details.

-    :returns: A dictionary with the following key:
+    :returns: A dictionary with the following keys:
         - `role`
         - `content`
-        - `name` (optional)
     """
     if message.text is None:
         raise ValueError(f"The provided ChatMessage has no text. ChatMessage: {message}")

-    openai_msg = {"role": message.role.value, "content": message.text}
-    if message.name:
-        openai_msg["name"] = message.name
-
-    return openai_msg
+    return {"role": message.role.value, "content": message.text}
Member Author:
These changes to the OpenAI Chat Generator are only temporary: we will override them soon when porting support for Tools.
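In contrast to the HF API helper above, this converter keeps the explicit guard on `message.text`, so a message with no text part raises instead of being sent with empty content. A short sketch; the tool-call-only message below is a hypothetical example whose constructor signature is assumed from the new dataclasses in this PR.

```python
from haystack.components.generators.openai_utils import _convert_message_to_openai_format
from haystack.dataclasses import ChatMessage, ToolCall

text_msg = ChatMessage.from_user("Summarize this document.")
assert _convert_message_to_openai_format(text_msg) == {"role": "user", "content": "Summarize this document."}

# Hypothetical assistant message that carries only a tool call and no text part.
tool_only = ChatMessage.from_assistant(tool_calls=[ToolCall(tool_name="search", arguments={"query": "Haystack"})])
try:
    _convert_message_to_openai_format(tool_only)
except ValueError:
    pass  # the guard above rejects messages without text
```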

5 changes: 4 additions & 1 deletion haystack/dataclasses/__init__.py
@@ -4,7 +4,7 @@

 from haystack.dataclasses.answer import Answer, ExtractedAnswer, GeneratedAnswer
 from haystack.dataclasses.byte_stream import ByteStream
-from haystack.dataclasses.chat_message import ChatMessage, ChatRole
+from haystack.dataclasses.chat_message import ChatMessage, ChatRole, TextContent, ToolCall, ToolCallResult
 from haystack.dataclasses.document import Document
 from haystack.dataclasses.sparse_embedding import SparseEmbedding
 from haystack.dataclasses.streaming_chunk import StreamingChunk
@@ -17,6 +17,9 @@
     "ByteStream",
     "ChatMessage",
     "ChatRole",
+    "ToolCall",
+    "ToolCallResult",
+    "TextContent",
     "StreamingChunk",
     "SparseEmbedding",
 ]
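The newly exported names are the content types a `ChatMessage` can now carry. A hedged sketch of how they fit together; the constructor and property names below are assumptions based on this PR rather than anything shown in this hunk.

```python
from haystack.dataclasses import ChatMessage, TextContent, ToolCall, ToolCallResult

# Text content is what ChatMessage.from_user / from_system wrap for you.
user_msg = ChatMessage.from_user("What is the weather in Paris?")
assert user_msg.text == "What is the weather in Paris?"

# Assumed: an assistant message can carry tool calls instead of text.
call = ToolCall(tool_name="weather", arguments={"city": "Paris"})
assistant_msg = ChatMessage.from_assistant(tool_calls=[call])
assert assistant_msg.tool_calls == [call]
assert assistant_msg.text is None

# Assumed: a tool message wraps the result together with the originating call,
# exposed as a ToolCallResult.
tool_msg = ChatMessage.from_tool(tool_result="22 degrees and sunny", origin=call)
assert isinstance(tool_msg.tool_call_result, ToolCallResult)
```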