
Commit f35d4e5
Merge branch 'main' into add-recursive-chunking
davidsbatista committed Dec 17, 2024
2 parents 3a9d290 + ea36026 commit f35d4e5
Showing 12 changed files with 560 additions and 157 deletions.
6 changes: 3 additions & 3 deletions haystack/components/builders/chat_prompt_builder.py
@@ -9,7 +9,7 @@
 from jinja2.sandbox import SandboxedEnvironment

 from haystack import component, default_from_dict, default_to_dict, logging
-from haystack.dataclasses.chat_message import ChatMessage, ChatRole
+from haystack.dataclasses.chat_message import ChatMessage, ChatRole, TextContent

 logger = logging.getLogger(__name__)

@@ -197,10 +197,10 @@ def run(
                 if message.text is None:
                     raise ValueError(f"The provided ChatMessage has no text. ChatMessage: {message}")
                 compiled_template = self._env.from_string(message.text)
-                rendered_content = compiled_template.render(template_variables_combined)
+                rendered_text = compiled_template.render(template_variables_combined)
                 # deep copy the message to avoid modifying the original message
                 rendered_message: ChatMessage = deepcopy(message)
-                rendered_message.content = rendered_content
+                rendered_message._content = [TextContent(text=rendered_text)]
                 processed_messages.append(rendered_message)
             else:
                 processed_messages.append(message)
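
For context, a minimal sketch of what the updated rendering path in ChatPromptBuilder now does for a user message. This is not a verbatim excerpt: it assumes Haystack 2.x with the new TextContent part and the ChatMessage.from_user helper, and the `_content` assignment mirrors the private-field update in the diff above rather than public API.

from copy import deepcopy

from jinja2.sandbox import SandboxedEnvironment
from haystack.dataclasses import ChatMessage, TextContent

# A user message whose text is a Jinja2 template.
message = ChatMessage.from_user("Hello, {{ name }}!")

# Render the template held in the message text.
rendered_text = SandboxedEnvironment().from_string(message.text).render({"name": "Haystack"})

# Deep copy so the original message is left untouched, then replace the
# content with a single TextContent part, as the change above now does.
rendered_message = deepcopy(message)
rendered_message._content = [TextContent(text=rendered_text)]

print(rendered_message.text)  # Hello, Haystack!
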
7 changes: 1 addition & 6 deletions haystack/components/generators/chat/hugging_face_api.py
@@ -25,13 +25,8 @@ def _convert_message_to_hfapi_format(message: ChatMessage) -> Dict[str, str]:
     :returns: A dictionary with the following keys:
         - `role`
         - `content`
-        - `name` (optional)
     """
-    formatted_msg = {"role": message.role.value, "content": message.content}
-    if message.name:
-        formatted_msg["name"] = message.name
-
-    return formatted_msg
+    return {"role": message.role.value, "content": message.text or ""}


@component
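
A minimal sketch of the simplified conversion that _convert_message_to_hfapi_format now performs, assuming Haystack 2.x where ChatMessage.text may be None and is therefore defaulted to an empty string; the `name` key is no longer emitted.

from haystack.dataclasses import ChatMessage

message = ChatMessage.from_user("What is the capital of France?")

# Only role and content are sent; a missing text falls back to "".
hf_format = {"role": message.role.value, "content": message.text or ""}
print(hf_format)  # {'role': 'user', 'content': 'What is the capital of France?'}
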
9 changes: 2 additions & 7 deletions haystack/components/generators/openai_utils.py
@@ -13,16 +13,11 @@ def _convert_message_to_openai_format(message: ChatMessage) -> Dict[str, str]:
     See the [API reference](https://platform.openai.com/docs/api-reference/chat/create) for details.

-    :returns: A dictionary with the following key:
+    :returns: A dictionary with the following keys:
         - `role`
         - `content`
-        - `name` (optional)
     """
     if message.text is None:
         raise ValueError(f"The provided ChatMessage has no text. ChatMessage: {message}")

-    openai_msg = {"role": message.role.value, "content": message.text}
-    if message.name:
-        openai_msg["name"] = message.name
-
-    return openai_msg
+    return {"role": message.role.value, "content": message.text}
5 changes: 4 additions & 1 deletion haystack/dataclasses/__init__.py
@@ -4,7 +4,7 @@

 from haystack.dataclasses.answer import Answer, ExtractedAnswer, GeneratedAnswer
 from haystack.dataclasses.byte_stream import ByteStream
-from haystack.dataclasses.chat_message import ChatMessage, ChatRole
+from haystack.dataclasses.chat_message import ChatMessage, ChatRole, TextContent, ToolCall, ToolCallResult
 from haystack.dataclasses.document import Document
 from haystack.dataclasses.sparse_embedding import SparseEmbedding
 from haystack.dataclasses.streaming_chunk import StreamingChunk
@@ -17,6 +17,9 @@
     "ByteStream",
     "ChatMessage",
     "ChatRole",
+    "ToolCall",
+    "ToolCallResult",
+    "TextContent",
     "StreamingChunk",
     "SparseEmbedding",
 ]
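
With the widened export list, the new message-content dataclasses can be imported directly from haystack.dataclasses. A minimal sketch follows; the ToolCall constructor arguments (tool_name, arguments) are an assumption for illustration and should be checked against the dataclass definition in haystack.dataclasses.chat_message.

from haystack.dataclasses import ChatMessage, TextContent, ToolCall, ToolCallResult  # all four are now exported

# TextContent is the content part used for plain-text messages.
message = ChatMessage.from_user("Hello!")
print(message.text, TextContent(text="Hello!"))

# Assumed field names for ToolCall; verify before relying on them.
call = ToolCall(tool_name="get_weather", arguments={"city": "Paris"})
print(call)
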