diff --git a/integrations/cohere/src/cohere_haystack/chat/chat_generator.py b/integrations/cohere/src/cohere_haystack/chat/chat_generator.py
index be236f6ca..1e5aa0e42 100644
--- a/integrations/cohere/src/cohere_haystack/chat/chat_generator.py
+++ b/integrations/cohere/src/cohere_haystack/chat/chat_generator.py
@@ -158,7 +158,7 @@ def run(self, messages: List[ChatMessage], generation_kwargs: Optional[Dict[str,
                 stream_chunk = self._build_chunk(chunk)
                 self.streaming_callback(stream_chunk)
             chat_message = ChatMessage.from_assistant(content=response.texts)
-            chat_message.metadata.update(
+            chat_message.meta.update(
                 {
                     "model": self.model_name,
                     "usage": response.token_count,
@@ -180,9 +180,7 @@ def _build_chunk(self, chunk) -> StreamingChunk:
         :return: The StreamingChunk.
         """
         # if chunk.event_type == "text-generation":
-        chat_message = StreamingChunk(
-            content=chunk.text, metadata={"index": chunk.index, "event_type": chunk.event_type}
-        )
+        chat_message = StreamingChunk(content=chunk.text, meta={"index": chunk.index, "event_type": chunk.event_type})
         return chat_message
 
     def _build_message(self, cohere_response):
@@ -193,7 +191,7 @@ def _build_message(self, cohere_response):
         """
         content = cohere_response.text
         message = ChatMessage.from_assistant(content=content)
-        message.metadata.update(
+        message.meta.update(
             {
                 "model": self.model_name,
                 "usage": cohere_response.token_count,
diff --git a/integrations/cohere/tests/test_cohere_chat_generator.py b/integrations/cohere/tests/test_cohere_chat_generator.py
index f9ac7b2c6..20a02863a 100644
--- a/integrations/cohere/tests/test_cohere_chat_generator.py
+++ b/integrations/cohere/tests/test_cohere_chat_generator.py
@@ -245,7 +245,7 @@ def mock_iter(self): # noqa: ARG001
     )
     @pytest.mark.integration
     def test_live_run(self):
-        chat_messages = [ChatMessage(content="What's the capital of France", role=ChatRole.USER, name="", metadata={})]
+        chat_messages = [ChatMessage(content="What's the capital of France", role=ChatRole.USER, name="", meta={})]
         component = CohereChatGenerator(
             api_key=os.environ.get("COHERE_API_KEY"), generation_kwargs={"temperature": 0.8}
         )
@@ -291,7 +291,7 @@ def __call__(self, chunk: StreamingChunk) -> None:
 
         message: ChatMessage = results["replies"][0]
         assert "Paris" in message.content[0]
-        assert message.metadata["finish_reason"] == "COMPLETE"
+        assert message.meta["finish_reason"] == "COMPLETE"
         assert callback.counter > 1
         assert "Paris" in callback.responses
 
@@ -302,7 +302,7 @@ def __call__(self, chunk: StreamingChunk) -> None:
     )
     @pytest.mark.integration
     def test_live_run_with_connector(self):
-        chat_messages = [ChatMessage(content="What's the capital of France", role=ChatRole.USER, name="", metadata={})]
+        chat_messages = [ChatMessage(content="What's the capital of France", role=ChatRole.USER, name="", meta={})]
         component = CohereChatGenerator(
             api_key=os.environ.get("COHERE_API_KEY"), generation_kwargs={"temperature": 0.8}
         )
@@ -310,8 +310,8 @@ def test_live_run_with_connector(self):
         assert len(results["replies"]) == 1
         message: ChatMessage = results["replies"][0]
         assert "Paris" in message.content
-        assert message.metadata["documents"] is not None
-        assert message.metadata["citations"] is not None
+        assert message.meta["documents"] is not None
+        assert message.meta["citations"] is not None
 
     @pytest.mark.skipif(
         not os.environ.get("COHERE_API_KEY", None),
@@ -337,10 +337,10 @@ def __call__(self, chunk: StreamingChunk) -> None:
 
         message: ChatMessage = results["replies"][0]
         assert "Paris" in message.content[0]
-        assert message.metadata["finish_reason"] == "COMPLETE"
+        assert message.meta["finish_reason"] == "COMPLETE"
         assert callback.counter > 1
         assert "Paris" in callback.responses
-        assert message.metadata["documents"] is not None
-        assert message.metadata["citations"] is not None
+        assert message.meta["documents"] is not None
+        assert message.meta["citations"] is not None