From a722a22761f3671bfc299467f28769b1eb789f74 Mon Sep 17 00:00:00 2001
From: Stefano Fiorucci
Date: Tue, 15 Oct 2024 14:49:16 +0200
Subject: [PATCH] fix: make sure that streaming works with function calls -
 (drop python3.8) (#1137)

* fix streaming w function calls - drop python 3.8

* keep metadata var

* fmt
---
 integrations/google_ai/pyproject.toml         |  3 +--
 .../generators/google_ai/chat/gemini.py       | 19 ++++++++++---------
 2 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/integrations/google_ai/pyproject.toml b/integrations/google_ai/pyproject.toml
index d06e0a53f..88fbcd61c 100644
--- a/integrations/google_ai/pyproject.toml
+++ b/integrations/google_ai/pyproject.toml
@@ -7,7 +7,7 @@ name = "google-ai-haystack"
 dynamic = ["version"]
 description = 'Use models like Gemini via Makersuite'
 readme = "README.md"
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 license = "Apache-2.0"
 keywords = []
 authors = [{ name = "deepset GmbH", email = "info@deepset.ai" }]
@@ -15,7 +15,6 @@ classifiers = [
   "License :: OSI Approved :: Apache Software License",
   "Development Status :: 4 - Beta",
   "Programming Language :: Python",
-  "Programming Language :: Python :: 3.8",
   "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
diff --git a/integrations/google_ai/src/haystack_integrations/components/generators/google_ai/chat/gemini.py b/integrations/google_ai/src/haystack_integrations/components/generators/google_ai/chat/gemini.py
index 56c84968b..8efa8cda7 100644
--- a/integrations/google_ai/src/haystack_integrations/components/generators/google_ai/chat/gemini.py
+++ b/integrations/google_ai/src/haystack_integrations/components/generators/google_ai/chat/gemini.py
@@ -347,20 +347,21 @@ def _get_stream_response(
         replies: List[ChatMessage] = []
         for chunk in stream:
             content: Union[str, Dict[str, Any]] = ""
-            metadata = chunk.to_dict()  # we store whole chunk as metadata in streaming calls
-            for candidate in chunk.candidates:
-                for part in candidate.content.parts:
-                    if part.text != "":
-                        content = part.text
+            dict_chunk = chunk.to_dict()
+            metadata = dict(dict_chunk)  # we copy and store the whole chunk as metadata in streaming calls
+            for candidate in dict_chunk["candidates"]:
+                for part in candidate["content"]["parts"]:
+                    if "text" in part and part["text"] != "":
+                        content = part["text"]
                         replies.append(ChatMessage(content=content, role=ChatRole.ASSISTANT, meta=metadata, name=None))
-                    elif part.function_call is not None:
-                        metadata["function_call"] = part.function_call
-                        content = dict(part.function_call.args.items())
+                    elif "function_call" in part and len(part["function_call"]) > 0:
+                        metadata["function_call"] = part["function_call"]
+                        content = part["function_call"]["args"]
                         replies.append(
                             ChatMessage(
                                 content=content,
                                 role=ChatRole.ASSISTANT,
-                                name=part.function_call.name,
+                                name=part["function_call"]["name"],
                                 meta=metadata,
                             )
                         )
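
The core of the fix is the switch from proto attribute access (part.text, part.function_call) to plain-dict access on chunk.to_dict(), which sidesteps the accessor behavior on function-call parts during streaming. Below is a minimal, self-contained sketch of that parsing logic run against hand-made dicts shaped like the assumed output of chunk.to_dict() (candidates -> content -> parts, each part carrying either "text" or "function_call"); the fake chunks, the tool name get_weather, and its args are illustrative stand-ins, not real API responses.

    # Sketch only: exercises the dict-based parsing from the patch on fake chunks.
    from typing import Any, Dict, List, Union

    # Assumed shape of chunk.to_dict() output; values are invented for illustration.
    fake_stream: List[Dict[str, Any]] = [
        {"candidates": [{"content": {"parts": [{"text": "Checking the weather..."}]}}]},
        {
            "candidates": [
                {
                    "content": {
                        "parts": [
                            {"function_call": {"name": "get_weather", "args": {"city": "Berlin"}}}
                        ]
                    }
                }
            ]
        },
    ]

    for dict_chunk in fake_stream:
        content: Union[str, Dict[str, Any]] = ""
        metadata = dict(dict_chunk)  # shallow copy, as in the patch, so mutating it leaves the chunk intact
        for candidate in dict_chunk["candidates"]:
            for part in candidate["content"]["parts"]:
                if "text" in part and part["text"] != "":
                    content = part["text"]
                    print("text reply:", content)
                elif "function_call" in part and len(part["function_call"]) > 0:
                    metadata["function_call"] = part["function_call"]
                    content = part["function_call"]["args"]
                    print("function call:", part["function_call"]["name"], content)

Running the sketch prints one text reply and one function call, mirroring how the patched _get_stream_response builds a ChatMessage per part: membership checks like "text" in part never raise on parts of the other kind, which is presumably why the dict route is safer here than attribute access.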