From 83a98081aa43caeb7c5c9b9e538076e6eb764b5b Mon Sep 17 00:00:00 2001
From: anakin87
Date: Thu, 21 Dec 2023 13:08:39 +0100
Subject: [PATCH 1/3] replace metadata w meta in tests/examples

---
 e2e/pipelines/test_rag_pipelines.py             | 4 ++--
 examples/pipelines/rag_pipeline.py              | 2 +-
 test/components/generators/chat/test_openai.py  | 8 ++++----
 test/components/generators/test_openai.py       | 8 ++++----
 4 files changed, 11 insertions(+), 11 deletions(-)

diff --git a/e2e/pipelines/test_rag_pipelines.py b/e2e/pipelines/test_rag_pipelines.py
index 556e3ce02b..db2cdd7f88 100644
--- a/e2e/pipelines/test_rag_pipelines.py
+++ b/e2e/pipelines/test_rag_pipelines.py
@@ -35,7 +35,7 @@ def test_bm25_rag_pipeline(tmp_path):
     rag_pipeline.connect("retriever", "prompt_builder.documents")
     rag_pipeline.connect("prompt_builder", "llm")
     rag_pipeline.connect("llm.replies", "answer_builder.replies")
-    rag_pipeline.connect("llm.metadata", "answer_builder.metadata")
+    rag_pipeline.connect("llm.meta", "answer_builder.meta")
     rag_pipeline.connect("retriever", "answer_builder.documents")

     # Draw the pipeline
@@ -108,7 +108,7 @@ def test_embedding_retrieval_rag_pipeline(tmp_path):
     rag_pipeline.connect("retriever", "prompt_builder.documents")
     rag_pipeline.connect("prompt_builder", "llm")
     rag_pipeline.connect("llm.replies", "answer_builder.replies")
-    rag_pipeline.connect("llm.metadata", "answer_builder.metadata")
+    rag_pipeline.connect("llm.meta", "answer_builder.meta")
     rag_pipeline.connect("retriever", "answer_builder.documents")

     # Draw the pipeline
diff --git a/examples/pipelines/rag_pipeline.py b/examples/pipelines/rag_pipeline.py
index 0f9cb174c1..1e9151c342 100644
--- a/examples/pipelines/rag_pipeline.py
+++ b/examples/pipelines/rag_pipeline.py
@@ -38,7 +38,7 @@
 rag_pipeline.connect("retriever", "prompt_builder.documents")
 rag_pipeline.connect("prompt_builder", "llm")
 rag_pipeline.connect("llm.replies", "answer_builder.replies")
-rag_pipeline.connect("llm.metadata", "answer_builder.metadata")
+rag_pipeline.connect("llm.meta", "answer_builder.meta")
 rag_pipeline.connect("retriever", "answer_builder.documents")

 # Ask a question on the data you just added.
diff --git a/test/components/generators/chat/test_openai.py b/test/components/generators/chat/test_openai.py
index 9d8aa34f43..c224a1705a 100644
--- a/test/components/generators/chat/test_openai.py
+++ b/test/components/generators/chat/test_openai.py
@@ -275,8 +275,8 @@ def test_live_run(self):
         assert len(results["replies"]) == 1
         message: ChatMessage = results["replies"][0]
         assert "Paris" in message.content
-        assert "gpt-3.5" in message.metadata["model"]
-        assert message.metadata["finish_reason"] == "stop"
+        assert "gpt-3.5" in message.meta["model"]
+        assert message.meta["finish_reason"] == "stop"

     @pytest.mark.skipif(
         not os.environ.get("OPENAI_API_KEY", None),
@@ -311,8 +311,8 @@ def __call__(self, chunk: StreamingChunk) -> None:
         message: ChatMessage = results["replies"][0]

         assert "Paris" in message.content
-        assert "gpt-3.5" in message.metadata["model"]
-        assert message.metadata["finish_reason"] == "stop"
+        assert "gpt-3.5" in message.meta["model"]
+        assert message.meta["finish_reason"] == "stop"

         assert callback.counter > 1
         assert "Paris" in callback.responses
diff --git a/test/components/generators/test_openai.py b/test/components/generators/test_openai.py
index a5cc2fc38b..ef8406e3f6 100644
--- a/test/components/generators/test_openai.py
+++ b/test/components/generators/test_openai.py
@@ -271,11 +271,11 @@ def test_live_run(self):
         component = GPTGenerator(api_key=os.environ.get("OPENAI_API_KEY"))
         results = component.run("What's the capital of France?")
         assert len(results["replies"]) == 1
-        assert len(results["metadata"]) == 1
+        assert len(results["meta"]) == 1
         response: str = results["replies"][0]
         assert "Paris" in response

-        metadata = results["metadata"][0]
+        metadata = results["meta"][0]
         assert "gpt-3.5" in metadata["model"]
         assert metadata["finish_reason"] == "stop"

@@ -314,11 +314,11 @@ def __call__(self, chunk: StreamingChunk) -> None:
         results = component.run("What's the capital of France?")
         assert len(results["replies"]) == 1
-        assert len(results["metadata"]) == 1
+        assert len(results["meta"]) == 1

         response: str = results["replies"][0]
         assert "Paris" in response

-        metadata = results["metadata"][0]
+        metadata = results["meta"][0]
         assert "gpt-3.5" in metadata["model"]
         assert metadata["finish_reason"] == "stop"

From 1f911920d98954b57daacfe8d8ed02fd77d136db Mon Sep 17 00:00:00 2001
From: anakin87
Date: Thu, 21 Dec 2023 13:24:09 +0100
Subject: [PATCH 2/3] do not touch already broken e2e tests

---
 e2e/pipelines/test_rag_pipelines.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/e2e/pipelines/test_rag_pipelines.py b/e2e/pipelines/test_rag_pipelines.py
index db2cdd7f88..556e3ce02b 100644
--- a/e2e/pipelines/test_rag_pipelines.py
+++ b/e2e/pipelines/test_rag_pipelines.py
@@ -35,7 +35,7 @@ def test_bm25_rag_pipeline(tmp_path):
     rag_pipeline.connect("retriever", "prompt_builder.documents")
     rag_pipeline.connect("prompt_builder", "llm")
     rag_pipeline.connect("llm.replies", "answer_builder.replies")
-    rag_pipeline.connect("llm.meta", "answer_builder.meta")
+    rag_pipeline.connect("llm.metadata", "answer_builder.metadata")
     rag_pipeline.connect("retriever", "answer_builder.documents")

     # Draw the pipeline
@@ -108,7 +108,7 @@ def test_embedding_retrieval_rag_pipeline(tmp_path):
     rag_pipeline.connect("retriever", "prompt_builder.documents")
     rag_pipeline.connect("prompt_builder", "llm")
     rag_pipeline.connect("llm.replies", "answer_builder.replies")
-    rag_pipeline.connect("llm.meta", "answer_builder.meta")
+    rag_pipeline.connect("llm.metadata", "answer_builder.metadata")
rag_pipeline.connect("retriever", "answer_builder.documents") # Draw the pipeline From da5ab4f0d07919b12cd8b319ba1a115faf5100f3 Mon Sep 17 00:00:00 2001 From: anakin87 Date: Thu, 21 Dec 2023 13:41:27 +0100 Subject: [PATCH 3/3] Revert "do not touch already broken e2e tests" This reverts commit 1f911920d98954b57daacfe8d8ed02fd77d136db. --- e2e/pipelines/test_rag_pipelines.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/e2e/pipelines/test_rag_pipelines.py b/e2e/pipelines/test_rag_pipelines.py index 556e3ce02b..db2cdd7f88 100644 --- a/e2e/pipelines/test_rag_pipelines.py +++ b/e2e/pipelines/test_rag_pipelines.py @@ -35,7 +35,7 @@ def test_bm25_rag_pipeline(tmp_path): rag_pipeline.connect("retriever", "prompt_builder.documents") rag_pipeline.connect("prompt_builder", "llm") rag_pipeline.connect("llm.replies", "answer_builder.replies") - rag_pipeline.connect("llm.metadata", "answer_builder.metadata") + rag_pipeline.connect("llm.meta", "answer_builder.meta") rag_pipeline.connect("retriever", "answer_builder.documents") # Draw the pipeline @@ -108,7 +108,7 @@ def test_embedding_retrieval_rag_pipeline(tmp_path): rag_pipeline.connect("retriever", "prompt_builder.documents") rag_pipeline.connect("prompt_builder", "llm") rag_pipeline.connect("llm.replies", "answer_builder.replies") - rag_pipeline.connect("llm.metadata", "answer_builder.metadata") + rag_pipeline.connect("llm.meta", "answer_builder.meta") rag_pipeline.connect("retriever", "answer_builder.documents") # Draw the pipeline