From 19bd17e6ac9f4d07e04245ff832e34e0a15ea570 Mon Sep 17 00:00:00 2001
From: Abram
Date: Tue, 9 Jan 2024 10:20:16 +0100
Subject: [PATCH] Update - read llm output from json response and added
 backward compatibility

---
 agenta-backend/agenta_backend/services/llm_apps_service.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/agenta-backend/agenta_backend/services/llm_apps_service.py b/agenta-backend/agenta_backend/services/llm_apps_service.py
index ecd660874d..de8a7203ef 100644
--- a/agenta-backend/agenta_backend/services/llm_apps_service.py
+++ b/agenta-backend/agenta_backend/services/llm_apps_service.py
@@ -38,7 +38,12 @@ async def get_llm_app_output(uri: str, datapoint: Any, parameters: dict) -> AppO
         url, json=payload, timeout=httpx.Timeout(timeout=5, read=None, write=5)
     )
     response.raise_for_status()
-    return AppOutput(output=response.json(), status="success")
+    response_data = response.json()
+    if isinstance(response_data, dict):
+        llm_output = response_data["message"]
+    else:
+        llm_output = response_data
+    return AppOutput(output=llm_output, status="success")
 
 
 async def run_with_retry(