
Merge pull request #1614 from Agenta-AI/impr/improve-stacktrace-in-case-of-error-in-eval

Add traceback for error handling in LLM Apps service
mmabrouk authored May 6, 2024
2 parents f091bc7 + 9a5ced5 commit b696d97
Showing 2 changed files with 44 additions and 30 deletions.
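The change centers on traceback.format_exception_only, which returns a list of strings describing only the exception type and message, with no stack frames; joining the list yields a compact summary. A minimal sketch of that behavior (the error text is illustrative):

import traceback

try:
    raise ValueError("model endpoint returned 422")
except ValueError as e:
    # format_exception_only describes only the exception itself -- no frames.
    summary = "".join(traceback.format_exception_only(type(e), e))
    print(summary)  # "ValueError: model endpoint returned 422" (trailing newline)

For a full frame-by-frame trace, traceback.format_exc() inside the except block would be the alternative; the commit opts for the exception-only form, so the stacktrace field typically stays to a single line per error.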
68 changes: 38 additions & 30 deletions agenta-backend/agenta_backend/services/llm_apps_service.py
@@ -1,11 +1,13 @@
import json
import logging
import asyncio
import traceback
import aiohttp
from typing import Any, Dict, List


from agenta_backend.models.db_models import InvokationResult, Result, Error
from agenta_backend.utils import common

# Set logger
logger = logging.getLogger(__name__)
@@ -96,37 +98,40 @@ async def invoke_app(
)

except aiohttp.ClientResponseError as e:
# Parse error details from the API response
error_message = "Error in invoking the LLM App:"
try:
error_message = e.message
except ValueError:
# Fallback if the error response is not JSON or doesn't have the expected structure
logger.error(f"Failed to parse error response: {e}")

logger.error(f"Error occurred during request: {error_message}")
return InvokationResult(
result=Result(
type="error",
error=Error(
message=f"{e.code}: {error_message}",
stacktrace=str(e),
),
)
)

error_message = f"HTTP error {e.status}: {e.message}"
stacktrace = "".join(traceback.format_exception_only(type(e), e))
logger.error(f"HTTP error occurred during request: {error_message}")
common.capture_exception_in_sentry(e)
except aiohttp.ServerTimeoutError as e:
error_message = "Request timed out"
stacktrace = "".join(traceback.format_exception_only(type(e), e))
logger.error(error_message)
common.capture_exception_in_sentry(e)
except aiohttp.ClientConnectionError as e:
error_message = f"Connection error: {str(e)}"
stacktrace = "".join(traceback.format_exception_only(type(e), e))
logger.error(error_message)
common.capture_exception_in_sentry(e)
except json.JSONDecodeError as e:
error_message = "Failed to decode JSON from response"
stacktrace = "".join(traceback.format_exception_only(type(e), e))
logger.error(error_message)
common.capture_exception_in_sentry(e)
except Exception as e:
# Catch-all for any other unexpected errors
logger.error(f"Unexpected error: {e}")
return InvokationResult(
result=Result(
type="error",
error=Error(
message="Unexpected error while invoking the LLM App",
stacktrace=str(e),
),
)
error_message = f"Unexpected error: {str(e)}"
stacktrace = "".join(traceback.format_exception_only(type(e), e))
logger.error(error_message)
common.capture_exception_in_sentry(e)

return InvokationResult(
result=Result(
type="error",
error=Error(
message=error_message,
stacktrace=stacktrace,
),
)
)


async def run_with_retry(
@@ -165,10 +170,13 @@ async def run_with_retry(
retries += 1
except Exception as e:
last_exception = e
logger.info(f"Error processing datapoint: {input_data}")
logger.info(f"Error processing datapoint: {input_data}. {str(e)}")
logger.info("".join(traceback.format_exception_only(type(e), e)))
common.capture_exception_in_sentry(e)

# If max retries is reached or an exception that isn't in the second block,
# update & return the last exception
logging.info("Max retries reached")
exception_message = (
"Max retries reached"
if retries == max_retry_count
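The run_with_retry hunk above applies the same idea inside the retry loop: the caught exception's message and its exception-only summary are both logged, and the exception is forwarded to Sentry, before the next attempt. A condensed, self-contained sketch of that loop shape (func, max_retry_count, and retry_delay are illustrative stand-ins):

import asyncio
import logging
import traceback

logger = logging.getLogger(__name__)

async def run_with_retry_sketch(func, input_data, max_retry_count=3, retry_delay=1.0):
    retries = 0
    last_exception = None
    while retries < max_retry_count:
        try:
            return await func(input_data)
        except Exception as e:
            last_exception = e
            # Log the failing datapoint, the message, and a one-line summary;
            # the real service also calls common.capture_exception_in_sentry(e).
            logger.info(f"Error processing datapoint: {input_data}. {str(e)}")
            logger.info("".join(traceback.format_exception_only(type(e), e)))
            await asyncio.sleep(retry_delay)
            retries += 1
    logger.info("Max retries reached")
    return last_exception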
6 changes: 6 additions & 0 deletions agenta-backend/agenta_backend/utils/common.py
@@ -4,6 +4,7 @@

from fastapi.types import DecoratedCallable
from fastapi import APIRouter as FastAPIRouter
from sentry_sdk import capture_exception

logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
@@ -76,3 +77,8 @@ def isCloudDev():

def isOss():
return os.environ["FEATURE_FLAG"] == "oss"


def capture_exception_in_sentry(e: Exception):
if isCloudProd():
capture_exception(e)
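The new helper gates Sentry reporting on the deployment environment, so exceptions captured throughout the services are only shipped upstream from cloud production. A minimal sketch of the same gating idea, assuming isCloudProd compares FEATURE_FLAG against "cloud" the way the visible isOss compares it against "oss" (an assumption; the hunk only shows isOss):

import os
from sentry_sdk import capture_exception

def capture_exception_in_sentry(e: Exception) -> None:
    # Assumed gate: only report when running as cloud production.
    if os.environ.get("FEATURE_FLAG") == "cloud":
        capture_exception(e)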
