Improve bot error handling/retry logic
Signed-off-by: John Strunk <[email protected]>
JohnStrunk committed May 22, 2024
1 parent c938732 commit 60a25a7
Showing 2 changed files with 57 additions and 25 deletions.
70 changes: 50 additions & 20 deletions bot.py
@@ -6,7 +6,7 @@
 import logging
 import os
 import time
-from datetime import datetime
+from datetime import UTC, datetime
 
 import requests
 from atlassian import Jira  # type: ignore
@@ -57,33 +57,63 @@ def main():
     logging.basicConfig(level=getattr(logging, args.log_level))
     max_depth = args.max_depth
     send_updates = not args.no_update
-    delay = args.seconds
-    since = datetime.fromisoformat(args.modified_since)
+    delay: int = args.seconds
+    since = datetime.fromisoformat(args.modified_since).astimezone(UTC)
 
     jira = Jira(url=os.environ["JIRA_URL"], token=os.environ["JIRA_TOKEN"])
 
+    most_recent_modification = since
     while True:
         start_time = datetime.now()
         logging.info("Starting iteration at %s", start_time.isoformat())
-        try:
-            issue_keys = get_issues_to_summarize(jira, since)
-            for issue_key in issue_keys:
-                issue_start_time = datetime.now()
-                issue = issue_cache.get_issue(jira, issue_key)
-                summary = summarize_issue(
-                    issue, max_depth=max_depth, send_updates=send_updates
+        issue_keys: list[str] = []
+        successful = False
+        while not successful:
+            try:
+                issue_keys = get_issues_to_summarize(jira, since)
+                successful = True
+            except requests.exceptions.HTTPError as error:
+                logging.error(
+                    "HTTPError exception (%s): %s",
+                    error.request.url,
+                    error.response.reason,
                 )
-                elapsed = datetime.now() - issue_start_time
-                print(f"Summarized {issue_key} ({elapsed}s):\n{summary}\n")
-            since = start_time  # Only update if we succeeded
-        except requests.exceptions.HTTPError as error:
-            logging.error("HTTPError exception: %s", error.response.reason)
-        except requests.exceptions.ReadTimeout as error:
-            logging.error("ReadTimeout exception: %s", error, exc_info=True)
+                time.sleep(5)
+            except requests.exceptions.ReadTimeout as error:
+                logging.error("ReadTimeout exception: %s", error, exc_info=True)
+                time.sleep(5)
+        for issue_key in issue_keys:
+            successful = False
+            while not successful:
+                try:
+                    issue_start_time = datetime.now()
+                    issue = issue_cache.get_issue(jira, issue_key)
+                    summary = summarize_issue(
+                        issue, max_depth=max_depth, send_updates=send_updates
+                    )
+                    elapsed = datetime.now() - issue_start_time
+                    print(f"Summarized {issue_key} ({elapsed}s):\n{summary}\n")
+                    if issue.updated > most_recent_modification:
+                        most_recent_modification = issue.updated
+                    successful = True
+                except requests.exceptions.HTTPError as error:
+                    logging.error(
+                        "HTTPError exception (%s): %s",
+                        error.request.url,
+                        error.response.reason,
+                    )
+                    time.sleep(5)
+                except requests.exceptions.ReadTimeout as error:
+                    logging.error("ReadTimeout exception: %s", error, exc_info=True)
+                    time.sleep(5)
+        since = most_recent_modification  # Only update if we succeeded
         logging.info("Cache stats: %s", issue_cache)
-        print(f"Iteration elapsed time: {datetime.now() - start_time}")
-        print(f"Sleeping for {delay} seconds...")
-        time.sleep(delay)
+        now = datetime.now()
+        elapsed = now - start_time
+        print(f"Iteration elapsed time: {elapsed}")
+        sleep_time = max(delay - elapsed.total_seconds(), 0)
+        print(f"Sleeping for {sleep_time} seconds...")
+        time.sleep(sleep_time)
 
 
 if __name__ == "__main__":
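
Note: the bot.py change replaces the single try/except around the whole iteration with per-call retry loops. Each Jira request is retried in place, pausing five seconds after an HTTPError or ReadTimeout, so one flaky call no longer discards the work of the entire iteration. The --modified-since timestamp is now normalized to UTC, the since watermark advances to the newest issue.updated value that was actually processed, and the end-of-iteration sleep is shortened by the time the iteration already took (sleep_time = max(delay - elapsed, 0)). A minimal sketch of the retry-until-success pattern the diff inlines follows; retry_until_success and its parameters are illustrative names, not part of the commit:

import logging
import time
from typing import Callable, TypeVar

import requests

T = TypeVar("T")


def retry_until_success(operation: Callable[[], T], pause: float = 5.0) -> T:
    """Call `operation` until it succeeds, pausing after transient request errors.

    Illustrative helper only -- the commit writes these loops out inline in main().
    """
    while True:
        try:
            return operation()
        except requests.exceptions.HTTPError as error:
            logging.error(
                "HTTPError exception (%s): %s",
                error.request.url,
                error.response.reason,
            )
        except requests.exceptions.ReadTimeout as error:
            logging.error("ReadTimeout exception: %s", error, exc_info=True)
        time.sleep(pause)  # brief pause before retrying the same call

With a helper like this, the first loop in main() would collapse to issue_keys = retry_until_success(lambda: get_issues_to_summarize(jira, since)); the commit keeps the loops inline, trading some repetition for one less layer of indirection.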
12 changes: 7 additions & 5 deletions summarizer.py
@@ -81,13 +81,13 @@ def summarize_issue(
         A string containing the summary
     """
 
-    _logger.info("Summarizing %s...", issue.key)
     # If the current summary is up-to-date and we're not asked to regenerate it,
     # return what's there
     if not regenerate and is_summary_current(issue):
-        _logger.debug("Summary for %s is current, using that.", issue.key)
+        _logger.info("Summarizing (using current): %s", issue)
         return _wrapper.get(issue.status_summary) or ""
 
+    _logger.info("Summarizing: %s", issue)
     # if we have not reached max-depth, summarize the child issues for inclusion in this summary
     child_summaries: List[Tuple[RelatedIssue, str]] = []
     for child in issue.children:
@@ -343,8 +343,8 @@ def get_issues_to_summarize(
     since_string = since.astimezone(user_zi).strftime("%Y-%m-%d %H:%M")
     updated_issues = check_response(
         client.jql(
-            f"labels = '{SUMMARY_ALLOWED_LABEL}' and updated >= '{since_string}' ORDER BY updated DESC",  # pylint: disable=line-too-long
-            limit=50,
+            f"labels = '{SUMMARY_ALLOWED_LABEL}' and updated >= '{since_string}' ORDER BY updated ASC",  # pylint: disable=line-too-long
+            limit=100,
             fields="key,updated",
         )
     )
@@ -358,7 +358,9 @@
             filtered_keys.append(key)
     keys = filtered_keys
 
-    _logger.info("Issues updated since %s: %s", since_string, ", ".join(keys))
+    _logger.info(
+        "Issues updated since %s: (%d) %s", since_string, len(keys), ", ".join(keys)
+    )
 
     # Given the updated issues, we also need to propagate the summaries up the
    # hierarchy. We first need to add the parent issues of all the updated
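
Note: in summarizer.py the JQL query now returns issues oldest-first (ORDER BY updated ASC), raises the page limit from 50 to 100, and the log line reports how many keys were found. Combined with bot.py advancing since to the newest updated timestamp it actually processed, this presumably turns the query into a resumable cursor: if a batch is cut off at the limit or an iteration fails partway through, the next poll resumes where the last one stopped instead of skipping older, unprocessed issues. A rough sketch of that cursor idea, using hypothetical fetch_updated_issues/process stand-ins rather than the project's real functions:

from datetime import datetime


def poll_once(fetch_updated_issues, process, since: datetime) -> datetime:
    """Process issues updated after `since`, oldest first, and return the new cursor.

    `fetch_updated_issues` is assumed to return issues ordered by `updated` ascending
    (possibly truncated by a query limit); `process` summarizes a single issue.
    """
    cursor = since
    for issue in fetch_updated_issues(since):
        process(issue)
        if issue.updated > cursor:
            cursor = issue.updated  # only advance past work that actually finished
    return cursor


# Each bot iteration would then do: since = poll_once(fetch, summarize, since).
# A truncated batch just means the next poll resumes from the last processed
# timestamp; with descending order, the same truncation would silently drop issues.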
