Add script to generate status roll-ups #107

Merged: 2 commits, merged on Jun 11, 2024
6 changes: 3 additions & 3 deletions .github/workflows/workflow.yaml
@@ -44,10 +44,10 @@ jobs:
pre-commit|

- name: Run pre-commit checks
run: pipx run pre-commit run -a
run: pipx run --python ${{ steps.setup-py.outputs.python-version}} pre-commit run -a

- name: Run pre-commit gc
run: pipx run pre-commit gc
run: pipx run --python ${{ steps.setup-py.outputs.python-version}} pre-commit gc

tests:
name: "Tests"
@@ -67,7 +67,7 @@ jobs:
python-version: "3.12"

- name: Install pipenv
run: pipx install pipenv
run: pipx install --python ${{ steps.setup-py.outputs.python-version}} pipenv

- name: Install dependencies
run: pipenv install --dev
32 changes: 29 additions & 3 deletions jiraissues.py
@@ -126,6 +126,19 @@ def __init__(self, data: dict[str, Any]) -> None:
def __str__(self) -> str:
return f"{self.display_name} ({self.key})"

def __hash__(self) -> int:
return hash((self.key, self.name, self.display_name, self.timezone))

def __eq__(self, other: object) -> bool:
if not isinstance(other, User):
return False
return (
self.key == other.key
and self.name == other.name
and self.display_name == other.display_name
and self.timezone == other.timezone
)
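
The new `__hash__`/`__eq__` pair is what lets equivalent `User` objects be deduplicated when contributors are collected into sets across issues. A minimal sketch of the intended behavior, assuming the constructor reads the standard Jira user fields (`key`, `name`, `displayName`, `timeZone`); the payload values below are made up:

```python
# Sketch only: payload keys and values are illustrative, not taken from a real server.
from jiraissues import User

payload = {
    "key": "jdoe",
    "name": "jdoe",
    "displayName": "Jane Doe",
    "timeZone": "America/New_York",
}

# Equivalent payloads produce User objects that compare equal and hash the same,
# so a set keeps a single copy when contributors are merged from many issues.
assert len({User(payload), User(dict(payload))}) == 1
```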


class Issue: # pylint: disable=too-many-instance-attributes
"""
@@ -152,7 +165,18 @@ def __init__(self, client: Jira, issue_key: str) -> None:
CF_BLOCKED_REASON,
CF_CONTRIBUTORS,
"comment",
"assignee",
]
# Need to Handle 403 errors
# DEBUG:urllib3.connectionpool:https://server.com:443 "GET
# /rest/api/2/issue/XXXX-16688?fields=summary,...,comment HTTP/1.1" 403
# None
# DEBUG:atlassian.rest_client:HTTP: GET
# rest/api/2/issue/XXXX-16688?fields=summary,...,comment -> 403
# Forbidden
# DEBUG:atlassian.rest_client:HTTP: Response text ->
# {"errorMessages":["You do not have the permission to see the specified
# issue."],"errors":{}}
data = check_response(
with_retry(lambda: client.issue(issue_key, fields=",".join(fields)))
)
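
On the 403 note above: if the `atlassian` client surfaces permission failures as `requests` exceptions (an assumption; depending on configuration it may instead return an error payload that `check_response` would have to inspect), the fetch could be wrapped roughly like this:

```python
# Sketch only: one way the 403 case noted above might be handled.
import requests

try:
    data = check_response(
        with_retry(lambda: client.issue(issue_key, fields=",".join(fields)))
    )
except requests.HTTPError as exc:  # assumption: the client raises on 4xx responses
    if exc.response is not None and exc.response.status_code == 403:
        _logger.warning("No permission to view %s", issue_key)
        raise PermissionError(f"Not permitted to view issue {issue_key}") from exc
    raise
```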
@@ -185,14 +209,16 @@ def __init__(self, client: Jira, issue_key: str) -> None:
self.blocked = str(blocked_dict.get("value", "False")).lower() in ["true"]
self.blocked_reason = str(data["fields"].get(CF_BLOCKED_REASON) or "")
self.contributors = {
User(user) for user in data["fields"].get(CF_CONTRIBUTORS, [])
User(user) for user in (data["fields"].get(CF_CONTRIBUTORS) or [])
}
self.assignee = (
User(data["fields"]["assignee"]) if data["fields"]["assignee"] else None
)
_logger.info("Retrieved issue: %s", self)

def __str__(self) -> str:
updated = self.updated.strftime("%Y-%m-%d %H:%M:%S")
return (
f"{self.key} ({self.issue_type}) {updated} - "
f"{self.key} ({self.issue_type}) - "
+ f"{self.summary} ({self.status}/{self.resolution})"
)

140 changes: 140 additions & 0 deletions rollup.ipynb
@@ -0,0 +1,140 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Create a JIRA client\n",
"from os import environ\n",
"from atlassian import Jira\n",
"\n",
"jira_api_token = environ.get(\"JIRA_TOKEN\", \"\")\n",
"jira_url = environ.get(\"JIRA_URL\", \"\")\n",
"client = Jira(url=jira_url, token=jira_api_token)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Import our local modules\n",
"from jiraissues import Issue, issue_cache\n",
"from summarizer import summarize_issue, get_chat_model, rollup_contributors"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"epic_to_summarize = \"OCTO-2\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Get the existing summaries from the Jira issues\n",
"child_inputs = []\n",
"epic = issue_cache.get_issue(client, epic_to_summarize)\n",
"for child in epic.children:\n",
" issue = issue_cache.get_issue(client, child.key)\n",
" text = f\"{issue}\\n\"\n",
" text += summarize_issue(issue, max_depth=1)\n",
" child_inputs.append({\"issue\": issue, \"summary\": text})\n",
"\n",
"# Sort the issues by key\n",
"child_inputs.sort(key=lambda x: x[\"issue\"].key)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate the individual exec summaries\n",
"import textwrap\n",
"llm = get_chat_model(\"meta-llama/llama-3-70b-instruct\", max_new_tokens=2048)\n",
"for item in child_inputs:\n",
" data = f\"\"\"\\\n",
"{item[\"issue\"]}\n",
"{item[\"summary\"]}\n",
"Contributors: {', '.join(c.display_name for c in item[\"issue\"].contributors)}\"\"\"\n",
" prompt = f\"\"\"\\\n",
"Condense the following technical status update into a short, high-level summary for an engineering leader.\n",
"Focus on the high-level objective, keeping the technical detail to a minimum.\n",
"Where possible, avoid mentioning specific issue IDs.\n",
"\n",
"{data}\n",
"\n",
"Please provide your converted summary with no formatting or bullet points:\n",
"\"\"\"\n",
" summary = llm.invoke(prompt, stop=[\"<|endoftext|>\"])\n",
" item[\"exec_summary\"] = textwrap.fill(summary).strip()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"for item in child_inputs:\n",
" issue = item[\"issue\"]\n",
" print(f\"**{issue.key} - {issue.summary}**\")\n",
" print(item[\"exec_summary\"])\n",
" contributors = sorted(rollup_contributors(item[\"issue\"]), key=lambda x: x.display_name.split()[-1])\n",
" if contributors:\n",
" print(f\"Contributors: {', '.join([c.display_name for c in contributors])}\")\n",
" print()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Generate the overall exec summary\n",
"prompt = f\"\"\"\\\n",
"Given the following high-level summaries of our group's work, please provide a short, one-paragraph summary of this initiative for a corporate leader:\n",
"\n",
"{\"\\n\".join([item[\"exec_summary\"] for item in child_inputs])}\n",
"\n",
"Please provide just the summary paragraph, with no header.\n",
"\"\"\"\n",
"paragraph = llm.invoke(prompt, stop=[\"<|endoftext|>\"])\n",
"print(paragraph.strip())\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.3"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
119 changes: 119 additions & 0 deletions rollup_status.py
@@ -0,0 +1,119 @@
#! /usr/bin/env python

"""Roll-up the status of Jira issues into a single document"""

import argparse
import logging
import os
import textwrap
from dataclasses import dataclass, field

from atlassian import Jira # type: ignore

from jiraissues import Issue, User, issue_cache
from summarizer import get_chat_model, rollup_contributors, summarize_issue

LINK_BASE = "https://issues.redhat.com/browse/"


@dataclass
class IssueSummary:
"""Summary of an issue"""

issue: Issue
summary: str = ""
exec_summary: str = ""
contributors: set[User] = field(default_factory=set)


def main() -> None: # pylint: disable=too-many-locals
"""Main function"""
# pylint: disable=duplicate-code
parser = argparse.ArgumentParser(description="Generate an issue summary roll-up")
parser.add_argument(
"--log-level",
default="WARNING",
choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
help="Set the logging level",
)
parser.add_argument("jira_issue_key", type=str, help="JIRA issue key")

args = parser.parse_args()
logging.basicConfig(level=getattr(logging, str(args.log_level).upper()))
issue_key: str = args.jira_issue_key

client = Jira(url=os.environ["JIRA_URL"], token=os.environ["JIRA_TOKEN"])
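
A small aside on this line: `os.environ[...]` raises a bare `KeyError` when `JIRA_URL` or `JIRA_TOKEN` is not exported. A hedged sketch of a friendlier pre-flight check (not part of the change itself):

```python
# Sketch only: exit with a readable message instead of a KeyError traceback.
import os
import sys

from atlassian import Jira  # type: ignore

missing = [var for var in ("JIRA_URL", "JIRA_TOKEN") if not os.environ.get(var)]
if missing:
    sys.exit(f"rollup_status.py: please set {', '.join(missing)} in the environment")

client = Jira(url=os.environ["JIRA_URL"], token=os.environ["JIRA_TOKEN"])
```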

# Get the existing summaries from the Jira issues
logging.info("Collecting issue summaries for children of %s", issue_key)
child_inputs: list[IssueSummary] = []
epic = issue_cache.get_issue(client, issue_key)
for child in epic.children:
issue = issue_cache.get_issue(client, child.key)
text = f"{issue}\n"
text += summarize_issue(issue, max_depth=1)
child_inputs.append(
IssueSummary(
issue=issue, summary=text, contributors=rollup_contributors(issue)
)
)

# Sort the issues by key
child_inputs.sort(key=lambda x: x.issue.key)

# Generate the individual exec summaries
llm = get_chat_model("meta-llama/llama-3-70b-instruct", max_new_tokens=2048)
for item in child_inputs:
logging.info("Generating an executive summary for %s", item.issue.key)
data = f"""\
{item.issue}
{item.summary}
Contributors: {', '.join(c.display_name for c in item.contributors)}"""
prompt = f"""\
Condense the following technical status update into a short, high-level summary for an engineering leader.
Focus on the high-level objective, keeping the technical detail to a minimum.
Where possible, avoid mentioning specific issue IDs.

{data}

Please provide just the summary paragraph, with no header.
"""
summary = llm.invoke(prompt, stop=["<|endoftext|>"]).strip()
item.exec_summary = textwrap.fill(summary)

# Generate the overall exec summary
logging.info("Generating the overall executive summary")
prompt = f"""\
Given the following high-level summaries of our group's work, please provide a short, one-paragraph summary of this initiative for a corporate leader:

{"\n".join([item.exec_summary for item in child_inputs])}

Please provide just the summary paragraph, with no header.
"""
exec_paragraph = textwrap.fill(llm.invoke(prompt, stop=["<|endoftext|>"]).strip())

# Generate the overall status update
print(f"# Executive Summary for [{issue_key}]({LINK_BASE}{issue_key})")
print()
print(exec_paragraph)
print()
print("## Individual issue status")
print()
for item in child_inputs:
issue = item.issue
print(f"### [{issue.key}]({LINK_BASE}{issue.key}) - {issue.summary}")
print()
print(item.exec_summary)
print()
contributors = sorted(
item.contributors, key=lambda x: x.display_name.split()[-1]
)
if contributors:
print(
f"**Contributors:** {', '.join([c.display_name for c in contributors])}"
)
print()


if __name__ == "__main__":
main()
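
For reference, with the credentials exported, the roll-up is generated with something like `./rollup_status.py --log-level INFO OCTO-2`, where `OCTO-2` is just the example epic key used in the accompanying notebook; the resulting Markdown report is written to stdout.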