-
Notifications
You must be signed in to change notification settings - Fork 2
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #107 from JohnStrunk/rollup
Add script to generate status roll-ups
- Loading branch information
Showing
5 changed files
with
317 additions
and
9 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,140 @@ | ||
{ | ||
"cells": [ | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"# Create a JIRA client\n", | ||
"from os import environ\n", | ||
"from atlassian import Jira\n", | ||
"\n", | ||
"jira_api_token = environ.get(\"JIRA_TOKEN\", \"\")\n", | ||
"jira_url = environ.get(\"JIRA_URL\", \"\")\n", | ||
"client = Jira(url=jira_url, token=jira_api_token)" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"# Import our local modules\n", | ||
"from jiraissues import Issue, issue_cache\n", | ||
"from summarizer import summarize_issue, get_chat_model, rollup_contributors" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"epic_to_summarize = \"OCTO-2\"" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"# Get the existing summaries from the Jira issues\n", | ||
"child_inputs = []\n", | ||
"epic = issue_cache.get_issue(client, epic_to_summarize)\n", | ||
"for child in epic.children:\n", | ||
" issue = issue_cache.get_issue(client, child.key)\n", | ||
" text = f\"{issue}\\n\"\n", | ||
" text += summarize_issue(issue, max_depth=1)\n", | ||
" child_inputs.append({\"issue\": issue, \"summary\": text})\n", | ||
"\n", | ||
"# Sort the issues by key\n", | ||
"child_inputs.sort(key=lambda x: x[\"issue\"].key)" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"# Generate the individual exec summaries\n", | ||
"import textwrap\n", | ||
"llm = get_chat_model(\"meta-llama/llama-3-70b-instruct\", max_new_tokens=2048)\n", | ||
"for item in child_inputs:\n", | ||
" data = f\"\"\"\\\n", | ||
"{item[\"issue\"]}\n", | ||
"{item[\"summary\"]}\n", | ||
"Contributors: {', '.join(c.display_name for c in item[\"issue\"].contributors)}\"\"\"\n", | ||
" prompt = f\"\"\"\\\n", | ||
"Condense the following technical status update into a short, high-level summary for an engineering leader.\n", | ||
"Focus on the high-level objective, keeping the technical detail to a minimum.\n", | ||
"Where possible, avoid mentioning specific issue IDs.\n", | ||
"\n", | ||
"{data}\n", | ||
"\n", | ||
"Please provide your converted summary with no formatting or bullet points:\n", | ||
"\"\"\"\n", | ||
" summary = llm.invoke(prompt, stop=[\"<|endoftext|>\"])\n", | ||
" item[\"exec_summary\"] = textwrap.fill(summary).strip()" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"for item in child_inputs:\n", | ||
" issue = item[\"issue\"]\n", | ||
" print(f\"**{issue.key} - {issue.summary}**\")\n", | ||
" print(item[\"exec_summary\"])\n", | ||
" contributors = sorted(rollup_contributors(item[\"issue\"]), key=lambda x: x.display_name.split()[-1])\n", | ||
" if contributors:\n", | ||
" print(f\"Contributors: {', '.join([c.display_name for c in contributors])}\")\n", | ||
" print()" | ||
] | ||
}, | ||
{ | ||
"cell_type": "code", | ||
"execution_count": null, | ||
"metadata": {}, | ||
"outputs": [], | ||
"source": [ | ||
"# Generate the overall exec summary\n", | ||
"prompt = f\"\"\"\\\n", | ||
"Given the following high-level summaries of our group's work, please provide a short, one-paragraph summary of this initiative for a corporate leader:\n", | ||
"\n", | ||
"{\"\\n\".join([item[\"exec_summary\"] for item in child_inputs])}\n", | ||
"\n", | ||
"Please provide just the summary paragraph, with no header.\n", | ||
"\"\"\"\n", | ||
"paragraph = llm.invoke(prompt, stop=[\"<|endoftext|>\"])\n", | ||
"print(paragraph.strip())\n" | ||
] | ||
} | ||
], | ||
"metadata": { | ||
"kernelspec": { | ||
"display_name": ".venv", | ||
"language": "python", | ||
"name": "python3" | ||
}, | ||
"language_info": { | ||
"codemirror_mode": { | ||
"name": "ipython", | ||
"version": 3 | ||
}, | ||
"file_extension": ".py", | ||
"mimetype": "text/x-python", | ||
"name": "python", | ||
"nbconvert_exporter": "python", | ||
"pygments_lexer": "ipython3", | ||
"version": "3.12.3" | ||
} | ||
}, | ||
"nbformat": 4, | ||
"nbformat_minor": 2 | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,119 @@ | ||
#! /usr/bin/env python | ||
|
||
"""Roll-up the status of Jira issues into a single document""" | ||
|
||
import argparse | ||
import logging | ||
import os | ||
import textwrap | ||
from dataclasses import dataclass, field | ||
|
||
from atlassian import Jira # type: ignore | ||
|
||
from jiraissues import Issue, User, issue_cache | ||
from summarizer import get_chat_model, rollup_contributors, summarize_issue | ||
|
||
LINK_BASE = "https://issues.redhat.com/browse/" | ||
|
||
|
||
@dataclass
class IssueSummary:
    """Summary of an issue"""

    # The Jira issue being summarized
    issue: Issue
    # Detailed summary text: str(issue) plus summarize_issue() output
    summary: str = ""
    # Condensed, leadership-oriented summary produced by the LLM (filled in later)
    exec_summary: str = ""
    # Contributors rolled up from the issue via rollup_contributors()
    contributors: set[User] = field(default_factory=set)
|
||
|
||
def main() -> None:
    """Main entry point: print a status roll-up document for a Jira issue.

    Reads ``JIRA_URL`` and ``JIRA_TOKEN`` from the environment, collects
    summaries for the children of the given issue key, condenses each with an
    LLM, and prints a Markdown report to stdout.
    """
    # pylint: disable=duplicate-code
    parser = argparse.ArgumentParser(description="Generate an issue summary roll-up")
    parser.add_argument(
        "--log-level",
        default="WARNING",
        choices=["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"],
        help="Set the logging level",
    )
    parser.add_argument("jira_issue_key", type=str, help="JIRA issue key")

    args = parser.parse_args()
    logging.basicConfig(level=getattr(logging, str(args.log_level).upper()))
    issue_key: str = args.jira_issue_key

    client = Jira(url=os.environ["JIRA_URL"], token=os.environ["JIRA_TOKEN"])

    child_inputs = _collect_child_summaries(client, issue_key)

    llm = get_chat_model("meta-llama/llama-3-70b-instruct", max_new_tokens=2048)
    _generate_exec_summaries(llm, child_inputs)
    exec_paragraph = _generate_overall_summary(llm, child_inputs)

    _print_report(issue_key, exec_paragraph, child_inputs)


def _collect_child_summaries(client: Jira, issue_key: str) -> list[IssueSummary]:
    """Build an IssueSummary for each child of the given issue, sorted by key.

    :param client: Authenticated Jira client
    :param issue_key: Key of the parent (epic) issue
    :return: IssueSummary objects with summary text and contributors filled in
    """
    logging.info("Collecting issue summaries for children of %s", issue_key)
    summaries: list[IssueSummary] = []
    epic = issue_cache.get_issue(client, issue_key)
    for child in epic.children:
        issue = issue_cache.get_issue(client, child.key)
        text = f"{issue}\n" + summarize_issue(issue, max_depth=1)
        summaries.append(
            IssueSummary(
                issue=issue, summary=text, contributors=rollup_contributors(issue)
            )
        )
    # Sort the issues by key for a stable, predictable report order
    summaries.sort(key=lambda item: item.issue.key)
    return summaries


def _generate_exec_summaries(llm, child_inputs: list[IssueSummary]) -> None:
    """Fill in the exec_summary field of each IssueSummary using the LLM."""
    for item in child_inputs:
        logging.info("Generating an executive summary for %s", item.issue.key)
        contributor_names = ", ".join(c.display_name for c in item.contributors)
        data = f"""\
{item.issue}
{item.summary}
Contributors: {contributor_names}"""
        prompt = f"""\
Condense the following technical status update into a short, high-level summary for an engineering leader.
Focus on the high-level objective, keeping the technical detail to a minimum.
Where possible, avoid mentioning specific issue IDs.
{data}
Please provide just the summary paragraph, with no header.
"""
        summary = llm.invoke(prompt, stop=["<|endoftext|>"]).strip()
        item.exec_summary = textwrap.fill(summary)


def _generate_overall_summary(llm, child_inputs: list[IssueSummary]) -> str:
    """Generate the wrapped overall executive summary paragraph."""
    logging.info("Generating the overall executive summary")
    # Join outside the f-string: a backslash inside an f-string expression
    # ({"\n".join(...)}) is a SyntaxError before Python 3.12 (PEP 701).
    combined = "\n".join(item.exec_summary for item in child_inputs)
    prompt = f"""\
Given the following high-level summaries of our group's work, please provide a short, one-paragraph summary of this initiative for a corporate leader:
{combined}
Please provide just the summary paragraph, with no header.
"""
    return textwrap.fill(llm.invoke(prompt, stop=["<|endoftext|>"]).strip())


def _print_report(
    issue_key: str, exec_paragraph: str, child_inputs: list[IssueSummary]
) -> None:
    """Print the overall Markdown status roll-up to stdout."""
    print(f"# Executive Summary for [{issue_key}]({LINK_BASE}{issue_key})")
    print()
    print(exec_paragraph)
    print()
    print("## Individual issue status")
    print()
    for item in child_inputs:
        issue = item.issue
        print(f"### [{issue.key}]({LINK_BASE}{issue.key}) - {issue.summary}")
        print()
        print(item.exec_summary)
        print()
        # Sort by the last word of the display name — presumably surname;
        # TODO(review): confirm this ordering is intentional for the report
        contributors = sorted(
            item.contributors, key=lambda x: x.display_name.split()[-1]
        )
        if contributors:
            print(
                f"**Contributors:** {', '.join([c.display_name for c in contributors])}"
            )
            print()
|
||
|
||
# Run the roll-up generator when executed as a script (not on import).
if __name__ == "__main__":
    main()
Oops, something went wrong.