refactor: Add debug mode and refactor print_notifs #84

Merged
merged 11 commits on Apr 19, 2024
203 changes: 146 additions & 57 deletions gh-notify
@@ -19,9 +19,46 @@ MIN_FZF_VERSION="0.29.0"
# export variables for use in child processes
# https://docs.github.com/en/rest/overview/api-versions
export GH_REST_API_VERSION="X-GitHub-Api-Version:2022-11-28"
# The maximum number of notifications per page (set by GitHub)
export MAX_PER_PAGE_LIMIT=50
# Enable terminal-style output even when the output is redirected.
export GH_FORCE_TTY=1

# Set 'GH_NOTIFY_DEBUG_MODE' to 'true' to see more information
export GH_NOTIFY_DEBUG_MODE=${GH_NOTIFY_DEBUG_MODE:-false}
if $GH_NOTIFY_DEBUG_MODE; then
export gh_notify_debug_log="${BASH_SOURCE%/*}/gh_notify_debug.log"

# Tell the user where we saved the debug information
trap 'echo [DEBUG] $gh_notify_debug_log' EXIT

# Clear the file on every run
: >"$gh_notify_debug_log"

# Unset GH_FORCE_TTY to avoid unnecessary color codes in the debug file
unset GH_FORCE_TTY

# Redirect stdout and stderr to the terminal and a file
exec &> >(tee -a "$gh_notify_debug_log")

# [DISABLED] 'GH_DEBUG' sends its output to file descriptor 2. These messages could be
# captured by adding '2>&5' to every gh api call, but that would also hide the actual error
# message of a failed gh api call. It would be great to have an environment variable like
# 'BASH_XTRACEFD' to choose the file descriptor for the verbose GH_DEBUG output.

# 'GH_DEBUG' is useful for determining why a call to the GitHub API might have failed
# export GH_DEBUG=api
# Redirect possible errors and debug information from 'gh api' calls to a file
# exec 5> >(tee -a "$gh_notify_debug_log")

# Redirect xtrace output to a file
exec 6>>"$gh_notify_debug_log"
# Write the trace output to file descriptor 6
export BASH_XTRACEFD=6
# More verbose execution trace prompt
export PS4='+$(date +%Y-%m-%d:%H:%M:%S) ${FUNCNAME[0]:-}:L${LINENO:-}: '
set -o xtrace
fi
# 'SHLVL' variable represents the nesting level of the current shell
export NESTED_START_LVL="$SHLVL"
export FINAL_MSG='All caught up!'
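A usage note on the debug block above: setting the variable before a run is all that is needed (the log path is printed on exit), and the xtrace wiring is a generic bash pattern that can be reproduced on its own. A minimal sketch, assuming a hypothetical trace.log path:

```sh
# Enable debug mode for a single gh-notify run; the block above wires up the log file.
GH_NOTIFY_DEBUG_MODE=true gh notify

# Standalone version of the same xtrace-to-file pattern (trace.log is hypothetical):
exec 6>>trace.log                # dedicated file descriptor for the execution trace
export BASH_XTRACEFD=6           # 'set -x' output goes to fd 6 instead of stderr
export PS4='+$(date +%Y-%m-%d:%H:%M:%S) ${FUNCNAME[0]:-}:L${LINENO:-}: '
set -o xtrace
echo "hello"                     # the trace for this command lands in trace.log
```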
@@ -34,7 +71,7 @@ export WHITE_BOLD='\033[1m'

export exclusion_string='XXX_BOGUS_STRING_THAT_SHOULD_NOT_EXIST_XXX'
export filter_string=''
export num_notifications='0'
export num_notifications=0
export only_participating_flag=false
export include_all_flag=false
export preview_window_visibility='hidden'
@@ -90,7 +127,7 @@ ${WHITE_BOLD}Key Bindings fzf${NC}

${WHITE_BOLD}Table Format${NC}
${GREEN}unread symbol${NC} indicates unread status
${GREEN}time ${NC} last time the notification was read
${GREEN}time ${NC} last read (unread \u25cf) otherwise update time
${GREEN}repo ${NC} related repository
${GREEN}type ${NC} notification type
${GREEN}number ${NC} associated number
@@ -131,16 +168,12 @@ done
# ===================== helper functions ==========================

get_notifs() {
local page_num local_page_size
page_num="${1:-1}"
local_page_size=100
if [ "$num_notifications" != "0" ]; then
local_page_size=$num_notifications
fi
local page_num="$1"

# "marching ants" because sometimes this takes a bit.
printf >&2 "."
gh api --header "$GH_REST_API_VERSION" --method GET notifications --cache=0s \
--field per_page="$local_page_size" --field page="$page_num" \
--field per_page="$MAX_PER_PAGE_LIMIT" --field page="$page_num" \
--field participating="$only_participating_flag" --field all="$include_all_flag" \
--jq \
$'def colors:
@@ -165,7 +198,7 @@ get_notifs() {
repo_full_name: .repository.full_name,
unread_symbol: colored((if .unread then "\u25cf" else "\u00a0" end); "magenta"),
# make sure each outcome has an equal number of fields separated by spaces
timefmt: colored((.last_read_at // .updated_at | fromdateiso8601) as $time_sec |
timefmt: colored(((if .unread then .last_read_at // .updated_at else .updated_at end) | fromdateiso8601) as $time_sec |
# difference is less than one hour
if ((now - $time_sec) / 3600) < 1 then
(now - $time_sec) / 60 | floor | tostring + "min ago"
@@ -200,54 +233,37 @@ get_notifs() {
}
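The visible part of the timefmt change formats anything newer than an hour as minutes ago. A small jq sketch of just that branch, using a synthetic timestamp five minutes in the past:

```sh
jq -rn '(now - 300) as $time_sec |
  if ((now - $time_sec) / 3600) < 1
  then ((now - $time_sec) / 60 | floor | tostring + "min ago")
  else "older than an hour" end'
# -> 5min ago ("older than an hour" stands in for the branches hidden in the collapsed diff)
```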

print_notifs() {
local all_notifs page_num page new_notifs graphql_query_discussion result
all_notifs=''
page_num=1
graphql_query_discussion=$'query ($filter: String!) { search(query: $filter, type: DISCUSSION, first: 1) { nodes { ... on Discussion { number }}}}'
while true; do
page=$(get_notifs $page_num) || die "Failed to get notifications."
if [ "$page" == "" ]; then
break
local local_page_size page new_notifs result
local page_num=1
local total_requested="$num_notifications" # Total number of notifications requested
local fetched_count=0 # A counter for the number of fetched notifications
local all_notifs=""
while :; do
local_page_size=$((total_requested - fetched_count > MAX_PER_PAGE_LIMIT ? \
MAX_PER_PAGE_LIMIT : total_requested - fetched_count))
page=$(get_notifs "$page_num") || die "Failed to get notifications."
[[ -z $page ]] && break

page_num=$((page_num + 1))
# Each request can fetch at most 50 notifications. If a user requests 56, we cannot ask for
# 6 notifications 'per_page' on page number 2: that would return notifications 7-12, which we
# already fetched with the first request. Therefore, for a request of 56 notifications we call
# the REST API twice with the maximum 'per_page' size and truncate the second page accordingly.
if ((total_requested > 0)) && ((local_page_size < MAX_PER_PAGE_LIMIT)); then
page=$(head -n "$local_page_size" <<<"$page")
else
page_num=$((page_num + 1))
local_page_size=$(sed -n '$=' <<<"$page")
fi

new_notifs=$(process_page "$page") || die "Failed to process page."
all_notifs="${all_notifs}${new_notifs}"
fetched_count=$((fetched_count + local_page_size))
# If the number of fetched results equals the number of requested results, or if the number
# of items retrieved in this round is less than the maximum per page limit, we stop.
if ((fetched_count == total_requested)) || ((local_page_size < MAX_PER_PAGE_LIMIT)); then
break
fi
new_notifs=$(
echo "$page" | while IFS=$'\t' read -r updated_short iso8601 thread_id thread_state \
comment_url repo_full_name unread_symbol timefmt repo_abbreviated type url reason \
title number; do
if grep -q "Discussion" <<<"$type"; then
# https://docs.github.com/en/search-github/searching-on-github/searching-discussions
number="#$(gh api graphql --cache=100h --raw-field filter="$title in:title updated:>=$updated_short repo:$repo_full_name" \
--raw-field query="$graphql_query_discussion" --jq '.data.search.nodes | .[].number')" ||
die "Failed GraphQL discussion query."
elif ! grep -q "^null" <<<"$url"; then
if grep -q "Commit" <<<"$type"; then
number=$(basename "$url" | head -c 7)
elif grep -q "Release" <<<"$type"; then
# directly read the output into number and prerelease variables
if IFS=$'\t' read -r number prerelease < <(gh api --cache=100h --header "$GH_REST_API_VERSION" \
--method GET "$url" --jq '[.tag_name, .prerelease] | @tsv'); then
"$prerelease" && type="Pre-release"
else
# it may happen that URLs are retrieved but are already dead and therefore skipped
continue
fi
else
# gh api calls cost time, try to avoid them as much as possible
number=${url/*\//#}
fi
fi
printf "\n%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%b%s%b\t%s\t%s\n" \
"$iso8601" "$thread_id" "$thread_state" "$comment_url" "$repo_full_name" \
"$unread_symbol" "$timefmt" "$repo_abbreviated" "$type" "$GREEN" "$number" \
"$NC" "$reason" "$title"
done
) || die "Something went wrong"
all_notifs="$all_notifs$new_notifs"
# this is going to be a bit funky.
# if you specify a number larger than 100
# GitHub will ignore it and give you only 100
[[ $num_notifications != "0" ]] && break
done
# clear the dots we printed
echo >&2 -ne "\r\033[K"
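The arithmetic above caps every request at the API's per-page limit and trims the final page with 'head'. A hedged walk-through of the same expression with hypothetical numbers (a user asking for 56 notifications):

```sh
MAX_PER_PAGE_LIMIT=50
total_requested=56
fetched_count=0
local_page_size=$((total_requested - fetched_count > MAX_PER_PAGE_LIMIT ? \
MAX_PER_PAGE_LIMIT : total_requested - fetched_count))
echo "$local_page_size"   # 50 -> page 1 is fetched in full and page_num is advanced
fetched_count=$((fetched_count + local_page_size))
local_page_size=$((total_requested - fetched_count > MAX_PER_PAGE_LIMIT ? \
MAX_PER_PAGE_LIMIT : total_requested - fetched_count))
echo "$local_page_size"   # 6 -> page 2 is requested with per_page=50 and cut with 'head -n 6'
```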
@@ -265,6 +281,78 @@ print_notifs() {
fi
}

# Processes a page of GitHub notifications, extracting and formatting relevant details.
process_page() {
local page="$1"
while IFS=$'\t' read -r updated_short iso8601 thread_id thread_state \
comment_url repo_full_name unread_symbol timefmt repo_abbreviated type url reason \
title; do
local number="" modified_type
if grep -q "Discussion" <<<"$type"; then
number=$(process_discussion "$title" "$updated_short" "$repo_full_name") || return 1
elif ! grep -q "^null" <<<"$url"; then
if ! output=$(process_url "$type" "$url"); then
return 1
fi
read -r number modified_type <<<"$output"
if [[ -z $number ]]; then
continue
fi
fi
printf "\n%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%b%s%b\t%s\t%s\n" \
"$iso8601" "$thread_id" "$thread_state" "$comment_url" "$repo_full_name" \
"$unread_symbol" "$timefmt" "$repo_abbreviated" "${modified_type:-$type}" \
"$GREEN" "$number" "$NC" "$reason" "$title"
done <<<"$page"
}
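For reference, the read line above relies on plain word splitting of whatever process_url prints. A small sketch with hypothetical values:

```sh
output="v1.2.3 Pre-release"               # what process_url prints for a pre-release tag
read -r number modified_type <<<"$output"
echo "$number"                            # v1.2.3
echo "$modified_type"                     # Pre-release

output="#123"                             # an issue or PR URL yields only a number
read -r number modified_type <<<"$output"
echo "${modified_type:-<empty>}"          # stays empty, so printf falls back to the original type
```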

# Extracts and formats relevant information from a GitHub URL based on its type
# Returns: a number and optionally a new type; a dead release URL is fatal only in debug mode
process_url() {
local type="$1" url="$2"
local number prerelease
if grep -q "Commit" <<<"$type"; then
basename "$url" | head -c 7
elif grep -q "Release" <<<"$type"; then
if IFS=$'\t' read -r number prerelease < <(gh api "$url" \
--cache=100h \
--header "$GH_REST_API_VERSION" \
--method GET \
--jq '[.tag_name, .prerelease] | @tsv'); then
if "$prerelease"; then
echo "$number Pre-release"
else
echo "$number"
fi
else
# Release URLs may already be dead; outside of debug mode they are silently skipped.
# Since nothing is printed here, 'process_page' will drop the notification.
if $GH_NOTIFY_DEBUG_MODE; then
die "Failed to retrieve the release information: $url"
fi
fi
else
# Minimize gh API calls as they are time-consuming
echo "${url/*\//#}"
fi
}
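The fallback branch of process_url turns an API URL into a display number with plain parameter expansion, avoiding a gh call; the Commit branch shortens the SHA with head. A sketch with hypothetical URLs:

```sh
url="https://api.github.com/repos/octocat/hello-world/issues/123"
echo "${url/*\//#}"                  # -> #123 (everything up to the last '/' becomes '#')

commit_url="https://api.github.com/repos/octocat/hello-world/commits/0123456789abcdef0123456789abcdef01234567"
basename "$commit_url" | head -c 7   # -> 0123456 (abbreviated commit SHA)
```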

# Executes a GraphQL query for Discussion search using the provided information
# Returns the found number or raises an error
process_discussion() {
local title="$1" updated_short="$2" repo_full_name="$3"
local graphql_query_discussion
# https://docs.github.com/en/search-github/searching-on-github/searching-discussions
graphql_query_discussion=$'query ($filter: String!) {
search(query: $filter, type: DISCUSSION, first: 1) { nodes { ... on Discussion { number }}}}'
gh api graphql \
--cache=100h \
--raw-field query="$graphql_query_discussion" \
--raw-field filter="$title in:title updated:>=$updated_short repo:$repo_full_name" \
--jq '.data.search.nodes | "#\(.[].number)"' || die "Failed GraphQL discussion query."
}
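process_discussion builds its search filter from the notification title, timestamp, and repository. A hedged sketch of the filter string for a hypothetical notification:

```sh
title="How to configure fzf?"
updated_short="2024-04-01"
repo_full_name="octocat/hello-world"
printf '%s\n' "$title in:title updated:>=$updated_short repo:$repo_full_name"
# -> How to configure fzf? in:title updated:>=2024-04-01 repo:octocat/hello-world
# The string is passed as the $filter GraphQL variable; the --jq step prefixes '#'
# to the first matching discussion number.
```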

highlight_output() {
if type -p delta >/dev/null; then
# https://dandavison.github.io/delta
@@ -370,6 +458,7 @@ select_notif() {
# 'SHELL="$(which bash)"' is needed to use exported functions when the default shell
# is not bash
export -f print_help_text print_notifs get_notifs
export -f process_page process_discussion process_url
export -f highlight_output open_in_browser view_notification
export -f mark_all_read mark_individual_read
# The 'die' function is not exported because 'fzf' warns you about the error in
@@ -467,7 +556,7 @@ update_subscription() {
if IFS=$'\t' read -r object_type node_id viewer_can_subscribe viewer_subscription < <(gh api graphql \
--raw-field url_input="$update_subscription_url" \
--raw-field query="$graphql_query_resource" \
--jq '.data.resource | map(.) | @tsv' 2>/dev/null); then
--jq '.data.resource | map(.) | @tsv'); then
if [[ -z $object_type ]]; then
die "Your input appears to be an invalid URL: '$update_subscription_url'."
elif [[ $viewer_subscription != "SUBSCRIBED" && ! $viewer_can_subscribe ]]; then
18 changes: 9 additions & 9 deletions readme.md
@@ -66,15 +66,15 @@ gh notify [Flags]

### Table Format

| Field | Description |
| ------------- | ----------------------------------- |
| unread symbol | indicates unread status |
| time | last time the notification was read |
| repo | related repository |
| type | notification type |
| number | associated number |
| reason | trigger reason |
| title | notification title |
| Field | Description |
| ------------- | ------------------------------------------------ |
| unread symbol | indicates unread status |
| time | last read (unread &#9679;) otherwise update time |
| repo | related repository |
| type | notification type |
| number | associated number |
| reason | trigger reason |
| title | notification title |

---
