From dec84e5d47ccddf54c85681a6d446e414fbf6ca1 Mon Sep 17 00:00:00 2001 From: MoojMidge <56883549+MoojMidge@users.noreply.github.com> Date: Tue, 12 Nov 2024 07:59:45 +1100 Subject: [PATCH] Add progress dialog to My Subscription loading --- .../kodion/context/xbmc/xbmc_context.py | 1 + .../youtube_plugin/youtube/client/youtube.py | 52 +++++++++++++++---- .../lib/youtube_plugin/youtube/helper/v3.py | 31 +++++++++-- .../youtube/helper/yt_specials.py | 36 +++++++++---- 4 files changed, 96 insertions(+), 24 deletions(-) diff --git a/resources/lib/youtube_plugin/kodion/context/xbmc/xbmc_context.py b/resources/lib/youtube_plugin/kodion/context/xbmc/xbmc_context.py index 4e6dad5d1..febf7bf08 100644 --- a/resources/lib/youtube_plugin/kodion/context/xbmc/xbmc_context.py +++ b/resources/lib/youtube_plugin/kodion/context/xbmc/xbmc_context.py @@ -142,6 +142,7 @@ class XbmcContext(AbstractContext): 'my_channel': 30507, 'my_location': 30654, 'my_subscriptions': 30510, + 'my_subscriptions.loading': 575, 'my_subscriptions.filter.add': 30587, 'my_subscriptions.filter.added': 30589, 'my_subscriptions.filter.remove': 30588, diff --git a/resources/lib/youtube_plugin/youtube/client/youtube.py b/resources/lib/youtube_plugin/youtube/client/youtube.py index c2d64151c..5408e3818 100644 --- a/resources/lib/youtube_plugin/youtube/client/youtube.py +++ b/resources/lib/youtube_plugin/youtube/client/youtube.py @@ -1516,6 +1516,7 @@ def get_my_subscriptions(self, logged_in=False, do_filter=False, refresh=False, + progress_dialog=None, **kwargs): """ modified by PureHemp, using YouTube RSS for fetching latest videos @@ -1622,7 +1623,10 @@ def _get_channels(output, _params=params): 'Accept-Language': 'en-US,en;q=0.7,de;q=0.3' } - def _get_feed_cache(output, channel_id, _cache=cache, _refresh=refresh): + def _get_feed_cache(output, + channel_id, + _cache=cache, + _refresh=refresh): cached = _cache.get_item(channel_id) if cached: feed_details = cached['value'] @@ -1643,6 +1647,7 @@ def _get_feed_cache(output, channel_id, _cache=cache, _refresh=refresh): feeds[channel_id].update(feed_details) else: feeds[channel_id] = feed_details + return True, False def _get_feed(output, channel_id, _headers=headers): @@ -1670,10 +1675,19 @@ def _get_feed(output, channel_id, _headers=headers): } def _parse_feeds(feeds, + sort_method, + sort_limits, + progress_dialog=None, utf8=self._context.get_system_version().compatible(19), filters=subscription_filters, _ns=namespaces, _cache=cache): + if progress_dialog: + total = len(feeds) + progress_dialog.reset_total(new_total=total, + current=0, + total=total) + all_items = {} new_cache = {} for channel_id, feed in feeds.items(): @@ -1713,7 +1727,7 @@ def _parse_feeds(feeds, 'video_ids': set(), } feed_items.sort(reverse=True, - key=partial(_sort_by_date_time, + key=partial(sort_method, limits=feed_limits)) feed_items = feed_items[:min(1000, feed_limits['num'])] new_cache[channel_id] = { @@ -1734,9 +1748,19 @@ def _parse_feeds(feeds, else: all_items[channel_id] = feed_items + if progress_dialog: + progress_dialog.update(current=len(all_items)) + if new_cache: _cache.set_items(new_cache) - return list(chain.from_iterable(all_items.values())) + # filter, sorting by publish date and trim + if all_items: + return sorted( + chain.from_iterable(all_items.values()), + reverse=True, + key=partial(sort_method, limits=sort_limits), + ) + return None def _threaded_fetch(kwargs, output, @@ -1849,6 +1873,13 @@ def _threaded_fetch(kwargs, del payloads[pool_id] completed = [] iterator = iter(payloads) + if 
progress_dialog: + total = len(threaded_output['channel_ids']) + progress_dialog.grow_total( + new_total=total, + current=len(threaded_output['feeds']), + total=total, + ) continue payload = payloads[pool_id] @@ -1889,14 +1920,13 @@ def _threaded_fetch(kwargs, counter.acquire(True) new_thread.start() - items = _parse_feeds(threaded_output['feeds']) - - # filter, sorting by publish date and trim - if items: - items.sort(reverse=True, - key=partial(_sort_by_date_time, - limits=totals)) - else: + items = _parse_feeds( + threaded_output['feeds'], + sort_method=_sort_by_date_time, + sort_limits=totals, + progress_dialog=progress_dialog, + ) + if not items: return None if totals['num'] > totals['end']: diff --git a/resources/lib/youtube_plugin/youtube/helper/v3.py b/resources/lib/youtube_plugin/youtube/helper/v3.py index b8c9ba892..1c5e9f628 100644 --- a/resources/lib/youtube_plugin/youtube/helper/v3.py +++ b/resources/lib/youtube_plugin/youtube/helper/v3.py @@ -34,7 +34,11 @@ from ...kodion.utils import strip_html_from_text -def _process_list_response(provider, context, json_data, item_filter): +def _process_list_response(provider, + context, + json_data, + item_filter=None, + progress_dialog=None): yt_items = json_data.get('items', []) if not yt_items: context.log_warning('v3 response: Items list is empty') @@ -68,6 +72,12 @@ def _process_list_response(provider, context, json_data, item_filter): fanart_type = False untitled = context.localize('untitled') + if progress_dialog: + total = len(yt_items) + progress_dialog.reset_total(new_total=total, + current=0, + total=total) + for yt_item in yt_items: kind, is_youtube, is_plugin, kind_type = _parse_kind(yt_item) if not (is_youtube or is_plugin) or not kind_type: @@ -298,6 +308,8 @@ def _process_list_response(provider, context, json_data, item_filter): do_callbacks = True items.append(item) + if progress_dialog: + progress_dialog.update(current=len(items)) # this will also update the channel_id_dict with the correct channel_id # for each video. 
@@ -415,6 +427,12 @@ def _fetch(resource): completed = [] iterator = iter(resources) threads['loop'].set() + + if progress_dialog: + progress_dialog.reset_total(new_total=remaining, + current=0, + total=remaining) + while threads['loop'].wait(): try: resource_id = next(iterator) @@ -433,6 +451,8 @@ def _fetch(resource): if resource['complete']: remaining -= 1 completed.append(resource_id) + if progress_dialog: + progress_dialog.update(current=len(completed)) continue defer = resource['defer'] @@ -480,7 +500,8 @@ def response_to_items(provider, sort=None, reverse=False, process_next_page=True, - item_filter=None): + item_filter=None, + progress_dialog=None): kind, is_youtube, is_plugin, kind_type = _parse_kind(json_data) if not is_youtube and not is_plugin: context.log_debug('v3 response discarded: |%s|' % kind) @@ -494,7 +515,11 @@ def response_to_items(provider, override=params.get('item_filter'), ) result = _process_list_response( - provider, context, json_data, item_filter + provider, + context, + json_data, + item_filter=item_filter, + progress_dialog=progress_dialog, ) if not result: return [] diff --git a/resources/lib/youtube_plugin/youtube/helper/yt_specials.py b/resources/lib/youtube_plugin/youtube/helper/yt_specials.py index feabbc150..9c2cad36d 100644 --- a/resources/lib/youtube_plugin/youtube/helper/yt_specials.py +++ b/resources/lib/youtube_plugin/youtube/helper/yt_specials.py @@ -298,17 +298,33 @@ def _process_saved_playlists_tv(provider, context, client): def _process_my_subscriptions(provider, context, client, filtered=False): context.set_content(CONTENT.VIDEO_CONTENT) - params = context.get_params() - json_data = client.get_my_subscriptions( - page_token=params.get('page', 1), - logged_in=provider.is_logged_in(), - do_filter=filtered, - refresh=params.get('refresh'), - ) + with context.get_ui().create_progress_dialog( + heading=context.localize('my_subscriptions.loading'), + message=context.localize('please_wait'), + background=True, + message_template=( + '{wait} {{current}}/{{total}}'.format( + wait=context.localize('please_wait'), + ) + ), + ) as progress_dialog: + params = context.get_params() + json_data = client.get_my_subscriptions( + page_token=params.get('page', 1), + logged_in=provider.is_logged_in(), + do_filter=filtered, + refresh=params.get('refresh'), + progress_dialog=progress_dialog, + ) - if not json_data: - return False - return v3.response_to_items(provider, context, json_data) + if not json_data: + return False + return v3.response_to_items( + provider, + context, + json_data, + progress_dialog=progress_dialog, + ) def process(provider, context, re_match):
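---

For reference, the calls threaded through youtube.py and v3.py above (reset_total, grow_total, update, and the create_progress_dialog context manager used in yt_specials.py) assume a background progress dialog helper exposed by the plugin's UI layer. The standalone sketch below illustrates the interface the patch relies on; the method names and call patterns are taken from the hunks above, but the class name ProgressDialogSketch, the _refresh helper, and the print-based rendering are illustrative assumptions, not the plugin's actual implementation.

class ProgressDialogSketch(object):
    """Stand-in for the object returned by create_progress_dialog()."""

    def __init__(self,
                 heading,
                 message='',
                 message_template=None,
                 background=True,
                 total=0):
        self._heading = heading
        self._message = message
        self._template = message_template
        self._background = background
        self._current = 0
        self._total = total

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.close()

    def reset_total(self, new_total, **template_values):
        # Restart progress once the real workload size is known,
        # e.g. the number of subscription feeds to parse.
        self._total = new_total
        self._current = 0
        self._refresh(**template_values)

    def grow_total(self, new_total, **template_values):
        # The threaded channel fetch learns about channels in batches,
        # so the total is only ever allowed to increase.
        self._total = max(self._total, new_total)
        self._refresh(**template_values)

    def update(self, current=None, steps=1, **template_values):
        # Move to an absolute position, or advance by a number of steps.
        if current is None:
            self._current += steps
        else:
            self._current = current
        self._refresh(**template_values)

    def close(self):
        pass

    def _refresh(self, **template_values):
        values = {'current': self._current, 'total': self._total}
        values.update(template_values)
        message = (self._template.format(**values)
                   if self._template else
                   self._message)
        percent = int(100 * self._current / self._total) if self._total else 0
        # A real background dialog would forward this to
        # xbmcgui.DialogProgressBG.update(percent, heading, message).
        print('[%3d%%] %s - %s' % (percent, self._heading, message))


if __name__ == '__main__':
    # Mirrors the call pattern used by get_my_subscriptions() above.
    template = 'Please wait... {current}/{total}'
    with ProgressDialogSketch('My Subscriptions',
                              message_template=template) as dialog:
        dialog.reset_total(new_total=3, current=0, total=3)
        for done in range(1, 4):
            dialog.update(current=done)

Note on the message_template built in _process_my_subscriptions: the doubled braces mean the outer str.format() only fills {wait}, leaving literal {current}/{total} placeholders in the template, which the dialog then fills from the keyword arguments passed to each reset_total, grow_total and update call.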