From bb14d9aa7ddb117714e64bc087bcf52a3733c921 Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Tue, 16 Aug 2022 21:46:48 +0200 Subject: [PATCH 01/39] Update database schemes to 4.0 - rename dependency to library - drop load_order from libraries table - add python_versions to library_releases table - add migration scheme file --- app/lib/refresh_packages.py | 54 ++++----- app/models/{dependency.py => library.py} | 138 +++++++++++------------ app/models/package/find.py | 8 +- app/models/package/modify.py | 4 +- app/models/package/sources.py | 2 +- app/tasks/crawl.py | 20 ++-- setup/sql/down.sql | 3 +- setup/sql/migrations/libraries.sql | 11 ++ setup/sql/up.sql | 14 +-- 9 files changed, 132 insertions(+), 122 deletions(-) rename app/models/{dependency.py => library.py} (75%) create mode 100644 setup/sql/migrations/libraries.sql diff --git a/app/lib/refresh_packages.py b/app/lib/refresh_packages.py index cc0e656..aa2d3f0 100644 --- a/app/lib/refresh_packages.py +++ b/app/lib/refresh_packages.py @@ -4,23 +4,23 @@ import traceback from .package_control.providers import REPOSITORY_PROVIDERS, CHANNEL_PROVIDERS -from .package_control.download_manager import downloader, close_all_connections +from .package_control.download_manager import close_all_connections from .package_control.clients.readme_client import ReadmeClient from .. 
import config -from ..models import package, dependency +from ..models import package, library from .readme_renderer import render from .readme_images import cache -def refresh_packages(invalid_sources=None, invalid_dependency_sources=None): +def refresh_packages(invalid_package_sources=None, invalid_library_sources=None): """ Refresh the package information in the database - :param invalid_sources: + :param invalid_package_sources: A list of source URLs to ignore - :param invalid_dependency_sources: - A list of dependency source URLs to ignore + :param invalid_library_sources: + A list of library source URLs to ignore :return: A list of the names of all of the packages that were refreshed @@ -83,26 +83,26 @@ def resolve_path(path): readme_client = ReadmeClient(settings) - if invalid_sources: + if invalid_package_sources: if search and replace: mapped_invalid_sources = [] - for source in invalid_sources: + for source in invalid_package_sources: if source not in ignore: source = source.replace(replace, search) mapped_invalid_sources.append(source) - invalid_sources = mapped_invalid_sources + invalid_package_sources = mapped_invalid_sources - if invalid_dependency_sources: + if invalid_library_sources: if search and replace: - mapped_invalid_dependency_sources = [] - for source in invalid_dependency_sources: + mapped_invalid_library_sources = [] + for source in invalid_library_sources: if source not in ignore: source = source.replace(replace, search) - mapped_invalid_dependency_sources.append(source) - invalid_dependency_sources = mapped_invalid_dependency_sources + mapped_invalid_library_sources.append(source) + invalid_library_sources = mapped_invalid_library_sources - if not invalid_dependency_sources: - invalid_dependency_sources = None + if not invalid_library_sources: + invalid_library_sources = None for provider_cls in CHANNEL_PROVIDERS: if not provider_cls.match_url(channel): @@ -112,14 +112,14 @@ def resolve_path(path): break affected_packages = [] - 
affected_dependencies = [] + affected_libraries = [] for repository in repositories: for provider_cls in REPOSITORY_PROVIDERS: if not provider_cls.match_url(repository): continue provider = provider_cls(repository, settings) - for name, info in provider.get_packages(invalid_sources): + for name, info in provider.get_packages(invalid_package_sources): try: if search and replace: mapped_sources = [] @@ -155,7 +155,7 @@ def resolve_path(path): traceback.print_exc(file=sys.stderr) print('-' * 60, file=sys.stderr) - for name, info in provider.get_dependencies(invalid_dependency_sources): + for name, info in provider.get_libraries(invalid_library_sources): try: if search and replace: mapped_sources = [] @@ -163,27 +163,27 @@ def resolve_path(path): mapped_sources.append(source.replace(search, replace)) info['sources'] = mapped_sources - dependency.mark_found(name) - dependency.store(info) - affected_dependencies.append(name) + library.mark_found(name) + library.store(info) + affected_libraries.append(name) except (Exception) as e: - print('Exception processing dependency "%s":' % name, file=sys.stderr) + print('Exception processing library "%s":' % name, file=sys.stderr) print('-' * 60, file=sys.stderr) traceback.print_exc(file=sys.stderr) print('-' * 60, file=sys.stderr) for source, exception in provider.get_failed_sources(): package.modify.mark_missing(source, clean_url(exception), needs_review(exception)) - dependency.mark_missing(source, clean_url(exception), needs_review(exception)) + library.mark_missing(source, clean_url(exception), needs_review(exception)) for package_name, exception in provider.get_broken_packages(): package.modify.mark_missing_by_name(package_name, clean_url(exception), needs_review(exception)) - for dependency_name, exception in provider.get_broken_dependencies(): - dependency.mark_missing_by_name(dependency_name, clean_url(exception), needs_review(exception)) + for library_name, exception in provider.get_broken_libraries(): + 
library.mark_missing_by_name(library_name, clean_url(exception), needs_review(exception)) break close_all_connections() - return (affected_packages, affected_dependencies) + return (affected_packages, affected_libraries) diff --git a/app/models/dependency.py b/app/models/library.py similarity index 75% rename from app/models/dependency.py rename to app/models/library.py index ebe78e6..180bcfb 100644 --- a/app/models/dependency.py +++ b/app/models/library.py @@ -4,16 +4,15 @@ from ..lib.connection import connection -def all(limit_one_per_dependency=False): +def all(limit_one_per_library=False): """ - Fetches info about all dependencies for the purpose of writing JSON files + Fetches info about all libraries for the purpose of writing JSON files :return: A dict in the form: { - 'Dependency Name': { + 'Library Name': { 'name': 'Package Name', - 'load_order': '01', 'authors': ['author', 'names'], 'description': 'Package description', 'issues': 'http://example.com/issues', @@ -22,7 +21,8 @@ def all(limit_one_per_dependency=False): 'version': '1.0.0', 'url': 'https://example.com/download', 'sublime_text': '*', - 'platforms': ['*'] + 'platforms': ['*'], + 'python_versions': ['3.3', '3.8'] } ] } @@ -35,12 +35,11 @@ def all(limit_one_per_dependency=False): SELECT sources[1] AS repository, name, - load_order, authors, description, issues FROM - dependencies + libraries WHERE is_missing != TRUE AND removed != TRUE AND @@ -53,7 +52,6 @@ def all(limit_one_per_dependency=False): output[row['name']] = { 'repository': row['repository'], 'name': row['name'], - 'load_order': row['load_order'], 'authors': row['authors'], 'description': row['description'], 'issues': row['issues'], @@ -62,8 +60,9 @@ def all(limit_one_per_dependency=False): cursor.execute(""" SELECT - dr.dependency, + dr.library, dr.platforms, + dr.python_versions, dr.sublime_text, dr.version, dr.url, @@ -76,8 +75,8 @@ def all(limit_one_per_dependency=False): ELSE 0 END AS semver_variant FROM - dependency_releases AS dr 
INNER JOIN - dependencies AS d ON dr.dependency = d.name + library_releases AS dr INNER JOIN + libraries AS d ON dr.library = d.name WHERE d.is_missing != TRUE AND d.removed != TRUE AND @@ -103,45 +102,46 @@ def all(limit_one_per_dependency=False): END DESC """) - dependencies_found = {} + libraries_found = {} for row in cursor.fetchall(): - dependency = row['dependency'] - # Skip pre-releases for dependencies + library = row['library'] + # Skip pre-releases for libraries if row['semver_variant'] == -1: continue - key = '%s-%s-%s' % (dependency, row['sublime_text'], ','.join(row['platforms'])) - if limit_one_per_dependency: - if key in dependencies_found: + key = '%s-%s-%s' % (library, row['sublime_text'], ','.join(row['platforms'])) + if limit_one_per_library: + if key in libraries_found: continue release = { - 'platforms': row['platforms'], - 'sublime_text': row['sublime_text'], - 'version': row['version'], - 'url': row['url'] + 'platforms': row['platforms'], + 'python_versions': row['python_versions'], + 'sublime_text': row['sublime_text'], + 'version': row['version'], + 'url': row['url'] } if row['sha256']: release['sha256'] = row['sha256'] - output[dependency]['releases'].append(release) + output[library]['releases'].append(release) - if limit_one_per_dependency: - dependencies_found[key] = True + if limit_one_per_library: + libraries_found[key] = True return output def dependent_sources(source): """ - Fetches a list of sources needed to fully refresh all dependencies from the specified source + Fetches a list of sources needed to fully refresh all libraries from the specified source :param source: - The string source (URL) to find the dependencies of + The string source (URL) to find the libraries of :return: - A list of sources (URLs) for dependencies to be refreshed + A list of sources (URLs) for libraries to be refreshed """ with connection() as cursor: @@ -149,7 +149,7 @@ def dependent_sources(source): SELECT DISTINCT unnest(sources) AS source FROM - 
dependencies + libraries WHERE sources @> ARRAY[%s]::varchar[] """, [source]) @@ -158,13 +158,13 @@ def dependent_sources(source): def outdated_sources(minutes, limit): """ - Fetches a list of outdated dependency sources in the DB + Fetches a list of outdated library sources in the DB :param minutes: The int number of minutes to be considered "outdated" :return: - A list of sources (URLs) for dependencies that need to be refreshed + A list of sources (URLs) for libraries that need to be refreshed """ outdated_date = datetime.utcnow() - timedelta(minutes=minutes) @@ -178,7 +178,7 @@ def outdated_sources(minutes, limit): SELECT sources FROM - dependencies + libraries WHERE last_seen <= %s ORDER BY @@ -198,7 +198,7 @@ def invalid_sources(valid_sources): The list of sources that are valid :return: - A list of sources (URLs) for dependencies that should be ignored + A list of sources (URLs) for libraries that should be ignored """ with connection() as cursor: @@ -206,7 +206,7 @@ def invalid_sources(valid_sources): SELECT DISTINCT unnest(sources) AS source FROM - dependencies + libraries """) all_sources = [row['source'] for row in cursor] @@ -215,7 +215,7 @@ def invalid_sources(valid_sources): def old(): """ - Finds all dependencies that haven't been seen in at least two hours + Finds all libraries that haven't been seen in at least two hours :return: A list of dict objects containing the keys: @@ -231,7 +231,7 @@ def old(): sources, is_missing FROM - dependencies + libraries WHERE last_seen < CURRENT_TIMESTAMP - INTERVAL '2 hours' AND removed != TRUE AND @@ -241,30 +241,30 @@ def old(): return cursor.fetchall() -def mark_found(dependencies): +def mark_found(libraries): """ - Marks a dependencies as no longer missing + Marks a libraries as no longer missing - :param dependencies: - The name of the dependencies + :param libraries: + The name of the libraries """ with connection() as cursor: cursor.execute(""" UPDATE - dependencies + libraries SET is_missing = FALSE, 
missing_error = '', removed = FALSE WHERE name = %s - """, [dependencies]) + """, [libraries]) def mark_missing(source, error, needs_review): """ - Marks all dependencies from a source as currently missing + Marks all libraries from a source as currently missing :param source: The URL of the source that could not be contacted @@ -273,13 +273,13 @@ def mark_missing(source, error, needs_review): A unicode string of the error :param needs_review: - A bool if the dependency needs to be reviewed + A bool if the library needs to be reviewed """ with connection() as cursor: cursor.execute(""" UPDATE - dependencies + libraries SET is_missing = TRUE, missing_error = %s, @@ -289,45 +289,45 @@ def mark_missing(source, error, needs_review): """, [error, needs_review, source]) -def mark_missing_by_name(dependency, error, needs_review): +def mark_missing_by_name(library, error, needs_review): """ - Marks a dependency as missing + Marks a library as missing - :param dependency: - The name of the dependency + :param library: + The name of the library :param error: A unicode string of the error :param needs_review: - A bool if the dependency needs to be reviewed + A bool if the library needs to be reviewed """ with connection() as cursor: cursor.execute(""" UPDATE - dependencies + libraries SET is_missing = TRUE, missing_error = %s, needs_review = %s WHERE name = %s - """, [error, needs_review, dependency]) + """, [error, needs_review, library]) -def mark_removed(dependency): +def mark_removed(library): """ - Marks a dependency as removed + Marks a library as removed - :param dependency: - The name of the dependency + :param library: + The name of the library """ with connection() as cursor: cursor.execute(""" UPDATE - dependencies + libraries SET removed = TRUE, is_missing = FALSE, @@ -335,17 +335,16 @@ def mark_removed(dependency): needs_review = TRUE WHERE name = %s - """, [dependency]) + """, [library]) def store(values): """ - Stores dependency info in the database + Stores 
library info in the database :param values: A dict containing the following keys: `name` - `load_order` `author` `description` `issues` @@ -356,12 +355,11 @@ def store(values): name = values['name'] with connection() as cursor: - cursor.execute("SELECT name FROM dependencies WHERE name = %s", [name]) + cursor.execute("SELECT name FROM libraries WHERE name = %s", [name]) if cursor.fetchone() == None: sql = """ - INSERT INTO dependencies ( - load_order, + INSERT INTO libraries ( authors, description, issues, @@ -372,7 +370,6 @@ def store(values): %s, %s, %s, - %s, CURRENT_TIMESTAMP, %s, %s @@ -381,9 +378,8 @@ def store(values): else: sql = """ UPDATE - dependencies + libraries SET - load_order = %s, authors = %s, description = %s, issues = %s, @@ -394,12 +390,11 @@ def store(values): """ if not isinstance(values['author'], list): - authors = re.split('\s*,\s*', values['author']) + authors = re.split(r'\s*,\s*', values['author']) else: authors = values['author'] cursor.execute(sql, [ - values['load_order'], authors, values['description'], values['issues'], @@ -407,13 +402,14 @@ def store(values): name ]) - cursor.execute("DELETE FROM dependency_releases WHERE dependency = %s", [name]) + cursor.execute("DELETE FROM library_releases WHERE library = %s", [name]) for release in values['releases']: sql = """ - INSERT INTO dependency_releases ( - dependency, + INSERT INTO library_releases ( + library, platforms, + python_versions, sublime_text, version, url, @@ -424,6 +420,7 @@ def store(values): %s, %s, %s, + %s, %s ) """ @@ -445,6 +442,7 @@ def store(values): cursor.execute(sql, [ name, release['platforms'], + release['python_versions'], sublime_text, release['version'], release['url'], diff --git a/app/models/package/find.py b/app/models/package/find.py index ef24558..63ea5d0 100644 --- a/app/models/package/find.py +++ b/app/models/package/find.py @@ -45,7 +45,7 @@ def all(limit_one_per_package=False, only_package_control=False): 'version': '1.0.0', 'url': 
'http://example.com/package.zip', 'date': '2015-01-01 10:15:00', - 'dependencies': [] + 'libraries': [] }, ... ] @@ -111,7 +111,7 @@ def all(limit_one_per_package=False, only_package_control=False): r.version, r.url, r.date, - r.dependencies, + r.libraries, CASE WHEN r.version ~ E'^\\\\d+\\\\.\\\\d+\\\\.\\\\d+-' then -1 @@ -199,8 +199,8 @@ def all(limit_one_per_package=False, only_package_control=False): 'date': row['date'] } - if row['dependencies']: - release['dependencies'] = row['dependencies'] + if row['libraries']: + release['libraries'] = row['libraries'] output[package]['releases'].append(release) diff --git a/app/models/package/modify.py b/app/models/package/modify.py index 6f6fc44..73508ab 100644 --- a/app/models/package/modify.py +++ b/app/models/package/modify.py @@ -568,7 +568,7 @@ def store(values): version, url, date, - dependencies + libraries ) VALUES ( %s, %s, @@ -587,7 +587,7 @@ def store(values): release['version'], release['url'], release['date'], - release.get('dependencies', []) + release.get('libraries', []) ]) diff --git a/app/models/package/sources.py b/app/models/package/sources.py index 1adc5ec..510032f 100644 --- a/app/models/package/sources.py +++ b/app/models/package/sources.py @@ -8,7 +8,7 @@ def dependent_sources(source): Fetches a list of sources needed to fully refresh all packages from the specified source :param source: - The string source (URL) to find the dependencies of + The string source (URL) to find the libraries of :return: A list of sources (URLs) for packages to be refreshed diff --git a/app/tasks/crawl.py b/app/tasks/crawl.py index 0029668..10665c6 100644 --- a/app/tasks/crawl.py +++ b/app/tasks/crawl.py @@ -3,7 +3,7 @@ import sys from ..models import package -from ..models import dependency +from ..models import library from ..lib.refresh_packages import refresh_packages @@ -31,18 +31,18 @@ if explicit_package: - valid_sources = package.sources.sources_for(explicit_package) - valid_dependency_sources = [] + 
valid_package_sources = package.sources.sources_for(explicit_package) + valid_library_sources = [] else: - valid_sources = package.sources.outdated_sources(60, 200) - valid_dependency_sources = dependency.outdated_sources(60, 200) + valid_package_sources = package.sources.outdated_sources(60, 200) + valid_library_sources = library.outdated_sources(60, 200) -invalid_sources = package.sources.invalid_sources(valid_sources) -invalid_dependency_sources = dependency.invalid_sources(valid_dependency_sources) +invalid_package_sources = package.sources.invalid_sources(valid_package_sources) +invalid_library_sources = library.invalid_sources(valid_library_sources) -affected_packages, affected_dependencies = refresh_packages(invalid_sources, invalid_dependency_sources) +affected_packages, affected_libraries = refresh_packages(invalid_package_sources, invalid_library_sources) print('AFFECTED PACKAGES') print(affected_packages) -print('\nAFFECTED DEPENDENCIES') -print(affected_dependencies) +print('\nAFFECTED LIBRARIES') +print(affected_libraries) diff --git a/setup/sql/down.sql b/setup/sql/down.sql index 486fa0a..e2c7690 100644 --- a/setup/sql/down.sql +++ b/setup/sql/down.sql @@ -17,7 +17,8 @@ DROP TABLE system_stats; DROP TABLE package_stats; DROP TABLE readmes; DROP TABLE releases; -DROP TABLE dependencies; +DROP TABLE library_releases; +DROP TABLE libraries; DROP TABLE packages; DROP TABLE usage; DROP TABLE unique_package_installs; diff --git a/setup/sql/migrations/libraries.sql b/setup/sql/migrations/libraries.sql new file mode 100644 index 0000000..3e89052 --- /dev/null +++ b/setup/sql/migrations/libraries.sql @@ -0,0 +1,11 @@ +ALTER TABLE releases RENAME dependencies TO libraries; + +ALTER TABLE dependencies RENAME TO libraries; +ALTER TABLE libraries DROP COLUMN load_order; + +ALTER TABLE dependency_releases RENAME TO library_releases; +ALTER TABLE library_releases RENAME dependency TO library; + +ALTER TABLE library_releases ADD COLUMN python_versions varchar[]; 
+UPDATE library_releases SET python_versions = '{3.3}'; +ALTER TABLE library_releases ALTER python_versions SET NOT NULL; diff --git a/setup/sql/up.sql b/setup/sql/up.sql index 4019c00..81bde0f 100644 --- a/setup/sql/up.sql +++ b/setup/sql/up.sql @@ -84,9 +84,8 @@ CREATE TABLE packages ( ); -CREATE TABLE dependencies ( +CREATE TABLE libraries ( name varchar(500) NOT NULL PRIMARY KEY, - load_order varchar(2) NOT NULL, description varchar NOT NULL DEFAULT '', authors varchar[], issues varchar NOT NULL DEFAULT '', @@ -99,14 +98,15 @@ CREATE TABLE dependencies ( ); -CREATE TABLE dependency_releases ( - dependency varchar(500) NOT NULL REFERENCES dependencies(name) ON DELETE CASCADE ON UPDATE CASCADE, +CREATE TABLE library_releases ( + library varchar(500) NOT NULL REFERENCES libraries(name) ON DELETE CASCADE ON UPDATE CASCADE, platforms varchar[] NOT NULL, + python_versions varchar[] NOT NULL, sublime_text varchar NOT NULL, version varchar NOT NULL, url varchar NOT NULL, sha256 varchar, - PRIMARY KEY(dependency, platforms, sublime_text, version) + PRIMARY KEY(library, platforms, sublime_text, version) ); @@ -118,7 +118,7 @@ CREATE TABLE releases ( version varchar NOT NULL, url varchar NOT NULL, date timestamp NOT NULL, - dependencies varchar[], + libraries varchar[], PRIMARY KEY(package, platforms, sublime_text, version) ); @@ -272,7 +272,7 @@ $$ LANGUAGE plpgsql; CREATE FUNCTION array_unique(arr anyarray) RETURNS anyarray LANGUAGE sql AS $$ SELECT array_agg(DISTINCT a) FROM ( - SELECT unnest(arr) a + SELECT unnest(arr) a ORDER BY a ) sq $$; From c021c4488a047e1ee32b7ab4e447b7d687b1fe89 Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Thu, 18 Aug 2022 17:54:13 +0200 Subject: [PATCH 02/39] Update package_control to origin/four-point-oh commit aede06d414bb9a3a6f710690f2f3f698c7303beb --- app/lib/ca_certs_legacy.py | 6 +- app/lib/package_control/ca_certs.py | 35 +- .../clients/bitbucket_client.py | 135 ++-- .../clients/client_exception.py | 13 +- 
.../package_control/clients/github_client.py | 188 ++--- .../package_control/clients/gitlab_client.py | 307 +++----- .../clients/json_api_client.py | 11 +- .../package_control/clients/readme_client.py | 10 +- app/lib/package_control/cmd.py | 35 +- app/lib/package_control/console_write.py | 13 +- app/lib/package_control/download_manager.py | 60 +- .../downloaders/basic_auth_downloader.py | 8 +- .../downloaders/binary_not_found_error.py | 13 +- .../downloaders/caching_downloader.py | 25 +- .../downloaders/cli_downloader.py | 26 +- .../downloaders/curl_downloader.py | 64 +- .../downloaders/decoding_downloader.py | 12 +- .../downloaders/downloader_exception.py | 13 +- .../package_control/downloaders/http_error.py | 13 +- .../downloaders/limiting_downloader.py | 11 +- .../downloaders/non_clean_exit_error.py | 19 +- .../downloaders/non_http_error.py | 13 +- .../downloaders/oscrypto_downloader.py | 36 +- .../downloaders/rate_limit_exception.py | 2 +- .../downloaders/urllib_downloader.py | 82 +-- .../downloaders/wget_downloader.py | 36 +- .../downloaders/wininet_downloader.py | 99 ++- .../package_control/file_not_found_error.py | 17 - app/lib/package_control/http/__init__.py | 64 -- .../http/debuggable_http_connection.py | 18 +- .../http/debuggable_http_handler.py | 7 +- .../http/debuggable_http_response.py | 21 +- .../http/invalid_certificate_exception.py | 22 +- .../http/persistent_handler.py | 63 +- .../http/validating_https_connection.py | 129 ++-- .../http/validating_https_handler.py | 103 +-- app/lib/package_control/http_cache.py | 56 +- app/lib/package_control/open_compat.py | 39 - .../providers/base_repository_provider.py | 131 ++++ .../bitbucket_repository_provider.py | 87 +-- .../providers/channel_provider.py | 179 ++--- .../providers/github_repository_provider.py | 92 +-- .../providers/github_user_provider.py | 102 +-- .../providers/gitlab_repository_provider.py | 91 +-- .../providers/gitlab_user_provider.py | 100 +-- .../providers/provider_exception.py | 13 +- 
.../providers/release_selector.py | 53 +- .../providers/repository_provider.py | 695 ++++++++---------- .../providers/schema_compat.py | 37 + app/lib/package_control/versions.py | 2 +- app/tasks/generate_channel_v3_json.py | 6 +- app/tasks/update_package_control_lib.py | 1 - setup/scripts/extract_package_control.py | 1 - 53 files changed, 1347 insertions(+), 2067 deletions(-) delete mode 100644 app/lib/package_control/file_not_found_error.py delete mode 100644 app/lib/package_control/open_compat.py create mode 100644 app/lib/package_control/providers/base_repository_provider.py diff --git a/app/lib/ca_certs_legacy.py b/app/lib/ca_certs_legacy.py index a5437c8..18d70cc 100644 --- a/app/lib/ca_certs_legacy.py +++ b/app/lib/ca_certs_legacy.py @@ -4,8 +4,6 @@ from .package_control.cmd import Cli from .package_control.ca_certs import get_system_ca_bundle_path -from .package_control.open_compat import open_compat, read_compat - def find_root_ca_cert(settings, domain): @@ -50,8 +48,8 @@ def find_root_ca_cert(settings, domain): def get_ca_cert_by_subject(settings, subject): bundle_path = get_system_ca_bundle_path(settings) - with open_compat(bundle_path, 'r') as f: - contents = read_compat(f) + with open(bundle_path, 'r', encoding='utf-8') as f: + contents = f.read() temp = [] diff --git a/app/lib/package_control/ca_certs.py b/app/lib/package_control/ca_certs.py index 199a83d..afccd19 100644 --- a/app/lib/package_control/ca_certs.py +++ b/app/lib/package_control/ca_certs.py @@ -3,7 +3,6 @@ import sys from .console_write import console_write -from .open_compat import open_compat, read_compat from .sys_path import pc_cache_dir, user_config_dir from .deps.oscrypto import use_ctypes @@ -42,22 +41,22 @@ def get_ca_bundle_path(settings): regenerate = regenerate or os.path.getmtime(user_ca_bundle_path) > os.path.getmtime(merged_ca_bundle_path) if regenerate: - with open(merged_ca_bundle_path, 'wb') as merged: + with open(merged_ca_bundle_path, 'w', encoding='utf-8') as merged: if 
system_ca_bundle_path: - with open_compat(system_ca_bundle_path, 'r') as system: - system_certs = read_compat(system).strip() - merged.write(system_certs.encode('utf-8')) + with open(system_ca_bundle_path, 'r', encoding='utf-8') as system: + system_certs = system.read().strip() + merged.write(system_certs) if len(system_certs) > 0: - merged.write(b'\n') + merged.write('\n') if os.path.exists(user_ca_bundle_path): - with open_compat(user_ca_bundle_path, 'r') as user: - user_certs = read_compat(user).strip() - merged.write(user_certs.encode('utf-8')) + with open(user_ca_bundle_path, 'r', encoding='utf-8') as user: + user_certs = user.read().strip() + merged.write(user_certs) if len(user_certs) > 0: - merged.write(b'\n') + merged.write('\n') if settings.get('debug'): console_write( - u''' + ''' Regenerated the merged CA bundle from the system and user CA bundles ''' ) @@ -82,7 +81,7 @@ def get_user_ca_bundle_path(settings): if not os.path.exists(user_ca_bundle_path): if settings.get('debug'): console_write( - u''' + ''' Created blank user CA bundle ''' ) @@ -103,14 +102,14 @@ def print_cert_subject(cert, reason): if reason is None: console_write( - u''' + ''' Exported certificate: %s ''', cert.subject.human_friendly ) else: console_write( - u''' + ''' Skipped certificate: %s - reason %s ''', (cert.subject.human_friendly, reason) @@ -156,7 +155,7 @@ def get_system_ca_bundle_path(settings): cert_callback = None if debug: console_write( - u''' + ''' Generating new CA bundle from system keychain ''' ) @@ -164,7 +163,7 @@ def get_system_ca_bundle_path(settings): trust_list.get_path(ca_bundle_dir, hours_to_cache, cert_callback=cert_callback) if debug: console_write( - u''' + ''' Finished generating new CA bundle at %s (%d bytes) ''', (ca_path, os.stat(ca_path).st_size) @@ -172,7 +171,7 @@ def get_system_ca_bundle_path(settings): elif debug: console_write( - u''' + ''' Found previously exported CA bundle at %s (%d bytes) ''', (ca_path, os.stat(ca_path).st_size) @@ -200,7 
+199,7 @@ def get_system_ca_bundle_path(settings):

     if debug and ca_path:
         console_write(
-            u'''
+            '''
            Found system CA bundle at %s (%d bytes)
            ''',
            (ca_path, os.stat(ca_path).st_size)
diff --git a/app/lib/package_control/clients/bitbucket_client.py b/app/lib/package_control/clients/bitbucket_client.py
index 0fb7ed5..1518d0c 100644
--- a/app/lib/package_control/clients/bitbucket_client.py
+++ b/app/lib/package_control/clients/bitbucket_client.py
@@ -1,12 +1,9 @@
 import re
+from urllib.parse import quote
 
 from ..versions import version_sort, version_process
 from .json_api_client import JSONApiClient
-
-try:
-    from urllib import quote
-except (ImportError):
-    from urllib.parse import quote
+from ..downloaders.downloader_exception import DownloaderException
 
 
 # A predefined list of readme filenames to look for
@@ -25,40 +22,60 @@ class BitBucketClient(JSONApiClient):
 
-    def make_tags_url(self, repo):
+    @staticmethod
+    def make_repo_url(owner_name, repo_name):
+        """
+        Generate the repository URL for a BitBucket repo from the given
+        owner and repository names
+
+        :param owner_name:
+            The repository owner name
+
+        :param repo_name:
+            The repository name
+
+        :return:
+            The repository URL of given owner and repo name
+        """
+
+        return 'https://bitbucket.com/%s/%s' % (quote(owner_name), quote(repo_name))
+
+    @staticmethod
+    def make_tags_url(repo_url):
         """
         Generate the tags URL for a BitBucket repo if the value passed is a
         BitBucket repository URL
 
-        :param repo:
+        :param repo_url:
             The repository URL
 
         :return:
-            The tags URL if repo was a BitBucket repo, otherwise False
+            The tags URL if repo_url was a BitBucket repo, otherwise False
         """
 
-        match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', repo)
+        match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', repo_url)
         if not match:
             return False
 
         return 'https://bitbucket.org/%s#tags' % match.group(1)
 
-    def make_branch_url(self, repo, branch):
+    @staticmethod
+    def make_branch_url(repo_url, branch):
         """
         Generate the branch URL for 
a BitBucket repo if the value passed is a BitBucket repository URL - :param repo: + :param repo_url: The repository URL :param branch: The branch name :return: - The branch URL if repo was a BitBucket repo, otherwise False + The branch URL if repo_url was a BitBucket repo, otherwise False """ - match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', repo) + match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', repo_url) if not match: return False @@ -77,7 +94,8 @@ def download_info(self, url, tag_prefix=None): tag that is a valid semver version. :param tag_prefix: - If the URL is a tags URL, only match tags that have this prefix + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. :raises: DownloaderException: when there is an error downloading @@ -91,12 +109,13 @@ def download_info(self, url, tag_prefix=None): `date` - the ISO-8601 timestamp string when the version was published """ - tags_match = re.match('https?://bitbucket.org/([^/]+/[^#/]+)/?#tags$', url) + output = [] version = None url_pattern = 'https://bitbucket.org/%s/get/%s.zip' - output = [] + # tag based releases + tags_match = re.match('https?://bitbucket.org/([^/]+/[^#/]+)/?#tags$', url) if tags_match: user_repo = tags_match.group(1) @@ -113,34 +132,44 @@ def download_info(self, url, tag_prefix=None): if not tag_info: return False - used_versions = {} + max_releases = self.settings.get('max_releases', 0) + + used_versions = set() for info in tag_info: version = info['version'] if version in used_versions: continue + tag = info['prefix'] + version output.append({ 'url': url_pattern % (user_repo, tag), 'version': version, 'date': tags_list[tag] }) - used_versions[version] = True + used_versions.add(version) + if max_releases > 0 and len(used_versions) >= max_releases: + break + # branch based releases else: user_repo, branch = self._user_repo_branch(url) if not user_repo: - return user_repo + return None + + if branch is 
None: + repo_info = self.fetch_json(self._make_api_url(user_repo)) + branch = repo_info['mainbranch'].get('name', 'master') branch_url = self._make_api_url(user_repo, '/refs/branches/%s' % branch) branch_info = self.fetch_json(branch_url) timestamp = branch_info['target']['date'][0:19].replace('T', ' ') - output.append({ + output = [{ 'url': url_pattern % (user_repo, branch), 'version': re.sub(r'[\-: ]', '.', timestamp), 'date': timestamp - }) + }] return output @@ -173,25 +202,40 @@ def repo_info(self, url): return user_repo api_url = self._make_api_url(user_repo) + repo_info = self.fetch_json(api_url) - info = self.fetch_json(api_url) + if branch is None: + branch = repo_info['mainbranch'].get('name', 'master') - issues_url = u'https://bitbucket.org/%s/issues' % user_repo + issues_url = 'https://bitbucket.org/%s/issues' % user_repo - author = info['owner'].get('nickname') + author = repo_info['owner'].get('nickname') if author is None: - author = info['owner'].get('username') + author = repo_info['owner'].get('username') return { - 'name': info['name'], - 'description': info['description'] or 'No description provided', - 'homepage': info['website'] or url, + 'name': repo_info['name'], + 'description': repo_info['description'] or 'No description provided', + 'homepage': repo_info['website'] or url, 'author': author, 'donate': None, 'readme': self._readme_url(user_repo, branch), - 'issues': issues_url if info['has_issues'] else None + 'issues': issues_url if repo_info['has_issues'] else None } + def user_info(self, url): + """ + For API compatibility with other clients. 
+ + :param url: + The URL to the repository, in one of the forms: + https://bitbucket.org/{user} + + :return: + None + """ + return None + def _main_branch_name(self, user_repo): """ Fetch the name of the default branch @@ -251,14 +295,19 @@ def _readme_url(self, user_repo, branch, prefer_cached=False): listing_url = self._make_api_url(user_repo, '/src/%s/?pagelen=100' % branch) - while listing_url: - root_dir_info = self.fetch_json(listing_url, prefer_cached) + try: + while listing_url: + root_dir_info = self.fetch_json(listing_url, prefer_cached) - for entry in root_dir_info['values']: - if entry['path'].lower() in _readme_filenames: - return 'https://bitbucket.org/%s/raw/%s/%s' % (user_repo, branch, entry['path']) + for entry in root_dir_info['values']: + if entry['path'].lower() in _readme_filenames: + return 'https://bitbucket.org/%s/raw/%s/%s' % (user_repo, branch, entry['path']) - listing_url = root_dir_info['next'] if 'next' in root_dir_info else None + listing_url = root_dir_info['next'] if 'next' in root_dir_info else None + + except (DownloaderException) as e: + if 'HTTP error 404' not in str(e): + raise return None @@ -279,18 +328,12 @@ def _user_repo_branch(self, url): A tuple of (user/repo, branch name) or (None, None) if not matching """ - repo_match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', url) branch_match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/src/([^/]+)/?$', url) + if branch_match: + return branch_match.groups() + repo_match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', url) if repo_match: - user_repo = repo_match.group(1) - branch = self._main_branch_name(user_repo) - - elif branch_match: - user_repo = branch_match.group(1) - branch = branch_match.group(2) - - else: - return (None, None) + return (repo_match.group(1), None) - return (user_repo, branch) + return (None, None) diff --git a/app/lib/package_control/clients/client_exception.py b/app/lib/package_control/clients/client_exception.py index da90c1c..a776f9d 
100644 --- a/app/lib/package_control/clients/client_exception.py +++ b/app/lib/package_control/clients/client_exception.py @@ -1,17 +1,6 @@ -import sys - - class ClientException(Exception): """If a client could not fetch information""" - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - def __bytes__(self): - return self.__unicode__().encode('utf-8') + return self.__str__().encode('utf-8') diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py index cf5b57a..4ec1ee2 100644 --- a/app/lib/package_control/clients/github_client.py +++ b/app/lib/package_control/clients/github_client.py @@ -1,13 +1,5 @@ import re - -try: - # Python 3 - from urllib.parse import urlencode, quote - str_cls = str -except (ImportError): - # Python 2 - from urllib import urlencode, quote - str_cls = unicode # noqa +from urllib.parse import urlencode, quote from ..versions import version_sort, version_process from .json_api_client import JSONApiClient @@ -16,40 +8,60 @@ class GitHubClient(JSONApiClient): - def make_tags_url(self, repo): + @staticmethod + def make_repo_url(owner_name, repo_name): """ Generate the tags URL for a GitHub repo if the value passed is a GitHub repository URL - :param repo: + :param owener_name: + The repository owner name + + :param repo_name: + The repository name + + :return: + The repositoy URL of given owner and repo name + """ + + return 'https://github.com/%s/%s' % (quote(owner_name), quote(repo_name)) + + @staticmethod + def make_tags_url(repo_url): + """ + Generate the tags URL for a GitHub repo if the value passed is a GitHub + repository URL + + :param repo_url: The repository URL :return: - The tags URL if repo was a GitHub repo, otherwise False + The tags URL if repo was a GitHub repo_url, otherwise False """ - match = re.match('https?://github.com/([^/]+/[^/]+)/?$', repo) + match = 
re.match('https?://github.com/([^/]+/[^/]+)/?$', repo_url) if not match: return False return 'https://github.com/%s/tags' % match.group(1) - def make_branch_url(self, repo, branch): + @staticmethod + def make_branch_url(repo_url, branch): """ Generate the branch URL for a GitHub repo if the value passed is a GitHub repository URL - :param repo: + :param repo_url: The repository URL :param branch: The branch name :return: - The branch URL if repo was a GitHub repo, otherwise False + The branch URL if repo_url was a GitHub repo, otherwise False """ - match = re.match('https?://github.com/([^/]+/[^/]+)/?$', repo) + match = re.match('https?://github.com/([^/]+/[^/]+)/?$', repo_url) if not match: return False @@ -68,7 +80,8 @@ def download_info(self, url, tag_prefix=None): tag that is a valid semver version. :param tag_prefix: - If the URL is a tags URL, only match tags that have this prefix + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. 
:raises: DownloaderException: when there is an error downloading @@ -82,61 +95,64 @@ def download_info(self, url, tag_prefix=None): `date` - the ISO-8601 timestamp string when the version was published """ - tags_match = re.match('https?://github.com/([^/]+/[^/]+)/tags/?$', url) + output = [] version = None url_pattern = 'https://codeload.github.com/%s/zip/%s' - output = [] + # tag based releases + tags_match = re.match('https?://github.com/([^/]+/[^/]+)/tags/?$', url) if tags_match: user_repo = tags_match.group(1) tags_url = self._make_api_url(user_repo, '/tags?per_page=100') - tags_list = self.fetch_json(tags_url) - tags = [tag['name'] for tag in tags_list] - tag_info = version_process(tags, tag_prefix) + tags_json = self.fetch_json(tags_url) + tag_urls = {tag['name']: tag['commit']['url'] for tag in tags_json} + tag_info = version_process(tag_urls.keys(), tag_prefix) tag_info = version_sort(tag_info, reverse=True) if not tag_info: return False - used_versions = {} + max_releases = self.settings.get('max_releases', 0) + + used_versions = set() for info in tag_info: version = info['version'] if version in used_versions: continue + tag = info['prefix'] + version + tag_info = self.fetch_json(tag_urls[tag]) + timestamp = tag_info['commit']['committer']['date'][0:19].replace('T', ' ') + output.append({ 'url': url_pattern % (user_repo, tag), - 'commit': tag, - 'version': version + 'version': version, + 'date': timestamp }) - used_versions[version] = True + used_versions.add(version) + if max_releases > 0 and len(used_versions) >= max_releases: + break + # branch based releases else: user_repo, branch = self._user_repo_branch(url) if not user_repo: - return user_repo + return None if branch is None: repo_info = self.fetch_json(self._make_api_url(user_repo)) branch = repo_info.get('default_branch', 'master') - output.append({ - 'url': url_pattern % (user_repo, branch), - 'commit': branch - }) - - for release in output: - query_string = urlencode({'sha': 
release['commit'], 'per_page': 1}) - commit_url = self._make_api_url(user_repo, '/commits?%s' % query_string) - commit_info = self.fetch_json(commit_url) - - timestamp = commit_info[0]['commit']['committer']['date'][0:19].replace('T', ' ') + branch_url = self._make_api_url(user_repo, '/branches/%s' % branch) + branch_info = self.fetch_json(branch_url) - if 'version' not in release: - release['version'] = re.sub(r'[\-: ]', '.', timestamp) - release['date'] = timestamp + timestamp = branch_info['commit']['commit']['committer']['date'][0:19].replace('T', ' ') - del release['commit'] + output = [{ + 'url': url_pattern % (user_repo, branch), + 'version': re.sub(r'[\-: ]', '.', timestamp), + 'date': timestamp + }] return output @@ -169,21 +185,12 @@ def repo_info(self, url): return user_repo api_url = self._make_api_url(user_repo) + repo_info = self.fetch_json(api_url) - info = self.fetch_json(api_url) if branch is None: - branch = info.get('default_branch', 'master') - - output = self._extract_repo_info(info) - output['readme'] = None + branch = repo_info.get('default_branch', 'master') - readme_info = self._readme_info(user_repo, branch) - if not readme_info: - return output - - output['readme'] = 'https://raw.githubusercontent.com/%s/%s/%s' % ( - user_repo, branch, readme_info['path']) - return output + return self._extract_repo_info(branch, repo_info) def user_info(self, url): """ @@ -218,26 +225,18 @@ def user_info(self, url): repos_info = self.fetch_json(api_url) - output = [] - for info in repos_info: - user_repo = '%s/%s' % (user, info['name']) - branch = info.get('default_branch', 'master') - - repo_output = self._extract_repo_info(info) - repo_output['readme'] = None - - readme_info = self._readme_info(user_repo, branch) - if readme_info: - repo_output['readme'] = 'https://raw.githubusercontent.com/%s/%s/%s' % ( - user_repo, branch, readme_info['path']) + return [ + self._extract_repo_info(info.get('default_branch', 'master'), info) + for info in repos_info + ] 
- output.append(repo_output) - return output - - def _extract_repo_info(self, result): + def _extract_repo_info(self, branch, result): """ Extracts information about a repository from the API result + :param branch: + The branch to return data from + :param result: A dict representing the data returned from the GitHub API @@ -247,18 +246,26 @@ def _extract_repo_info(self, result): `description` `homepage` - URL of the homepage `author` + `readme` - URL of the homepage `issues` - URL of bug tracker `donate` - URL of a donate page """ - issues_url = u'https://github.com/%s/%s/issues' % (result['owner']['login'], result['name']) + user_name = result['owner']['login'] + repo_name = result['name'] + user_repo = '%s/%s' % (user_name, repo_name) + + issues_url = None + if result['has_issues']: + issues_url = 'https://github.com/%s/issues' % user_repo return { - 'name': result['name'], + 'name': repo_name, 'description': result['description'] or 'No description provided', 'homepage': result['homepage'] or result['html_url'], - 'author': result['owner']['login'], - 'issues': issues_url if result['has_issues'] else None, + 'author': user_name, + 'readme': self._readme_url(user_repo, branch), + 'issues': issues_url, 'donate': None } @@ -278,7 +285,7 @@ def _make_api_url(self, user_repo, suffix=''): return 'https://api.github.com/repos/%s%s' % (user_repo, suffix) - def _readme_info(self, user_repo, branch, prefer_cached=False): + def _readme_url(self, user_repo, branch, prefer_cached=False): """ Fetches the raw GitHub API information about a readme @@ -301,12 +308,17 @@ def _readme_info(self, user_repo, branch, prefer_cached=False): query_string = urlencode({'ref': branch}) readme_url = self._make_api_url(user_repo, '/readme?%s' % query_string) + try: - return self.fetch_json(readme_url, prefer_cached) + readme_file = self.fetch_json(readme_url, prefer_cached).get('path') + if readme_file: + return 'https://raw.githubusercontent.com/%s/%s/%s' % (user_repo, branch, readme_file) 
+ except (DownloaderException) as e: - if str_cls(e).find('HTTP error 404') != -1: - return None - raise + if 'HTTP error 404' not in str(e): + raise + + return None def _user_repo_branch(self, url): """ @@ -321,14 +333,12 @@ def _user_repo_branch(self, url): A tuple of (user/repo, branch name) or (None, None) if no match """ - branch = None - branch_match = re.match('https?://github.com/[^/]+/[^/]+/tree/([^/]+)/?$', url) - if branch_match is not None: - branch = branch_match.group(1) + branch_match = re.match('https?://github.com/([^/]+/[^/]+)/tree/([^/]+)/?$', url) + if branch_match: + return branch_match.groups() - repo_match = re.match('https?://github.com/([^/]+/[^/]+)($|/.*$)', url) - if repo_match is None: - return (None, None) + repo_match = re.match('https?://github.com/([^/]+/[^/]+)(?:$|/.*$)', url) + if repo_match: + return (repo_match.group(1), None) - user_repo = repo_match.group(1) - return (user_repo, branch) + return (None, None) diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py index dc6ddfa..3fa1d21 100644 --- a/app/lib/package_control/clients/gitlab_client.py +++ b/app/lib/package_control/clients/gitlab_client.py @@ -1,61 +1,71 @@ import re +from urllib.parse import quote from ..downloaders.downloader_exception import DownloaderException from ..versions import version_process, version_sort from .json_api_client import JSONApiClient -try: - # Python 3 - from urllib.parse import urlencode, quote - str_cls = str -except (ImportError): - # Python 2 - from urllib import urlencode, quote +class GitLabClient(JSONApiClient): - str_cls = unicode # noqa + @staticmethod + def make_repo_url(owner_name, repo_name): + """ + Generate the tags URL for a GitHub repo if the value passed is a GitHub + repository URL + :param owener_name: + The repository owner name -class GitLabClient(JSONApiClient): - def make_tags_url(self, repo): + :param repo_name: + The repository name + + :return: + The repositoy 
URL of given owner and repo name + """ + + return 'https://gitlab.com/%s/%s' % (quote(owner_name), quote(repo_name)) + + @staticmethod + def make_tags_url(repo_url): """ Generate the tags URL for a GitLab repo if the value passed is a GitLab repository URL - :param repo: + :param repo_url: The repository URL :return: - The tags URL if repo was a GitLab repo, otherwise False + The tags URL if repo_url was a GitLab repo, otherwise False """ - match = re.match('https?://gitlab.com/([^/]+/[^/]+)/?$', repo) + match = re.match('https?://gitlab.com/([^/]+/[^/]+)/?$', repo_url) if not match: return False return 'https://gitlab.com/%s/-/tags' % match.group(1) - def make_branch_url(self, repo, branch): + @staticmethod + def make_branch_url(repo_url, branch): """ Generate the branch URL for a GitLab repo if the value passed is a GitLab repository URL - :param repo: + :param repo_url: The repository URL :param branch: The branch name :return: - The branch URL if repo was a GitLab repo, otherwise False + The branch URL if repo_url was a GitLab repo, otherwise False """ - match = re.match('https?://gitlab.com/([^/]+/[^/]+)/?$', repo) + match = re.match('https?://gitlab.com/([^/]+/[^/]+)/?$', repo_url) if not match: return False - return 'https://gitlab.com/%s/-/tree/%s' % (match.group(1), - quote(branch)) + return 'https://gitlab.com/%s/-/tree/%s' % (match.group(1), quote(branch)) def download_info(self, url, tag_prefix=None): """ @@ -70,7 +80,8 @@ def download_info(self, url, tag_prefix=None): tag that is a valid semver version. :param tag_prefix: - If the URL is a tags URL, only match tags that have this prefix + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. 
:raises: DownloaderException: when there is an error downloading @@ -84,94 +95,69 @@ def download_info(self, url, tag_prefix=None): `date` - the ISO-8601 timestamp string when the version was published """ - tags_match = re.match('https?://gitlab.com/([^/]+)/([^/]+)/-/tags/?$', - url) + output = [] version = None - url_pattern = 'https://gitlab.com/%s/-/archive/%s/%s-%s.zip' + url_pattern = 'https://gitlab.com/%s/%s/-/archive/%s/%s-%s.zip' - output = [] + # tag based releases + tags_match = re.match('https?://gitlab.com/([^/]+)/([^/]+)/-/tags/?$', url) if tags_match: - (user_id, user_repo_type) = self._extract_user_id(tags_match.group(1)) - - repo_id, _ = self._extract_repo_id_default_branch( - user_id, - tags_match.group(2), - 'users' if user_repo_type else 'groups' - ) - if repo_id is None: - return None - - user_repo = '%s/%s' % (tags_match.group(1), tags_match.group(2)) - tags_url = self._make_api_url( - repo_id, - '/repository/tags?per_page=100' - ) - tags_list = self.fetch_json(tags_url) - tags = [tag['name'] for tag in tags_list] - tag_info = version_process(tags, tag_prefix) + user_name, repo_name = tags_match.groups() + repo_id = '%s%%2F%s' % (user_name, repo_name) + tags_url = self._make_api_url(repo_id, '/repository/tags?per_page=100') + tags_json = self.fetch_json(tags_url) + tags_list = { + tag['name']: tag['commit']['committed_date'][0:19].replace('T', ' ') + for tag in tags_json + } + + tag_info = version_process(tags_list.keys(), tag_prefix) tag_info = version_sort(tag_info, reverse=True) if not tag_info: return False - used_versions = {} + max_releases = self.settings.get('max_releases', 0) + + used_versions = set() for info in tag_info: version = info['version'] if version in used_versions: continue + tag = info['prefix'] + version - repo_name = user_repo.split('/')[1] output.append({ - 'url': url_pattern % (user_repo, tag, repo_name, tag), - 'commit': tag, + 'url': url_pattern % (user_name, repo_name, tag, repo_name, tag), 'version': version, + 
'date': tags_list[tag] }) - used_versions[version] = True + used_versions.add(version) + if max_releases > 0 and len(used_versions) >= max_releases: + break + # branch based releases else: - user_repo, commit = self._user_repo_ref(url) + user_repo, branch = self._user_repo_branch(url) if not user_repo: - return user_repo - user, repo = user_repo.split('/') - (user_id, user_repo_type) = self._extract_user_id(user) - - repo_id, default_branch = self._extract_repo_id_default_branch( - user_id, - repo, - 'users' if user_repo_type else 'groups' - ) - if repo_id is None: return None - if commit is None: - commit = default_branch - - repo_name = user_repo.split('/')[1] - output.append({ - 'url': url_pattern % (user_repo, commit, repo_name, commit), - 'commit': commit - }) - - for release in output: - query_string = urlencode({ - 'ref_name': release['commit'], - 'per_page': 1 - }) - commit_url = self._make_api_url( - repo_id, - '/repository/commits?%s' % query_string - ) - commit_info = self.fetch_json(commit_url) - if not commit_info[0].get('commit'): - timestamp = commit_info[0]['committed_date'][0:19].replace('T', ' ') - else: - timestamp = commit_info[0]['commit']['committed_date'][0:19].replace('T', ' ') + user_name, repo_name = user_repo.split('/') + repo_id = '%s%%2F%s' % (user_name, repo_name) + + if branch is None: + repo_info = self.fetch_json(self._make_api_url(repo_id)) + branch = repo_info.get('default_branch', 'master') - if 'version' not in release: - release['version'] = re.sub(r'[\-: ]', '.', timestamp) - release['date'] = timestamp + branch_url = self._make_api_url(repo_id, '/repository/branches/%s' % branch) + branch_info = self.fetch_json(branch_url) - del release['commit'] + timestamp = branch_info['commit']['committed_date'][0:19].replace('T', ' ') + + output = [{ + 'url': url_pattern % (user_name, repo_name, branch, repo_name, branch), + 'version': re.sub(r'[\-: ]', '.', timestamp), + 'date': timestamp + }] return output @@ -196,39 +182,19 @@ def 
repo_info(self, url): `donate` - URL of a donate page """ - user_repo, branch = self._user_repo_ref(url) + user_repo, branch = self._user_repo_branch(url) if not user_repo: - return user_repo - - user, repo = user_repo.split('/') - - (user_id, user_repo_type) = self._extract_user_id(user) - - repo_id, default_branch = self._extract_repo_id_default_branch( - user_id, - repo, - 'users' if user_repo_type else 'groups' - ) - if repo_id is None: return None - if branch is None: - branch = default_branch + user_name, repo_name = user_repo.split('/') + repo_id = '%s%%2F%s' % (user_name, repo_name) + repo_url = self._make_api_url(repo_id) + repo_info = self.fetch_json(repo_url) - api_url = self._make_api_url(repo_id) - info = self.fetch_json(api_url) + if not branch: + branch = repo_info.get('default_branch', 'master') - output = self._extract_repo_info(info) - - if not output['readme']: - return output - - output['readme'] = 'https://gitlab.com/%s/-/%s/%s' % ( - user_repo, - branch, - output['readme'].split('/')[-1], - ) - return output + return self._extract_repo_info(branch, repo_info) def user_info(self, url): """ @@ -259,33 +225,25 @@ def user_info(self, url): return None user = user_match.group(1) - (user_id, user_repo_type) = self._extract_user_id(user) + user_id, user_repo_type = self._extract_user_id(user) api_url = 'https://gitlab.com/api/v4/%s/%s/projects' % ( 'users' if user_repo_type else 'groups', user_id) repos_info = self.fetch_json(api_url) - output = [] - for info in repos_info: - user_repo = '%s/%s' % (user, info['name']) - branch = info['default_branch'] - - repo_output = self._extract_repo_info(info) - - if repo_output['readme']: - repo_output['readme'] = 'https://gitlab.com/%s/-/raw/%s/%s' % ( - user_repo, - branch, - repo_output['readme'].split('/')[-1], - ) - output.append(repo_output) - return output + return [ + self._extract_repo_info(info.get('default_branch', 'master'), info) + for info in repos_info + ] - def _extract_repo_info(self, result): 
+ def _extract_repo_info(self, branch, result): """ Extracts information about a repository from the API result + :param branch: + The branch to return data from + :param result: A dict representing the data returned from the GitLab API @@ -295,16 +253,27 @@ def _extract_repo_info(self, result): `description` `homepage` - URL of the homepage `author` + `readme` - URL of the homepage `issues` - URL of bug tracker `donate` - URL of a donate page """ + user_name = result['owner']['username'] if result.get('owner') else result['namespace']['name'] + repo_name = result['name'] + user_repo = '%s/%s' % (user_name, repo_name) + + readme_url = None + if result['readme_url']: + readme_url = 'https://gitlab.com/%s/-/raw/%s/%s' % ( + user_repo, branch, result['readme_url'].split('/')[-1] + ) + return { - 'name': result['name'], + 'name': repo_name, 'description': result['description'] or 'No description provided', 'homepage': result['web_url'] or None, - 'readme': result['readme_url'] if result['readme_url'] else None, - 'author': result['owner']['username'] if result.get('owner') else result['namespace']['name'], + 'author': user_name, + 'readme': readme_url, 'issues': result.get('issues', None) if result.get('_links') else None, 'donate': None, } @@ -325,37 +294,29 @@ def _make_api_url(self, project_id, suffix=''): return 'https://gitlab.com/api/v4/projects/%s%s' % (project_id, suffix) - def _user_repo_ref(self, url): + def _user_repo_branch(self, url): """ - Extract the username/repo and ref name from the URL + Extract the username/repo and branch name from the URL :param url: The URL to extract the info from, in one of the forms: https://gitlab.com/{user}/{repo} - https://gitlab.com/{user}/{repo}/-/tree/{ref} + https://gitlab.com/{user}/{repo}/-/tree/{branch} :return: - A tuple of (user/repo, ref name) or (None, None) if no match. - The ref name may be a branch name or a commit + A tuple of (user/repo, branch name) or (None, None) if no match. 
+ The branch name may be a branch name or a commit """ - branch = None - branch_match = re.match( - r'https?://gitlab.com/[^/]+/[^/]+/-/tree/([^/]+)/?$', - url - ) - if branch_match is not None: - branch = branch_match.group(1) - - repo_match = re.match( - r'https?://gitlab.com/([^/]+/[^/]+)($|/.*$)', - url - ) - if repo_match is None: - return (None, None) + branch_match = re.match('https?://gitlab.com/([^/]+/[^/]+)/-/tree/([^/]+)/?$', url) + if branch_match: + return branch_match.groups() + + repo_match = re.match('https?://gitlab.com/([^/]+/[^/]+)(?:$|/.*$)', url) + if repo_match: + return (repo_match.group(1), None) - user_repo = repo_match.group(1) - return (user_repo, branch) + return (None, None) def _extract_user_id(self, username): """ @@ -372,7 +333,7 @@ def _extract_user_id(self, username): try: repos_info = self.fetch_json(user_url) except (DownloaderException) as e: - if str_cls(e).find('HTTP error 404') != -1: + if str(e).find('HTTP error 404') != -1: return self._extract_group_id(username) raise @@ -396,7 +357,7 @@ def _extract_group_id(self, group_name): try: repos_info = self.fetch_json(group_url) except (DownloaderException) as e: - if str_cls(e).find('HTTP error 404') != -1: + if str(e).find('HTTP error 404') != -1: return (None, None) raise @@ -404,37 +365,3 @@ def _extract_group_id(self, group_name): return (None, None) return (repos_info[0]['id'], False) - - def _extract_repo_id_default_branch(self, user_id, repo_name, repo_type): - """ - Extract the repo id from the repo results - - :param user_id: - The user_id of the user who owns the repo - - :param repo_name: - The name of the repository - - :param repo_type: - A string "users" or "groups", based on the user_id being from a - user or a group - - :return: - A 2-element tuple, (repo_id, default_branch) or (None, None) if no match - """ - - user_url = 'https://gitlab.com/api/v4/%s/%s/projects' % (repo_type, user_id) - try: - repos_info = self.fetch_json(user_url) - except 
(DownloaderException) as e: - if str_cls(e).find('HTTP error 404') != -1: - return (None, None) - raise - - repo_info = next( - (repo for repo in repos_info if repo['name'].lower() == repo_name.lower()), None) - - if not repo_info: - return (None, None) - - return (repo_info['id'], repo_info['default_branch']) diff --git a/app/lib/package_control/clients/json_api_client.py b/app/lib/package_control/clients/json_api_client.py index 38fa88b..889f131 100644 --- a/app/lib/package_control/clients/json_api_client.py +++ b/app/lib/package_control/clients/json_api_client.py @@ -1,12 +1,5 @@ import json - -try: - # Python 3 - from urllib.parse import urlencode, urlparse -except (ImportError): - # Python 2 - from urllib import urlencode - from urlparse import urlparse +from urllib.parse import urlencode, urlparse from .client_exception import ClientException from ..download_manager import downloader @@ -60,5 +53,5 @@ def fetch_json(self, url, prefer_cached=False): try: return json.loads(repository_json.decode('utf-8')) except (ValueError): - error_string = u'Error parsing JSON from URL %s.' % url + error_string = 'Error parsing JSON from URL %s.' 
% url raise ClientException(error_string) diff --git a/app/lib/package_control/clients/readme_client.py b/app/lib/package_control/clients/readme_client.py index 5edf416..a2e2980 100644 --- a/app/lib/package_control/clients/readme_client.py +++ b/app/lib/package_control/clients/readme_client.py @@ -1,13 +1,7 @@ import re import os import base64 - -try: - # Python 3 - from urllib.parse import urlencode -except (ImportError): - # Python 2 - from urllib import urlencode +from urllib.parse import urlencode from .json_api_client import JSONApiClient @@ -69,7 +63,7 @@ def readme_info(self, url): if not contents: contents = self.fetch(url) - basename, ext = os.path.splitext(url) + _, ext = os.path.splitext(url) format = 'txt' ext = ext.lower() if ext in _readme_formats: diff --git a/app/lib/package_control/cmd.py b/app/lib/package_control/cmd.py index 09f7464..62a969c 100644 --- a/app/lib/package_control/cmd.py +++ b/app/lib/package_control/cmd.py @@ -1,10 +1,8 @@ import os import subprocess import re -import sys from .console_write import console_write -from .unicode import unicode_from_os from .show_error import show_error from . 
import text @@ -18,13 +16,6 @@ except (ImportError): sublime = None -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - def create_cmd(args, basename_binary=False): """ @@ -50,9 +41,9 @@ def create_cmd(args, basename_binary=False): escaped_args = [] for arg in args: if re.search('^[a-zA-Z0-9/_^\\-\\.:=]+$', arg) is None: - arg = u"'" + arg.replace(u"'", u"'\\''") + u"'" + arg = "'" + arg.replace("'", "'\\''") + "'" escaped_args.append(arg) - return u' '.join(escaped_args) + return ' '.join(escaped_args) class Cli(object): @@ -119,15 +110,13 @@ def execute(self, args, cwd, input=None, encoding='utf-8', meaningful_output=Fal if self.debug: console_write( - u''' + ''' Executing %s [%s] ''', (create_cmd(args), cwd) ) try: - if sys.platform == 'win32' and sys.version_info < (3,): - cwd = cwd.encode('mbcs') proc = subprocess.Popen( args, stdin=subprocess.PIPE, @@ -138,7 +127,7 @@ def execute(self, args, cwd, input=None, encoding='utf-8', meaningful_output=Fal env=os.environ ) - if input and isinstance(input, str_cls): + if input and isinstance(input, str): input = input.encode(encoding) stuck = True @@ -157,7 +146,7 @@ def kill_proc(): proc.kill() message = text.format( - u''' + ''' The process %s seems to have gotten stuck. Command: %s @@ -168,7 +157,7 @@ def kill_proc(): ) if is_vcs: message += text.format( - u''' + ''' This is likely due to a password or passphrase prompt. Please ensure %s works without a prompt, or @@ -193,7 +182,7 @@ def kill_proc(): if proc.returncode not in self.ok_returncodes: if not ignore_errors or re.search(ignore_errors, output) is None: message = text.format( - u''' + ''' Error executing: %s Working directory: %s @@ -223,14 +212,14 @@ def kill_proc(): except (OSError) as e: show_error( - u''' + ''' Error executing: %s %s Try checking your "%s_binary" setting? 
''', - (create_cmd(args), unicode_from_os(e), self.cli_name) + (create_cmd(args), str(e), self.cli_name) ) return False @@ -283,7 +272,7 @@ def find_binary(self, name): if self.debug: console_write( - u''' + ''' Looking for %s at: "%s" ''', (self.cli_name, '", "'.join(check_binaries)) @@ -293,7 +282,7 @@ def find_binary(self, name): if os.path.exists(path) and not os.path.isdir(path) and os.access(path, os.X_OK): if self.debug: console_write( - u''' + ''' Found %s at "%s" ''', (self.cli_name, path) @@ -303,7 +292,7 @@ def find_binary(self, name): if self.debug: console_write( - u''' + ''' Could not find %s on your machine ''', self.cli_name diff --git a/app/lib/package_control/console_write.py b/app/lib/package_control/console_write.py index 72822e6..0f29e0b 100644 --- a/app/lib/package_control/console_write.py +++ b/app/lib/package_control/console_write.py @@ -1,12 +1,5 @@ import sys -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - from . import text @@ -31,11 +24,7 @@ def console_write(string, params=None, strip=True, indent=None, prefix=True): If the string "Package Control: " should be prefixed to the string """ - string = text.format(str_cls(string), params, strip=strip, indent=indent) - - if sys.version_info < (3,): - if isinstance(string, str_cls): - string = string.encode('UTF-8') + string = text.format(str(string), params, strip=strip, indent=indent) if prefix: sys.stdout.write('Package Control: ') diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py index 3cba12a..a1cbe1a 100644 --- a/app/lib/package_control/download_manager.py +++ b/app/lib/package_control/download_manager.py @@ -3,22 +3,13 @@ from threading import Lock, Timer from contextlib import contextmanager import sys - -try: - # Python 3 - from urllib.parse import urlparse - str_cls = str -except (ImportError): - # Python 2 - from urlparse import urlparse - str_cls = unicode # noqa +from urllib.parse 
import urlparse from . import __version__ from .show_error import show_error from .console_write import console_write from .cache import set_cache, get_cache -from .unicode import unicode_from_os from . import text from .downloaders import DOWNLOADERS @@ -59,15 +50,14 @@ def downloader(url, settings): def _grab(url, settings): global _managers, _lock, _in_use, _timer - _lock.acquire() - try: + with _lock: if _timer: _timer.cancel() _timer = None parsed = urlparse(url) if not parsed or not parsed.hostname: - raise DownloaderException(u'The URL "%s" is malformed' % url) + raise DownloaderException('The URL "%s" is malformed' % url) hostname = parsed.hostname.lower() if hostname not in _managers: _managers[hostname] = [] @@ -79,15 +69,11 @@ def _grab(url, settings): return _managers[hostname].pop() - finally: - _lock.release() - def _release(url, manager): global _managers, _lock, _in_use, _timer - _lock.acquire() - try: + with _lock: hostname = urlparse(url).hostname.lower() # This means the package was reloaded between _grab and _release, @@ -107,15 +93,11 @@ def _release(url, manager): _timer = Timer(5.0, close_all_connections) _timer.start() - finally: - _lock.release() - def close_all_connections(): global _managers, _lock, _in_use, _timer - _lock.acquire() - try: + with _lock: if _timer: _timer.cancel() _timer = None @@ -125,9 +107,6 @@ def close_all_connections(): manager.close() _managers = {} - finally: - _lock.release() - def update_url(url, debug): """ @@ -159,7 +138,7 @@ def update_url(url, debug): if debug and url != original_url: console_write( - u''' + ''' Fixed URL from %s to %s ''', (original_url, url) @@ -182,6 +161,7 @@ def __init__(self, settings): if settings.get('http_cache'): cache_length = settings.get('http_cache_length', 604800) self.settings['cache'] = HttpCache(cache_length) + self.settings['cache_length'] = cache_length def close(self): if self.downloader: @@ -233,7 +213,7 @@ def fetch(self, url, error_message, prefer_cached=False): if not 
isinstance(downloader_list, list) or len(downloader_list) == 0: error_string = text.format( - u''' + ''' No list of preferred downloaders specified in the "downloader_precedence" setting for the platform "%s" ''', @@ -256,7 +236,7 @@ def fetch(self, url, error_message, prefer_cached=False): downloader_name == 'oscrypto': continue error_string = text.format( - u''' + ''' The downloader "%s" from the "downloader_precedence" setting for the platform "%s" is invalid ''', @@ -278,7 +258,7 @@ def fetch(self, url, error_message, prefer_cached=False): if not self.downloader: error_string = text.format( - u''' + ''' None of the preferred downloaders can download %s. This is usually either because the ssl module is unavailable @@ -317,31 +297,31 @@ def fetch(self, url, error_message, prefer_cached=False): try: ip = socket.gethostbyname(hostname) except (socket.gaierror) as e: - ip = unicode_from_os(e) + ip = str(e) except (TypeError): ip = None console_write( - u''' + ''' Download Debug URL: %s Timeout: %s Resolved IP: %s ''', - (url, str_cls(timeout), ip) + (url, str(timeout), ip) ) if ipv6: console_write( - u' Resolved IPv6: %s', + ' Resolved IPv6: %s', ipv6, prefix=False ) if hostname in rate_limited_domains: - error_string = u'Skipping due to hitting rate limit for %s' % hostname + error_string = 'Skipping due to hitting rate limit for %s' % hostname if self.settings.get('debug'): console_write( - u' %s', + ' %s', error_string, prefix=False ) @@ -356,7 +336,7 @@ def fetch(self, url, error_message, prefer_cached=False): set_cache('rate_limited_domains', rate_limited_domains, self.settings.get('cache_length')) console_write( - u''' + ''' Hit rate limit of %s for %s. Skipping all futher download requests for this domain. 
''', @@ -366,10 +346,10 @@ def fetch(self, url, error_message, prefer_cached=False): except (OscryptoDownloaderException) as e: console_write( - u''' + ''' Attempting to use Urllib downloader due to Oscrypto error: %s ''', - str_cls(e) + str(e) ) self.downloader = UrlLibDownloader(self.settings) @@ -379,7 +359,7 @@ def fetch(self, url, error_message, prefer_cached=False): except (WinDownloaderException) as e: console_write( - u''' + ''' Attempting to use Urllib downloader due to WinINet error: %s ''', e diff --git a/app/lib/package_control/downloaders/basic_auth_downloader.py b/app/lib/package_control/downloaders/basic_auth_downloader.py index 333b0b0..78678ed 100644 --- a/app/lib/package_control/downloaders/basic_auth_downloader.py +++ b/app/lib/package_control/downloaders/basic_auth_downloader.py @@ -1,11 +1,6 @@ import base64 -try: - # Python 3 - from urllib.parse import urlparse -except (ImportError): - # Python 2 - from urlparse import urlparse +from urllib.parse import urlparse class BasicAuthDownloader(object): @@ -64,7 +59,6 @@ def get_username_password(self, url): domain_name = urlparse(url).netloc auth_settings = self.settings.get('http_basic_auth') - domain_name = urlparse(url).netloc if auth_settings and isinstance(auth_settings, dict): params = auth_settings.get(domain_name) if params and isinstance(params, (list, tuple)) and len(params) == 2: diff --git a/app/lib/package_control/downloaders/binary_not_found_error.py b/app/lib/package_control/downloaders/binary_not_found_error.py index 7e343ad..e93a8f7 100644 --- a/app/lib/package_control/downloaders/binary_not_found_error.py +++ b/app/lib/package_control/downloaders/binary_not_found_error.py @@ -1,17 +1,6 @@ -import sys - - class BinaryNotFoundError(Exception): """If a necessary executable is not found in the PATH on the system""" - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - def 
__bytes__(self): - return self.__unicode__().encode('utf-8') + return self.__str__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/caching_downloader.py b/app/lib/package_control/downloaders/caching_downloader.py index af92f69..dc0db8e 100644 --- a/app/lib/package_control/downloaders/caching_downloader.py +++ b/app/lib/package_control/downloaders/caching_downloader.py @@ -4,13 +4,6 @@ from ..console_write import console_write -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - class CachingDownloader(object): @@ -89,7 +82,7 @@ def cache_result(self, method, url, status, headers, content): if not cache: if debug: console_write( - u''' + ''' Skipping cache since there is no cache object ''' ) @@ -98,7 +91,7 @@ def cache_result(self, method, url, status, headers, content): if method.lower() != 'get': if debug: console_write( - u''' + ''' Skipping cache since the HTTP method != GET ''' ) @@ -110,7 +103,7 @@ def cache_result(self, method, url, status, headers, content): if status not in (200, 304): if debug: console_write( - u''' + ''' Skipping cache since the HTTP status code not one of: 200, 304 ''' ) @@ -123,7 +116,7 @@ def cache_result(self, method, url, status, headers, content): if cached_content: if debug: console_write( - u''' + ''' Using cached content for %s from %s ''', (url, cache.path(key)) @@ -148,7 +141,7 @@ def cache_result(self, method, url, status, headers, content): if headers.get('content-type') in ('application/zip', 'application/octet-stream'): if debug: console_write( - u''' + ''' Skipping cache since the response is a zip file ''' ) @@ -166,7 +159,7 @@ def cache_result(self, method, url, status, headers, content): info_key = self.generate_key(url, '.info') if debug: console_write( - u''' + ''' Caching %s in %s ''', (url, cache.path(key)) @@ -191,7 +184,7 @@ def generate_key(self, url, suffix=''): A string key for the URL """ - if isinstance(url, str_cls): + if isinstance(url, str): url = 
url.encode('utf-8') key = hashlib.md5(url).hexdigest() @@ -214,7 +207,7 @@ def retrieve_cached(self, url): if not cache: if debug: console_write( - u''' + ''' Skipping cache since there is no cache object ''' ) @@ -225,7 +218,7 @@ def retrieve_cached(self, url): cached_content = cache.get(key) if cached_content and debug: console_write( - u''' + ''' Using cached content for %s from %s ''', (url, cache.path(key)) diff --git a/app/lib/package_control/downloaders/cli_downloader.py b/app/lib/package_control/downloaders/cli_downloader.py index cebbb22..53d06ee 100644 --- a/app/lib/package_control/downloaders/cli_downloader.py +++ b/app/lib/package_control/downloaders/cli_downloader.py @@ -43,13 +43,16 @@ def find_binary(self, name): # This is mostly for OS X, which seems to launch ST with a # minimal set of environmental variables dirs.append('/usr/local/bin') + executable = name + else: + executable = name + ".exe" for dir_ in dirs: - path = os.path.join(dir_, name) + path = os.path.join(dir_, executable) if os.path.exists(path): return path - raise BinaryNotFoundError('The binary %s could not be located' % name) + raise BinaryNotFoundError('The binary %s could not be located' % executable) def execute(self, args): """ @@ -67,19 +70,24 @@ def execute(self, args): if self.settings.get('debug'): console_write( - u''' + ''' Trying to execute command %s ''', create_cmd(args) ) - proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + startupinfo = None + if os.name == 'nt': + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - output = proc.stdout.read() - self.stderr = proc.stderr.read() - returncode = proc.wait() - if returncode != 0: - error = NonCleanExitError(returncode) + proc = subprocess.Popen( + args, startupinfo=startupinfo, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + output, self.stderr = proc.communicate() + + if proc.returncode != 0: + 
error = NonCleanExitError(proc.returncode) error.stderr = self.stderr error.stdout = output raise error diff --git a/app/lib/package_control/downloaders/curl_downloader.py b/app/lib/package_control/downloaders/curl_downloader.py index da4af55..7f6e0fa 100644 --- a/app/lib/package_control/downloaders/curl_downloader.py +++ b/app/lib/package_control/downloaders/curl_downloader.py @@ -2,15 +2,7 @@ import re import os -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - from ..console_write import console_write -from ..open_compat import open_compat, read_compat from .cli_downloader import CliDownloader from .non_clean_exit_error import NonCleanExitError from .downloader_exception import DownloaderException @@ -83,7 +75,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): command = [ self.curl, '--connect-timeout', - str_cls(int(timeout)), + str(int(timeout)), '-sSL', '--tlsv1', # We have to capture the headers to check for rate limit info @@ -125,7 +117,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if debug: console_write( - u''' + ''' Curl Debug Proxy http_proxy: %s https_proxy: %s @@ -139,7 +131,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): command.append('--proxy-anyauth') if proxy_username or proxy_password: - command.extend(['-U', u"%s:%s" % (proxy_username, proxy_password)]) + command.extend(['-U', "%s:%s" % (proxy_username, proxy_password)]) if http_proxy: os.putenv('http_proxy', http_proxy) @@ -154,8 +146,8 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): try: output = self.execute(command) - with open_compat(self.tmp_file, 'r') as f: - headers_str = read_compat(f) + with open(self.tmp_file, 'r') as fobj: + headers_str = fobj.read() self.clean_tmp_file() message = 'OK' @@ -214,21 +206,21 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): # GitHub and BitBucket seem 
to rate limit via 503 if tries and debug: console_write( - u''' + ''' Downloading %s was rate limited, trying again ''', url ) continue - download_error = u'HTTP error ' + code + download_error = 'HTTP error ' + code elif e.returncode == 7: # If the user could not connect, check for ipv6 errors and # if so, force curl to use ipv4. Apparently some users have # network configuration where curl will try ipv6 and resolve # it, but their ISP won't actually route it. - full_debug = u"\n".join([section['contents'] for section in debug_sections]) + full_debug = "\n".join([section['contents'] for section in debug_sections]) ipv6_error = re.search( r'^\s*connect to ([0-9a-f]+(:+[0-9a-f]+)+) port \d+ failed: Network is unreachable', full_debug, @@ -237,7 +229,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if ipv6_error and tries != 0: if debug: console_write( - u''' + ''' Downloading %s failed because the ipv6 address %s was not reachable, retrying using ipv4 ''', @@ -247,20 +239,20 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): continue elif e.returncode == 6: - download_error = u'URL error host not found' + download_error = 'URL error host not found' elif e.returncode == 28: # GitHub and BitBucket seem to time out a lot if tries and debug: console_write( - u''' + ''' Downloading %s timed out, trying again ''', url ) continue - error_string = u'%s %s downloading %s.' % (error_message, download_error, url) + error_string = '%s %s downloading %s.' 
% (error_message, download_error, url) break @@ -276,9 +268,9 @@ def print_debug(self, sections): for section in sections: type = section['type'] - indented_contents = section['contents'].replace(u"\n", u"\n ") + indented_contents = section['contents'].replace("\n", "\n ") console_write( - u''' + ''' Curl HTTP Debug %s %s ''', @@ -321,34 +313,34 @@ def split_debug(self, string): section = 'General' last_section = None - stderr = u'' + stderr = '' debug_sections = [] - debug_section = u'' + debug_section = '' for line in string.splitlines(): # Placeholder for body of request - if line and line[0:2] == u'{ ': + if line and line[0:2] == '{ ': continue - if line and line[0:18] == u'} [data not shown]': + if line and line[0:18] == '} [data not shown]': continue if len(line) > 1: subtract = 0 - if line[0:2] == u'* ': - section = u'General' + if line[0:2] == '* ': + section = 'General' subtract = 2 - elif line[0:2] == u'> ': - section = u'Write' + elif line[0:2] == '> ': + section = 'Write' subtract = 2 - elif line[0:2] == u'< ': - section = u'Read' + elif line[0:2] == '< ': + section = 'Read' subtract = 2 line = line[subtract:] # If the line does not start with "* ", "< ", "> " or " " # then it is a real stderr message - if subtract == 0 and line[0:2] != u' ': - stderr += line.rstrip() + u' ' + if subtract == 0 and line[0:2] != ' ': + stderr += line.rstrip() + ' ' continue if line.strip() == '': @@ -359,9 +351,9 @@ def split_debug(self, string): 'type': section, 'contents': debug_section.rstrip() }) - debug_section = u'' + debug_section = '' - debug_section += u"%s\n" % line + debug_section += "%s\n" % line last_section = section if len(debug_section.rstrip()) > 0: diff --git a/app/lib/package_control/downloaders/decoding_downloader.py b/app/lib/package_control/downloaders/decoding_downloader.py index 3396931..257ba6f 100644 --- a/app/lib/package_control/downloaders/decoding_downloader.py +++ b/app/lib/package_control/downloaders/decoding_downloader.py @@ -1,12 +1,6 @@ 
import gzip import zlib - -try: - # Python 3 - from io import BytesIO as StringIO -except (ImportError): - # Python 2 - from StringIO import StringIO +from io import BytesIO try: import bz2 @@ -55,9 +49,9 @@ def decode_response(self, encoding, response): if bz2: return bz2.decompress(response) else: - raise DownloaderException(u'Received bzip2 file contents, but was unable to import the bz2 module') + raise DownloaderException('Received bzip2 file contents, but was unable to import the bz2 module') elif encoding == 'gzip': - return gzip.GzipFile(fileobj=StringIO(response)).read() + return gzip.GzipFile(fileobj=BytesIO(response)).read() elif encoding == 'deflate': decompresser = zlib.decompressobj(-zlib.MAX_WBITS) return decompresser.decompress(response) + decompresser.flush() diff --git a/app/lib/package_control/downloaders/downloader_exception.py b/app/lib/package_control/downloaders/downloader_exception.py index c727bb3..71509be 100644 --- a/app/lib/package_control/downloaders/downloader_exception.py +++ b/app/lib/package_control/downloaders/downloader_exception.py @@ -1,17 +1,6 @@ -import sys - - class DownloaderException(Exception): """If a downloader could not download a URL""" - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - def __bytes__(self): - return self.__unicode__().encode('utf-8') + return self.__str__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/http_error.py b/app/lib/package_control/downloaders/http_error.py index 40af08a..dbf1383 100644 --- a/app/lib/package_control/downloaders/http_error.py +++ b/app/lib/package_control/downloaders/http_error.py @@ -1,6 +1,3 @@ -import sys - - class HttpError(Exception): """If a downloader was able to download a URL, but the result was not a 200 or 304""" @@ -9,13 +6,5 @@ def __init__(self, message, code): self.code = code super(HttpError, self).__init__(message) - def 
__unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - def __bytes__(self): - return self.__unicode__().encode('utf-8') + return self.__str__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/limiting_downloader.py b/app/lib/package_control/downloaders/limiting_downloader.py index f4befab..c1cadcb 100644 --- a/app/lib/package_control/downloaders/limiting_downloader.py +++ b/app/lib/package_control/downloaders/limiting_downloader.py @@ -1,11 +1,4 @@ -try: - # Python 3 - from urllib.parse import urlparse - str_cls = str -except (ImportError): - # Python 2 - from urlparse import urlparse - str_cls = unicode # noqa +from urllib.parse import urlparse from .rate_limit_exception import RateLimitException @@ -34,6 +27,6 @@ def handle_rate_limit(self, headers, url): limit_remaining = headers.get('x-ratelimit-remaining', '1') limit = headers.get('x-ratelimit-limit', '1') - if str_cls(limit_remaining) == '0': + if str(limit_remaining) == '0': hostname = urlparse(url).hostname raise RateLimitException(hostname, limit) diff --git a/app/lib/package_control/downloaders/non_clean_exit_error.py b/app/lib/package_control/downloaders/non_clean_exit_error.py index 3088a59..8c32d6c 100644 --- a/app/lib/package_control/downloaders/non_clean_exit_error.py +++ b/app/lib/package_control/downloaders/non_clean_exit_error.py @@ -1,13 +1,3 @@ -import sys - -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - - class NonCleanExitError(Exception): """ @@ -20,13 +10,8 @@ class NonCleanExitError(Exception): def __init__(self, returncode): self.returncode = returncode - def __unicode__(self): - return str_cls(self.returncode) - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() + return str(self.returncode) def __bytes__(self): - return self.__unicode__().encode('utf-8') + return 
self.__str__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/non_http_error.py b/app/lib/package_control/downloaders/non_http_error.py index 8f3054e..61cf13d 100644 --- a/app/lib/package_control/downloaders/non_http_error.py +++ b/app/lib/package_control/downloaders/non_http_error.py @@ -1,17 +1,6 @@ -import sys - - class NonHttpError(Exception): """If a downloader had a non-clean exit, but it was not due to an HTTP error""" - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - def __bytes__(self): - return self.__unicode__().encode('utf-8') + return self.__str__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/oscrypto_downloader.py b/app/lib/package_control/downloaders/oscrypto_downloader.py index 0f0d4c9..97e7082 100644 --- a/app/lib/package_control/downloaders/oscrypto_downloader.py +++ b/app/lib/package_control/downloaders/oscrypto_downloader.py @@ -8,10 +8,10 @@ import os import hashlib import socket +from urllib.parse import urlparse +from urllib.request import parse_keqv_list, parse_http_list from ..console_write import console_write -from ..unicode import unicode_from_os -from ..open_compat import open_compat, read_compat from .downloader_exception import DownloaderException from .oscrypto_downloader_exception import OscryptoDownloaderException from ..ca_certs import get_user_ca_bundle_path @@ -45,18 +45,6 @@ from ..deps.oscrypto import tls # noqa from ..deps.oscrypto import errors as oscrypto_errors # noqa -if sys.version_info < (3,): - from urlparse import urlparse - - from urllib2 import parse_keqv_list, parse_http_list - str_cls = unicode # noqa - int_types = (int, long) # noqa -else: - from urllib.parse import urlparse - from urllib.request import parse_keqv_list, parse_http_list - str_cls = str - int_types = int - class OscryptoDownloader(DecodingDownloader, LimitingDownloader, CachingDownloader, 
BasicAuthDownloader): @@ -183,7 +171,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if code == 301: location = resp_headers.get('location') - if not isinstance(location, str_cls): + if not isinstance(location, str): raise OscryptoDownloaderException('Missing or duplicate Location HTTP header') if not re.match(r'https?://', location): if not location.startswith('/'): @@ -224,7 +212,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): ''' %s TLS verification error %s downloading %s. ''', - (error_message, str_cls(e), url) + (error_message, str(e), url) ) except (oscrypto_errors.TLSDisconnectError): @@ -245,7 +233,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): ''' %s TLS error %s downloading %s. ''', - (error_message, str_cls(e), url) + (error_message, str(e), url) ) except (socket.error): @@ -269,7 +257,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): ''' %s OS error %s downloading %s. 
''', - (error_message, unicode_from_os(e), url) + (error_message, str(e), url) ) raise @@ -360,8 +348,8 @@ def setup_socket(self): user_ca_bundle_path = get_user_ca_bundle_path(self.settings) if os.path.exists(user_ca_bundle_path): try: - with open_compat(user_ca_bundle_path, 'rb') as f: - file_data = read_compat(f) + with open(user_ca_bundle_path, 'rb') as fobj: + file_data = fobj.read() if len(file_data) > 0: for type_name, headers, der_bytes in pem.unarmor(file_data, multiple=True): extra_trust_roots.append(x509.Certificate.load(der_bytes)) @@ -371,7 +359,7 @@ def setup_socket(self): Oscrypto Debug General Error parsing certs file %s: %s ''', - (user_ca_bundle_path, str_cls(e)) + (user_ca_bundle_path, str(e)) ) session = tls.TLSSession(extra_trust_roots=extra_trust_roots) @@ -518,7 +506,7 @@ def parse_content_length(self, headers): """ content_length = headers.get('content-length') - if isinstance(content_length, str_cls) and len(content_length) > 0: + if isinstance(content_length, str) and len(content_length) > 0: content_length = int(content_length) return content_length @@ -600,7 +588,7 @@ def dump_certificate(self, cert): if curve_info[0] == 'named': public_key_algo += ' ' + curve_info[1] else: - public_key_algo += ' ' + str_cls(cert.public_key.bit_size) + public_key_algo += ' ' + str(cert.public_key.bit_size) console_write( ''' Oscrypto Server TLS Certificate @@ -647,7 +635,7 @@ def do_proxy_connect(self, headers=None): close = False for header in ('connection', 'proxy-connection'): value = resp_headers.get(header) - if isinstance(value, str_cls) and value.lower() == 'close': + if isinstance(value, str) and value.lower() == 'close': close = True if close: diff --git a/app/lib/package_control/downloaders/rate_limit_exception.py b/app/lib/package_control/downloaders/rate_limit_exception.py index 6830faf..de64a3a 100644 --- a/app/lib/package_control/downloaders/rate_limit_exception.py +++ b/app/lib/package_control/downloaders/rate_limit_exception.py @@ -10,5 
+10,5 @@ class RateLimitException(DownloaderException): def __init__(self, domain, limit): self.domain = domain self.limit = limit - message = u'Rate limit of %s exceeded for %s' % (limit, domain) + message = 'Rate limit of %s exceeded for %s' % (limit, domain) super(RateLimitException, self).__init__(message) diff --git a/app/lib/package_control/downloaders/urllib_downloader.py b/app/lib/package_control/downloaders/urllib_downloader.py index ebd312e..888b1e2 100644 --- a/app/lib/package_control/downloaders/urllib_downloader.py +++ b/app/lib/package_control/downloaders/urllib_downloader.py @@ -1,45 +1,19 @@ import re import sys - -# Monkey patches various Python 2 issues with urllib2 -from .. import http # noqa - -try: - # Python 3 - from http.client import HTTPException, BadStatusLine - from urllib.request import ( - build_opener, - HTTPPasswordMgrWithDefaultRealm, - ProxyBasicAuthHandler, - ProxyDigestAuthHandler, - ProxyHandler, - Request, - ) - from urllib.error import HTTPError, URLError - import urllib.request as urllib_compat -except (ImportError): - # Python 2 - from httplib import HTTPException, BadStatusLine - from urllib2 import ( - build_opener, - HTTPPasswordMgrWithDefaultRealm, - ProxyBasicAuthHandler, - ProxyDigestAuthHandler, - ProxyHandler, - Request, - ) - from urllib2 import HTTPError, URLError - import urllib2 as urllib_compat - -try: - # Python 3.3 - import ConnectionError -except (ImportError): - # Python 2.6-3.2 - from socket import error as ConnectionError +from http.client import HTTPException, BadStatusLine +from urllib.request import ( + build_opener, + HTTPPasswordMgrWithDefaultRealm, + ProxyBasicAuthHandler, + ProxyDigestAuthHandler, + ProxyHandler, + Request, +) +from urllib.error import HTTPError, URLError +import urllib.request as urllib_compat +from socket import error as ConnectionError from ..console_write import console_write -from ..unicode import unicode_from_os from ..http.validating_https_handler import ValidatingHTTPSHandler 
from ..http.debuggable_http_handler import DebuggableHTTPHandler from .downloader_exception import DownloaderException @@ -164,10 +138,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): exception_type = e.__class__.__name__ error_string = text.format( - u''' + ''' %s HTTP exception %s (%s) downloading %s. ''', - (error_message, exception_type, unicode_from_os(e), url) + (error_message, exception_type, str(e), url) ) except (HTTPError) as e: @@ -179,14 +153,14 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): self.handle_rate_limit(e.headers, url) # Handle cached responses - if unicode_from_os(e.code) == '304': + if str(e.code) == '304': return self.cache_result('get', url, int(e.code), e.headers, b'') # Bitbucket and Github return 503 a decent amount - if unicode_from_os(e.code) == '503' and tries != 0: + if str(e.code) == '503' and tries != 0: if tries and debug: console_write( - u''' + ''' Downloading %s was rate limited, trying again ''', url @@ -194,20 +168,20 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): continue error_string = text.format( - u''' + ''' %s HTTP error %s downloading %s. ''', - (error_message, unicode_from_os(e.code), url) + (error_message, str(e.code), url) ) except (URLError) as e: # Bitbucket and Github timeout a decent amount - if unicode_from_os(e.reason) == 'The read operation timed out' \ - or unicode_from_os(e.reason) == 'timed out': + if str(e.reason) == 'The read operation timed out' \ + or str(e.reason) == 'timed out': if tries and debug: console_write( - u''' + ''' Downloading %s timed out, trying again ''', url @@ -215,10 +189,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): continue error_string = text.format( - u''' + ''' %s URL error %s downloading %s. 
''', - (error_message, unicode_from_os(e.reason), url) + (error_message, str(e.reason), url) ) except (ConnectionError): @@ -226,7 +200,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): # thus getting new handlers and a new connection if debug: console_write( - u''' + ''' Connection went away while trying to download %s, trying again ''', url @@ -240,8 +214,8 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): break if error_string is None: - plural = u's' if tried > 1 else u'' - error_string = u'Unable to download %s after %d attempt%s' % (url, tried, plural) + plural = 's' if tried > 1 else '' + error_string = 'Unable to download %s after %d attempt%s' % (url, tried, plural) raise DownloaderException(error_string) @@ -305,7 +279,7 @@ def setup_opener(self, url, timeout): if debug: console_write( - u''' + ''' Urllib Debug Proxy http_proxy: %s https_proxy: %s diff --git a/app/lib/package_control/downloaders/wget_downloader.py b/app/lib/package_control/downloaders/wget_downloader.py index 6a6b0b7..c135a0e 100644 --- a/app/lib/package_control/downloaders/wget_downloader.py +++ b/app/lib/package_control/downloaders/wget_downloader.py @@ -1,17 +1,9 @@ import tempfile import re import os - -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str +import sys from ..console_write import console_write -from ..unicode import unicode_from_os -from ..open_compat import open_compat, read_compat from .cli_downloader import CliDownloader from .non_http_error import NonHttpError from .non_clean_exit_error import NonCleanExitError @@ -85,7 +77,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): self.tmp_file = tempfile.NamedTemporaryFile().name command = [ self.wget, - '--connect-timeout=' + str_cls(int(timeout)), + '--connect-timeout=' + str(int(timeout)), '-o', self.tmp_file, '-O', @@ -116,7 +108,7 @@ def download(self, url, error_message, timeout, tries, 
prefer_cached=False): secure_url_match = re.match('^https://([^/]+)', url) if secure_url_match is not None: bundle_path = get_ca_bundle_path(self.settings) - command.append(u'--ca-certificate=' + bundle_path) + command.append('--ca-certificate=' + bundle_path) command.append('-S') if self.debug: @@ -130,13 +122,13 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): proxy_password = self.settings.get('proxy_password') if proxy_username: - command.append(u"--proxy-user=%s" % proxy_username) + command.append("--proxy-user=%s" % proxy_username) if proxy_password: - command.append(u"--proxy-password=%s" % proxy_password) + command.append("--proxy-password=%s" % proxy_password) if self.debug: console_write( - u''' + ''' Wget Debug Proxy http_proxy: %s https_proxy: %s @@ -180,7 +172,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): # GitHub and BitBucket seem to rate limit via 503 if tries and self.debug: console_write( - u''' + ''' Downloading %s was rate limited, trying again ''', url @@ -191,20 +183,20 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): except (NonHttpError) as e: - download_error = unicode_from_os(e) + download_error = str(e) # GitHub and BitBucket seem to time out a lot if download_error.find('timed out') != -1: if tries and self.debug: console_write( - u''' + ''' Downloading %s timed out, trying again ''', url ) continue - error_string = u'%s %s downloading %s.' % (error_message, download_error, url) + error_string = '%s %s downloading %s.' % (error_message, download_error, url) break @@ -249,8 +241,8 @@ def parse_output(self, clean_run): HTTP header names. 
""" - with open_compat(self.tmp_file, 'r') as f: - output = read_compat(f).splitlines() + with open(self.tmp_file, 'r', encoding=sys.getdefaultencoding()) as fobj: + output = fobj.read().splitlines() self.clean_tmp_file() debug_missing = False @@ -295,7 +287,7 @@ def parse_output(self, clean_run): continue if section != last_section: - console_write(u'Wget HTTP Debug %s', section) + console_write('Wget HTTP Debug %s', section) if section == 'Read': if debug_missing: @@ -304,7 +296,7 @@ def parse_output(self, clean_run): else: header_lines.append(line) - console_write(u' %s', line, prefix=False) + console_write(' %s', line, prefix=False) last_section = section else: diff --git a/app/lib/package_control/downloaders/wininet_downloader.py b/app/lib/package_control/downloaders/wininet_downloader.py index e61d42f..a490cd6 100644 --- a/app/lib/package_control/downloaders/wininet_downloader.py +++ b/app/lib/package_control/downloaders/wininet_downloader.py @@ -5,9 +5,9 @@ import struct # To prevent import errors in thread with datetime import locale # noqa +from urllib.parse import urlparse from ..console_write import console_write -from ..unicode import unicode_from_os from .. 
import text from .non_http_error import NonHttpError from .http_error import HttpError @@ -18,13 +18,6 @@ from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader -try: - # Python 3 - from urllib.parse import urlparse -except (ImportError): - # Python 2 - from urlparse import urlparse - wininet = windll.wininet @@ -201,7 +194,7 @@ def close(self): if self.debug: s = '' if self.use_count == 1 else 's' console_write( - u''' + ''' WinINet %s Debug General Closing connection to %s on port %s after %s request%s ''', @@ -209,7 +202,7 @@ def close(self): ) if changed_state_back: console_write( - u' Changed Internet Explorer back to Work Offline', + ' Changed Internet Explorer back to Work Offline', prefix=False ) @@ -272,12 +265,12 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): username = url_info.username password = url_info.password - if not username and not password: - username, password = self.get_username_password() - request_headers = { 'Accept-Encoding': self.supported_encodings() } + if not username and not password: + request_headers.update(self.build_auth_header(url)) + request_headers = self.add_conditional_headers(url, request_headers) created_connection = False @@ -323,7 +316,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if not self.network_connection: error_string = text.format( - u''' + ''' %s %s during network phase of downloading %s. ''', (error_message, self.extract_error(), url) @@ -367,7 +360,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if not self.tcp_connection: error_string = text.format( - u''' + ''' %s %s during connection phase of downloading %s. 
''', (error_message, self.extract_error(), url) @@ -401,7 +394,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): else: if self.debug: console_write( - u''' + ''' WinINet %s Debug General Re-using connection to %s on port %s for request #%s ''', @@ -431,9 +424,9 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): http_connection = wininet.HttpOpenRequestW( self.tcp_connection, - u'GET', + 'GET', path, - u'HTTP/1.1', + 'HTTP/1.1', None, None, http_flags, @@ -441,7 +434,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): ) if not http_connection: error_string = text.format( - u''' + ''' %s %s during HTTP connection phase of downloading %s. ''', (error_message, self.extract_error(), url) @@ -450,8 +443,8 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): request_header_lines = [] for header, value in request_headers.items(): - request_header_lines.append(u"%s: %s" % (header, value)) - request_header_lines = u"\r\n".join(request_header_lines) + request_header_lines.append("%s: %s" % (header, value)) + request_header_lines = "\r\n".join(request_header_lines) success = wininet.HttpSendRequestW( http_connection, @@ -463,7 +456,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if not success: error_string = text.format( - u''' + ''' %s %s during HTTP write phase of downloading %s. 
''', (error_message, self.extract_error(), url) @@ -474,7 +467,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): self.cache_proxy_info() if self.debug: console_write( - u''' + ''' WinINet Debug Proxy proxy: %s proxy bypass: %s @@ -494,7 +487,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if self.debug and created_connection: if changed_to_online: console_write( - u''' + ''' WinINet HTTP Debug General Internet Explorer was set to Work Offline, temporarily going online ''' @@ -523,16 +516,16 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): issue_date = self.convert_filetime_to_datetime(cert_struct.ftStart) issue_date = issue_date.strftime('%a, %d %b %Y %H:%M:%S GMT') else: - issue_date = u"No issue date" + issue_date = "No issue date" if cert_struct.ftExpiry.dwLowDateTime != 0 and cert_struct.ftExpiry.dwHighDateTime != 0: expiration_date = self.convert_filetime_to_datetime(cert_struct.ftExpiry) expiration_date = expiration_date.strftime('%a, %d %b %Y %H:%M:%S GMT') else: - expiration_date = u"No expiration date" + expiration_date = "No expiration date" console_write( - u''' + ''' WinINet HTTPS Debug General Server SSL Certificate: subject: %s @@ -542,8 +535,8 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): expire date: %s ''', ( - u', '.join(subject_parts), - u', '.join(issuer_parts), + ', '.join(subject_parts), + ', '.join(issuer_parts), common_name, issue_date, expiration_date @@ -556,11 +549,11 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): other_headers = [] for header, value in request_headers.items(): - other_headers.append(u'%s: %s' % (header, value)) - indented_headers = u'\n '.join(other_headers) + other_headers.append('%s: %s' % (header, value)) + indented_headers = '\n '.join(other_headers) console_write( - u''' + ''' WinINet %s Debug Write GET %s HTTP/1.1 User-Agent: %s @@ -597,7 +590,7 @@ def 
download(self, url, error_message, timeout, tries, prefer_cached=False): if not success: if ctypes.GetLastError() != self.ERROR_INSUFFICIENT_BUFFER: error_string = text.format( - u''' + ''' %s %s during header read phase of downloading %s. ''', (error_message, self.extract_error(), url) @@ -614,9 +607,9 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): headers = headers.decode('iso-8859-1').rstrip("\r\n").split("\r\n") if self.debug: - indented_headers = u'\n '.join(headers) + indented_headers = '\n '.join(headers) console_write( - u''' + ''' WinINet %s Debug Read %s ''', @@ -643,7 +636,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): # GitHub and BitBucket seem to rate limit via 503 if tries and self.debug: console_write( - u''' + ''' Downloading %s was rate limited, trying again ''', url @@ -663,10 +656,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): except (NonHttpError, HttpError) as e: # GitHub and BitBucket seem to time out a lot - if unicode_from_os(e).find('timed out') != -1: + if str(e).find('timed out') != -1: if tries and self.debug: console_write( - u''' + ''' Downloading %s timed out, trying again ''', url @@ -674,10 +667,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): continue error_string = text.format( - u''' + ''' %s %s downloading %s. 
''', - (error_message, unicode_from_os(e), url) + (error_message, str(e), url) ) finally: @@ -720,26 +713,26 @@ def extract_error(self): error_num = ctypes.GetLastError() raw_error_string = ctypes.FormatError(error_num) - error_string = unicode_from_os(raw_error_string) + error_string = str(raw_error_string) # Try to fill in some known errors - if error_string == u"": + if error_string == "": error_lookup = { - 12007: u'host not found', - 12029: u'connection refused', - 12057: u'error checking for server certificate revocation', - 12169: u'invalid secure certificate', - 12157: u'secure channel error, server not providing SSL', - 12002: u'operation timed out' + 12007: 'host not found', + 12029: 'connection refused', + 12057: 'error checking for server certificate revocation', + 12169: 'invalid secure certificate', + 12157: 'secure channel error, server not providing SSL', + 12002: 'operation timed out' } if error_num in error_lookup: error_string = error_lookup[error_num] - if error_string == u"": - return u"(errno %s)" % error_num + if error_string == "": + return "(errno %s)" % error_num error_string = error_string[0].upper() + error_string[1:] - return u"%s (errno %s)" % (error_string, error_num) + return "%s (errno %s)" % (error_string, error_num) def supports_ssl(self): """ @@ -774,8 +767,8 @@ def cache_proxy_info(self): self.proxy_username = self.read_option(self.tcp_connection, self.INTERNET_OPTION_PROXY_USERNAME) self.proxy_password = self.read_option(self.tcp_connection, self.INTERNET_OPTION_PROXY_PASSWORD) else: - self.proxy_username = u'' - self.proxy_password = u'' + self.proxy_username = '' + self.proxy_password = '' def read_option(self, handle, option): """ diff --git a/app/lib/package_control/file_not_found_error.py b/app/lib/package_control/file_not_found_error.py deleted file mode 100644 index 5d67655..0000000 --- a/app/lib/package_control/file_not_found_error.py +++ /dev/null @@ -1,17 +0,0 @@ -import sys - - -class FileNotFoundError(Exception): - 
- """If a file is not found""" - - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') diff --git a/app/lib/package_control/http/__init__.py b/app/lib/package_control/http/__init__.py index d57f2ca..e69de29 100644 --- a/app/lib/package_control/http/__init__.py +++ b/app/lib/package_control/http/__init__.py @@ -1,64 +0,0 @@ -import sys - -try: - # Python 2 - import urllib2 - import httplib - - # Monkey patch AbstractBasicAuthHandler to prevent infinite recursion - def non_recursive_http_error_auth_reqed(self, authreq, host, req, headers): - authreq = headers.get(authreq, None) - - if not hasattr(self, 'retried'): - self.retried = 0 - - if self.retried > 5: - raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed", headers, None) - else: - self.retried += 1 - - if authreq: - mo = urllib2.AbstractBasicAuthHandler.rx.search(authreq) - if mo: - scheme, quote, realm = mo.groups() - if scheme.lower() == 'basic': - return self.retry_http_basic_auth(host, req, realm) - - urllib2.AbstractBasicAuthHandler.http_error_auth_reqed = non_recursive_http_error_auth_reqed - - # Money patch urllib2.Request and httplib.HTTPConnection so that - # HTTPS proxies work in Python 2.6.1-2 - if sys.version_info < (2, 6, 3): - - urllib2.Request._tunnel_host = None - - def py268_set_proxy(self, host, type): - if self.type == 'https' and not self._tunnel_host: - self._tunnel_host = self.host - else: - self.type = type - # The _Request prefix is to handle python private name mangling - self._Request__r_host = self._Request__original - self.host = host - urllib2.Request.set_proxy = py268_set_proxy - - if sys.version_info < (2, 6, 5): - - def py268_set_tunnel(self, host, port=None, headers=None): - """ Sets up the host and the port for the HTTP CONNECT Tunnelling. 
- - The headers argument should be a mapping of extra HTTP headers - to send with the CONNECT request. - """ - self._tunnel_host = host - self._tunnel_port = port - if headers: - self._tunnel_headers = headers - else: - self._tunnel_headers.clear() - httplib.HTTPConnection._set_tunnel = py268_set_tunnel - - -except (ImportError): - # Python 3 does not need to be patched - pass diff --git a/app/lib/package_control/http/debuggable_http_connection.py b/app/lib/package_control/http/debuggable_http_connection.py index 504b20e..4ddb147 100644 --- a/app/lib/package_control/http/debuggable_http_connection.py +++ b/app/lib/package_control/http/debuggable_http_connection.py @@ -1,11 +1,5 @@ import socket - -try: - # Python 3 - from http.client import HTTPConnection -except (ImportError): - # Python 2 - from httplib import HTTPConnection +from http.client import HTTPConnection from ..console_write import console_write from .debuggable_http_response import DebuggableHTTPResponse @@ -23,10 +17,6 @@ class DebuggableHTTPConnection(HTTPConnection): def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kwargs): self.passwd = kwargs.get('passwd') - # Python 2.6.1 on OS X 10.6 does not include these - self._tunnel_host = None - self._tunnel_port = None - self._tunnel_headers = {} if 'debug' in kwargs and kwargs['debug']: self.debuglevel = 5 elif 'debuglevel' in kwargs: @@ -37,7 +27,7 @@ def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw def connect(self): if self.debuglevel == -1: console_write( - u''' + ''' Urllib %s Debug General Connecting to %s on port %s ''', @@ -58,9 +48,9 @@ def send(self, string): if reset_debug or self.debuglevel == -1: if len(string.strip()) > 0: unicode_string = string.strip().decode('iso-8859-1') - indented_headers = u'\n '.join(unicode_string.splitlines()) + indented_headers = '\n '.join(unicode_string.splitlines()) console_write( - u''' + ''' Urllib %s Debug Write %s ''', diff --git 
a/app/lib/package_control/http/debuggable_http_handler.py b/app/lib/package_control/http/debuggable_http_handler.py index 76cd97b..eabf34d 100644 --- a/app/lib/package_control/http/debuggable_http_handler.py +++ b/app/lib/package_control/http/debuggable_http_handler.py @@ -1,9 +1,4 @@ -try: - # Python 3 - from urllib.request import HTTPHandler -except (ImportError): - # Python 2 - from urllib2 import HTTPHandler +from urllib.request import HTTPHandler from .debuggable_http_connection import DebuggableHTTPConnection from .persistent_handler import PersistentHandler diff --git a/app/lib/package_control/http/debuggable_http_response.py b/app/lib/package_control/http/debuggable_http_response.py index e5bfeba..05fa861 100644 --- a/app/lib/package_control/http/debuggable_http_response.py +++ b/app/lib/package_control/http/debuggable_http_response.py @@ -1,11 +1,4 @@ -try: - # Python 3 - from http.client import HTTPResponse, IncompleteRead - str_cls = str -except (ImportError): - # Python 2 - from httplib import HTTPResponse, IncompleteRead - str_cls = unicode # noqa +from http.client import HTTPResponse, IncompleteRead from ..console_write import console_write @@ -39,16 +32,16 @@ def begin(self): headers.append("%s: %s" % (header, self.msg[header])) versions = { - 9: u'HTTP/0.9', - 10: u'HTTP/1.0', - 11: u'HTTP/1.1' + 9: 'HTTP/0.9', + 10: 'HTTP/1.0', + 11: 'HTTP/1.1' } - status_line = u'%s %s %s' % (versions[self.version], str_cls(self.status), self.reason) + status_line = '%s %s %s' % (versions[self.version], str(self.status), self.reason) headers.insert(0, status_line) - indented_headers = u'\n '.join(headers) + indented_headers = '\n '.join(headers) console_write( - u''' + ''' Urllib %s Debug Read %s ''', diff --git a/app/lib/package_control/http/invalid_certificate_exception.py b/app/lib/package_control/http/invalid_certificate_exception.py index 19438cd..f6f79a7 100644 --- a/app/lib/package_control/http/invalid_certificate_exception.py +++ 
b/app/lib/package_control/http/invalid_certificate_exception.py @@ -1,13 +1,5 @@ -import sys - -try: - # Python 3 - from http.client import HTTPException - from urllib.error import URLError -except (ImportError): - # Python 2 - from httplib import HTTPException - from urllib2 import URLError +from http.client import HTTPException +from urllib.error import URLError class InvalidCertificateException(HTTPException, URLError): @@ -24,13 +16,5 @@ def __init__(self, host, cert, reason): message = 'Host %s returned an invalid certificate (%s) %s' % (self.host, self.reason, self.cert) HTTPException.__init__(self, message.rstrip()) - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - def __bytes__(self): - return self.__unicode__().encode('utf-8') + return self.__str__().encode('utf-8') diff --git a/app/lib/package_control/http/persistent_handler.py b/app/lib/package_control/http/persistent_handler.py index 056692d..6ac0112 100644 --- a/app/lib/package_control/http/persistent_handler.py +++ b/app/lib/package_control/http/persistent_handler.py @@ -1,13 +1,5 @@ -import sys import socket - -try: - # Python 3 - from urllib.error import URLError -except ImportError: - # Python 2 - from urllib2 import URLError - from urllib import addinfourl +from urllib.error import URLError from ..console_write import console_write @@ -21,7 +13,7 @@ def close(self): if self._debuglevel == 5: s = '' if self.use_count == 1 else 's' console_write( - u''' + ''' Urllib %s Debug General Closing connection to %s on port %s after %s request%s ''', @@ -41,10 +33,7 @@ def do_open(self, http_class, req): # Large portions from Python 3.3 Lib/urllib/request.py and # Python 2.6 Lib/urllib2.py - if sys.version_info >= (3,): - host = req.host - else: - host = req.get_host() + host = req.host if not host: raise URLError('no host given') @@ -60,26 +49,17 @@ def do_open(self, http_class, req): h = 
self.connection if self._debuglevel == 5: console_write( - u''' + ''' Urllib %s Debug General Re-using connection to %s on port %s for request #%s ''', (h._debug_protocol, h.host, h.port, self.use_count) ) - if sys.version_info >= (3,): - headers = dict(req.unredirected_hdrs) - headers.update(dict((k, v) for k, v in req.headers.items() - if k not in headers)) - headers = dict((name.title(), val) for name, val in headers.items()) - - else: - h.set_debuglevel(self._debuglevel) - - headers = dict(req.headers) - headers.update(req.unredirected_hdrs) - headers = dict( - (name.title(), val) for name, val in headers.items()) + headers = dict(req.unredirected_hdrs) + headers.update(dict((k, v) for k, v in req.headers.items() + if k not in headers)) + headers = dict((name.title(), val) for name, val in headers.items()) if req._tunnel_host and not self.connection: tunnel_headers = {} @@ -88,16 +68,10 @@ def do_open(self, http_class, req): tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] del headers[proxy_auth_hdr] - if sys.version_info >= (3,): - h.set_tunnel(req._tunnel_host, headers=tunnel_headers) - else: - h._set_tunnel(req._tunnel_host, headers=tunnel_headers) + h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: - if sys.version_info >= (3,): - h.request(req.get_method(), req.selector, req.data, headers) - else: - h.request(req.get_method(), req.get_selector(), req.data, headers) + h.request(req.get_method(), req.selector, req.data, headers) except socket.error as err: # timeout error h.close() raise URLError(err) @@ -111,7 +85,7 @@ def do_open(self, http_class, req): if self._debuglevel == 5: s = '' if self.use_count == 1 else 's' console_write( - u''' + ''' Urllib %s Debug General Closing connection to %s on port %s after %s request%s ''', @@ -120,15 +94,6 @@ def do_open(self, http_class, req): self.use_count = 0 self.connection = None - if sys.version_info >= (3,): - r.url = req.get_full_url() - r.msg = r.reason - return r - - r.recv = r.read - fp = 
socket._fileobject(r, close=True) - - resp = addinfourl(fp, r.msg, req.get_full_url()) - resp.code = r.status - resp.msg = r.reason - return resp + r.url = req.get_full_url() + r.msg = r.reason + return r diff --git a/app/lib/package_control/http/validating_https_connection.py b/app/lib/package_control/http/validating_https_connection.py index a1d60c5..af9e1e2 100644 --- a/app/lib/package_control/http/validating_https_connection.py +++ b/app/lib/package_control/http/validating_https_connection.py @@ -4,17 +4,8 @@ import hashlib import os import sys - -try: - # Python 3 - from http.client import HTTPS_PORT - from urllib.request import parse_keqv_list, parse_http_list - x509 = None -except (ImportError): - # Python 2 - from httplib import HTTPS_PORT - from urllib2 import parse_keqv_list, parse_http_list - from ..deps.asn1crypto import x509 +from http.client import HTTPS_PORT +from urllib.request import parse_keqv_list, parse_http_list from ..console_write import console_write from .debuggable_https_response import DebuggableHTTPSResponse @@ -105,7 +96,7 @@ def _tunnel(self): self._proxy_port = self.port self._set_hostport(self._tunnel_host, self._tunnel_port) - self._tunnel_headers['Host'] = u"%s:%s" % (self.host, self.port) + self._tunnel_headers['Host'] = "%s:%s" % (self.host, self.port) self._tunnel_headers['User-Agent'] = self.user_agent self._tunnel_headers['Proxy-Connection'] = 'Keep-Alive' @@ -114,15 +105,14 @@ def _tunnel(self): request += "%s: %s\r\n" % (header, value) request += "\r\n" - if sys.version_info >= (3,): - request = bytes(request, 'iso-8859-1') + request = bytes(request, 'iso-8859-1') self.send(request) response = self.response_class(self.sock, method=self._method) (version, code, message) = response._read_status() - status_line = u"%s %s %s" % (version, code, message.rstrip()) + status_line = "%s %s %s" % (version, code, message.rstrip()) headers = [status_line] content_length = 0 @@ -130,8 +120,7 @@ def _tunnel(self): while True: line = 
response.fp.readline() - if sys.version_info >= (3,): - line = line.decode('iso-8859-1') + line = line.decode('iso-8859-1') if line == '\r\n': break @@ -148,9 +137,9 @@ def _tunnel(self): close_connection = True if self.debuglevel in [-1, 5]: - indented_headers = u'\n '.join(headers) + indented_headers = '\n '.join(headers) console_write( - u''' + ''' Urllib %s Debug Read %s ''', @@ -177,12 +166,12 @@ def _tunnel(self): response_value = self.build_digest_response( supported_auth_methods['digest'], username, password) if response_value: - self._tunnel_headers['Proxy-Authorization'] = u"Digest %s" % response_value + self._tunnel_headers['Proxy-Authorization'] = "Digest %s" % response_value elif 'basic' in supported_auth_methods: - response_value = u"%s:%s" % (username, password) + response_value = "%s:%s" % (username, password) response_value = base64.b64encode(response_value.encode('utf-8')).decode('utf-8') - self._tunnel_headers['Proxy-Authorization'] = u"Basic %s" % response_value.strip() + self._tunnel_headers['Proxy-Authorization'] = "Basic %s" % response_value.strip() if 'Proxy-Authorization' in self._tunnel_headers: self.host = self._proxy_host @@ -242,7 +231,7 @@ def sha1hash(string): else: return None - host_port = u"%s:%s" % (self.host, self.port) + host_port = "%s:%s" % (self.host, self.port) a1 = "%s:%s:%s" % (username, realm, password) a2 = "CONNECT:%s" % host_port @@ -250,11 +239,11 @@ def sha1hash(string): ha2 = hash(a2) if qop is None: - response = hash(u"%s:%s:%s" % (ha1, nonce, ha2)) + response = hash("%s:%s:%s" % (ha1, nonce, ha2)) elif qop == 'auth': nc = '00000001' cnonce = hash(os.urandom(8))[:8] - response = hash(u"%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2)) + response = hash("%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2)) else: return None @@ -274,7 +263,7 @@ def sha1hash(string): if opaque: response_fields['opaque'] = opaque - return ', '.join([u"%s=\"%s\"" % (field, response_fields[field]) for field in 
response_fields]) + return ', '.join(["%s=\"%s\"" % (field, response_fields[field]) for field in response_fields]) def connect(self): """ @@ -283,7 +272,7 @@ def connect(self): if self.debuglevel == -1: console_write( - u''' + ''' Urllib HTTPS Debug General Connecting to %s on port %s ''', @@ -296,7 +285,7 @@ def connect(self): if self.debuglevel == -1: console_write( - u''' + ''' Urllib HTTPS Debug General Upgrading connection to SSL using CA certs file at %s ''', @@ -305,48 +294,36 @@ def connect(self): hostname = self.host.split(':', 0)[0] - # Python 3 supports SNI when using an SSLContext - if sys.version_info >= (3,): - proto = ssl.PROTOCOL_SSLv23 - if sys.version_info >= (3, 6): - proto = ssl.PROTOCOL_TLS - self.ctx = ssl.SSLContext(proto) - if sys.version_info < (3, 7): - self.ctx.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 - else: - self.ctx.minimum_version = ssl.TLSVersion.TLSv1 - self.ctx.verify_mode = self.cert_reqs - self.ctx.load_verify_locations(self.ca_certs) - # We don't call load_cert_chain() with self.key_file and self.cert_file - # since that is for servers, and this code only supports client mode - if self.debuglevel == -1: - console_write( - u''' - Using hostname "%s" for TLS SNI extension - ''', - hostname, - indent=' ', - prefix=False - ) - self.sock = self.ctx.wrap_socket( - self.sock, - server_hostname=hostname - ) - + proto = ssl.PROTOCOL_SSLv23 + if sys.version_info >= (3, 6): + proto = ssl.PROTOCOL_TLS + self.ctx = ssl.SSLContext(proto) + if sys.version_info < (3, 7): + self.ctx.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 else: - self.sock = ssl.wrap_socket( - self.sock, - keyfile=self.key_file, - certfile=self.cert_file, - cert_reqs=self.cert_reqs, - ca_certs=self.ca_certs, - ssl_version=ssl.PROTOCOL_SSLv23 + self.ctx.minimum_version = ssl.TLSVersion.TLSv1 + self.ctx.verify_mode = self.cert_reqs + self.ctx.load_verify_locations(self.ca_certs) + # We don't call load_cert_chain() with self.key_file and 
self.cert_file + # since that is for servers, and this code only supports client mode + if self.debuglevel == -1: + console_write( + ''' + Using hostname "%s" for TLS SNI extension + ''', + hostname, + indent=' ', + prefix=False ) + self.sock = self.ctx.wrap_socket( + self.sock, + server_hostname=hostname + ) if self.debuglevel == -1: cipher_info = self.sock.cipher() console_write( - u''' + ''' Successfully upgraded connection to %s:%s with SSL Using %s with cipher %s ''', @@ -358,20 +335,6 @@ def connect(self): # This debugs and validates the SSL certificate if self.cert_reqs & ssl.CERT_REQUIRED: cert = self.sock.getpeercert() - # Python 2.6 doesn't seem to parse the subject alt name, so - # we parse the raw DER certificate and grab the info ourself - if x509: - der_cert = self.sock.getpeercert(True) - cert_object = x509.Certificate.load(der_cert) - if cert_object.subject_alt_name_value: - subject_alt_names = [] - for general_name in cert_object.subject_alt_name_value: - if general_name.name != 'dns_name': - continue - if 'commonName' not in cert or general_name.native != cert['commonName']: - subject_alt_names.append(('DNS', general_name.native)) - if subject_alt_names: - cert['subjectAltName'] = tuple(subject_alt_names) if self.debuglevel == -1: subjectMap = { @@ -397,7 +360,7 @@ def connect(self): subject_parts.append(field_name + '=' + pair[0][1]) console_write( - u''' + ''' Server SSL certificate: subject: %s ''', @@ -408,18 +371,18 @@ def connect(self): if 'subjectAltName' in cert: alt_names = [c[1] for c in cert['subjectAltName']] alt_names = ', '.join(alt_names) - console_write(u' subject alt name: %s', alt_names, prefix=False) + console_write(' subject alt name: %s', alt_names, prefix=False) if 'notAfter' in cert: - console_write(u' expire date: %s', cert['notAfter'], prefix=False) + console_write(' expire date: %s', cert['notAfter'], prefix=False) if not self.validate_cert_host(cert, hostname): if self.debuglevel == -1: - console_write(u' Certificate 
INVALID', prefix=False) + console_write(' Certificate INVALID', prefix=False) raise InvalidCertificateException(hostname, cert, 'hostname mismatch') if self.debuglevel == -1: - console_write(u' Certificate validated for %s', hostname, prefix=False) + console_write(' Certificate validated for %s', hostname, prefix=False) except (ImportError): pass diff --git a/app/lib/package_control/http/validating_https_handler.py b/app/lib/package_control/http/validating_https_handler.py index 5c569fb..10c50b9 100644 --- a/app/lib/package_control/http/validating_https_handler.py +++ b/app/lib/package_control/http/validating_https_handler.py @@ -1,63 +1,40 @@ -try: - # Python 3 - from urllib.error import URLError - import urllib.request as urllib_compat -except (ImportError): - # Python 2 - from urllib2 import URLError - import urllib2 as urllib_compat - - -# The following code is wrapped in a try because the Linux versions of Sublime -# Text do not include the ssl module due to the fact that different distros -# have different versions -try: - import ssl - - from .validating_https_connection import ValidatingHTTPSConnection - from .invalid_certificate_exception import InvalidCertificateException - from .persistent_handler import PersistentHandler - - if hasattr(urllib_compat, 'HTTPSHandler'): - class ValidatingHTTPSHandler(PersistentHandler, urllib_compat.HTTPSHandler): - - """ - A urllib handler that validates SSL certificates for HTTPS requests - """ - - def __init__(self, **kwargs): - # This is a special value that will not trigger the standard debug - # functionality, but custom code where we can format the output - self._debuglevel = 0 - if 'debug' in kwargs and kwargs['debug']: - self._debuglevel = 5 - elif 'debuglevel' in kwargs: - self._debuglevel = kwargs['debuglevel'] - self._connection_args = kwargs - - def https_open(self, req): - def http_class_wrapper(host, **kwargs): - full_kwargs = dict(self._connection_args) - full_kwargs.update(kwargs) - return 
ValidatingHTTPSConnection(host, **full_kwargs) - - try: - return self.do_open(http_class_wrapper, req) - except URLError as e: - if type(e.reason) == ssl.SSLError and e.reason.args[0] == 1: - raise InvalidCertificateException(req.host, '', - e.reason.args[1]) - raise - - https_request = urllib_compat.AbstractHTTPHandler.do_request_ - else: - raise ImportError() - -except (ImportError) as e: - - import_error = e - - class ValidatingHTTPSHandler(): - - def __init__(self, **kwargs): - raise import_error +import ssl +from urllib.error import URLError +import urllib.request as urllib_compat + +from .validating_https_connection import ValidatingHTTPSConnection +from .invalid_certificate_exception import InvalidCertificateException +from .persistent_handler import PersistentHandler + + +class ValidatingHTTPSHandler(PersistentHandler, urllib_compat.HTTPSHandler): + + """ + A urllib handler that validates SSL certificates for HTTPS requests + """ + + def __init__(self, **kwargs): + # This is a special value that will not trigger the standard debug + # functionality, but custom code where we can format the output + self._debuglevel = 0 + if 'debug' in kwargs and kwargs['debug']: + self._debuglevel = 5 + elif 'debuglevel' in kwargs: + self._debuglevel = kwargs['debuglevel'] + self._connection_args = kwargs + + def https_open(self, req): + def http_class_wrapper(host, **kwargs): + full_kwargs = dict(self._connection_args) + full_kwargs.update(kwargs) + return ValidatingHTTPSConnection(host, **full_kwargs) + + try: + return self.do_open(http_class_wrapper, req) + except URLError as e: + if type(e.reason) == ssl.SSLError and e.reason.args[0] == 1: + raise InvalidCertificateException(req.host, '', + e.reason.args[1]) + raise + + https_request = urllib_compat.AbstractHTTPHandler.do_request_ diff --git a/app/lib/package_control/http_cache.py b/app/lib/package_control/http_cache.py index a0ba634..9a76b61 100644 --- a/app/lib/package_control/http_cache.py +++ 
b/app/lib/package_control/http_cache.py @@ -1,21 +1,20 @@ -# Not shared with Package Control - import os +import time -from datetime import datetime, timedelta - -from ..connection import connection +from .sys_path import pc_cache_dir class HttpCache(object): + """ A data store for caching HTTP response data. """ def __init__(self, ttl): + self.base_path = os.path.join(pc_cache_dir(), 'http_cache') + os.makedirs(self.base_path, exist_ok=True) self.clear(int(ttl)) - def clear(self, ttl): """ Removes all cache entries older than the TTL @@ -25,11 +24,16 @@ def clear(self, ttl): """ ttl = int(ttl) - cutoff = datetime.utcnow() - timedelta(seconds=ttl) - - with connection() as cursor: - cursor.execute("DELETE FROM http_cache_entries WHERE last_modified < %s", [cutoff]) + for filename in os.listdir(self.base_path): + path = os.path.join(self.base_path, filename) + # There should not be any folders in the cache dir, but we + # ignore to prevent an exception + if os.path.isdir(path): + continue + mtime = os.stat(path).st_mtime + if mtime < time.time() - ttl: + os.unlink(path) def get(self, key): """ @@ -41,20 +45,16 @@ def get(self, key): :return: The (binary) cached value, or False """ - - with connection() as cursor: - cursor.execute("SELECT content FROM http_cache_entries WHERE key = %s", [key]) - row = cursor.fetchone() - if not row: - return False - - return row['content'].tobytes() - + try: + cache_file = os.path.join(self.base_path, key) + with open(cache_file, 'rb') as fobj: + return fobj.read() + except FileNotFoundError: + return False def has(self, key): - with connection() as cursor: - cursor.execute("SELECT key FROM http_cache_entries WHERE key = %s", [key]) - return cursor.fetchone() != None + cache_file = os.path.join(self.base_path, key) + return os.path.exists(cache_file) def path(self, key): """ @@ -67,7 +67,7 @@ def path(self, key): The absolute filesystem path to the cache file """ - return "SELECT * FROM http_cache_entries WHERE key = '%s'" % key + 
return os.path.join(self.base_path, key) def set(self, key, content): """ @@ -80,10 +80,6 @@ def set(self, key, content): The (binary) content to cache """ - if self.has(key): - sql = "UPDATE http_cache_entries SET content = %s, last_modified = CURRENT_TIMESTAMP WHERE key = %s" - else: - sql = "INSERT INTO http_cache_entries (content, last_modified, key) VALUES (%s, CURRENT_TIMESTAMP, %s)" - - with connection() as cursor: - cursor.execute(sql, [content, key]) + cache_file = os.path.join(self.base_path, key) + with open(cache_file, 'wb') as f: + f.write(content) diff --git a/app/lib/package_control/open_compat.py b/app/lib/package_control/open_compat.py deleted file mode 100644 index 5aacd26..0000000 --- a/app/lib/package_control/open_compat.py +++ /dev/null @@ -1,39 +0,0 @@ -import os -import sys - -from .file_not_found_error import FileNotFoundError - -try: - str_cls = unicode -except (NameError): - str_cls = str - - -def open_compat(path, mode='r'): - if mode in ['r', 'rb'] and not os.path.exists(path): - raise FileNotFoundError(u"The file \"%s\" could not be found" % path) - - if sys.version_info >= (3,): - encoding = 'utf-8' - errors = 'replace' - if mode in ['rb', 'wb', 'ab']: - encoding = None - errors = None - return open(path, mode, encoding=encoding, errors=errors) - - else: - return open(path, mode) - - -def read_compat(file_obj): - if sys.version_info >= (3,): - return file_obj.read() - else: - return str_cls(file_obj.read(), 'utf-8', errors='replace') - - -def write_compat(file_obj, value): - if sys.version_info >= (3,): - return file_obj.write(str(value)) - else: - return file_obj.write(str_cls(value).encode('utf-8')) diff --git a/app/lib/package_control/providers/base_repository_provider.py b/app/lib/package_control/providers/base_repository_provider.py new file mode 100644 index 0000000..4e0b8d1 --- /dev/null +++ b/app/lib/package_control/providers/base_repository_provider.py @@ -0,0 +1,131 @@ +class BaseRepositoryProvider: + """ + Base repository 
downloader that fetches package info + + This base class acts as interface to ensure all providers expose the same + set of methods. All providers should therefore derive from this base class. + + The structure of the JSON a repository should contain is located in + example-packages.json. + + :param repo_url: + The URL of the package repository + + :param settings: + A dict containing at least the following fields: + `cache_length`, + `debug`, + `timeout`, + `user_agent` + Optional fields: + `http_proxy`, + `https_proxy`, + `proxy_username`, + `proxy_password`, + `query_string_params` + """ + + __slots__ = [ + 'broken_libraries', + 'broken_packages', + 'cache', + 'failed_sources', + 'repo_url', + 'settings', + ] + + def __init__(self, repo_url, settings): + self.broken_libraries = {} + self.broken_packages = {} + self.failed_sources = {} + self.cache = {} + self.repo_url = repo_url + self.settings = settings + + @classmethod + def match_url(cls, repo_url): + """ + Indicates if this provider can handle the provided repo_url + """ + + return True + + def prefetch(self): + """ + Go out and perform HTTP operations, caching the result + + :raises: + DownloaderException: when there is an issue downloading package info + ClientException: when there is an issue parsing package info + """ + + [name for name, info in self.get_packages()] + + def fetch(self): + """ + Retrieves and loads the JSON for other methods to use + + :raises: + NotImplementedError: when called + """ + + raise NotImplementedError() + + def get_broken_libraries(self): + """ + List of library names for libraries that are missing information + + :return: + A generator of ("Library Name", Exception()) tuples + """ + + return self.broken_libraries.items() + + def get_broken_packages(self): + """ + List of package names for packages that are missing information + + :return: + A generator of ("Package Name", Exception()) tuples + """ + + return self.broken_packages.items() + + def get_failed_sources(self):
""" + List of any URLs that could not be accessed while accessing this repository + + :return: + A generator of ("https://example.com", Exception()) tuples + """ + + return self.failed_sources.items() + + def get_libraries(self, invalid_sources=None): + """ + For API-compatibility with RepositoryProvider + """ + + return {}.items() + + def get_packages(self, invalid_sources=None): + """ + For API-compatibility with RepositoryProvider + """ + + return {}.items() + + def get_sources(self): + """ + Return a list of current URLs that are directly referenced by the repo + + :return: + A list of URLs + """ + + return [self.repo_url] + + def get_renamed_packages(self): + """For API-compatibility with RepositoryProvider""" + + return {} diff --git a/app/lib/package_control/providers/bitbucket_repository_provider.py b/app/lib/package_control/providers/bitbucket_repository_provider.py index 2179624..73c6605 100644 --- a/app/lib/package_control/providers/bitbucket_repository_provider.py +++ b/app/lib/package_control/providers/bitbucket_repository_provider.py @@ -1,13 +1,13 @@ import re from ..clients.bitbucket_client import BitBucketClient -from ..downloaders.downloader_exception import DownloaderException from ..clients.client_exception import ClientException +from ..downloaders.downloader_exception import DownloaderException +from .base_repository_provider import BaseRepositoryProvider from .provider_exception import ProviderException -class BitBucketRepositoryProvider(): - +class BitBucketRepositoryProvider(BaseRepositoryProvider): """ Allows using a public BitBucket repository as the source for a single package. 
For legacy purposes, this can also be treated as the source for a Package @@ -32,57 +32,11 @@ class BitBucketRepositoryProvider(): `http_basic_auth` """ - def __init__(self, repo, settings): - self.cache = {} - self.repo = repo - self.settings = settings - self.failed_sources = {} - @classmethod - def match_url(cls, repo): - """Indicates if this provider can handle the provided repo""" - - return re.search('^https?://bitbucket.org/([^/]+/[^/]+)/?$', repo) is not None - - def prefetch(self): - """ - Go out and perform HTTP operations, caching the result - - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - """ - - [name for name, info in self.get_packages()] - - def get_failed_sources(self): - """ - List of any URLs that could not be accessed while accessing this repository - - :return: - A generator of ("https://bitbucket.org/user/repo", Exception()) tuples - """ - - return self.failed_sources.items() - - def get_broken_packages(self): - """ - For API-compatibility with RepositoryProvider - """ - - return {}.items() - - def get_broken_dependencies(self): - """ - For API-compatibility with RepositoryProvider - """ - - return {}.items() - - def get_dependencies(self, ): - "For API-compatibility with RepositoryProvider" + def match_url(cls, repo_url): + """Indicates if this provider can handle the provided repo_url""" - return {}.items() + return re.search('^https?://bitbucket.org/([^/]+/[^/]+)/?$', repo_url) is not None def get_packages(self, invalid_sources=None): """ @@ -131,16 +85,16 @@ def get_packages(self, invalid_sources=None): yield (key, value) return - client = BitBucketClient(self.settings) - - if invalid_sources is not None and self.repo in invalid_sources: + if invalid_sources is not None and self.repo_url in invalid_sources: raise StopIteration() + client = BitBucketClient(self.settings) + try: - repo_info = client.repo_info(self.repo) + repo_info = 
client.repo_info(self.repo_url) releases = [] - for download in client.download_info(self.repo): + for download in client.download_info(self.repo_url): download['sublime_text'] = '*' download['platforms'] = ['*'] releases.append(download) @@ -155,7 +109,7 @@ def get_packages(self, invalid_sources=None): 'releases': releases, 'previous_names': [], 'labels': [], - 'sources': [self.repo], + 'sources': [self.repo_url], 'readme': repo_info['readme'], 'issues': repo_info['issues'], 'donate': repo_info['donate'], @@ -165,21 +119,6 @@ def get_packages(self, invalid_sources=None): yield (name, details) except (DownloaderException, ClientException, ProviderException) as e: - self.failed_sources[self.repo] = e + self.failed_sources[self.repo_url] = e self.cache['get_packages'] = {} raise StopIteration() - - def get_sources(self): - """ - Return a list of current URLs that are directly referenced by the repo - - :return: - A list of URLs - """ - - return [self.repo] - - def get_renamed_packages(self): - """For API-compatibility with RepositoryProvider""" - - return {} diff --git a/app/lib/package_control/providers/channel_provider.py b/app/lib/package_control/providers/channel_provider.py index 1373c80..a775d21 100644 --- a/app/lib/package_control/providers/channel_provider.py +++ b/app/lib/package_control/providers/channel_provider.py @@ -1,36 +1,34 @@ import json import os import re +from urllib.parse import urljoin -try: - # Python 3 - from urllib.parse import urljoin - str_cls = str -except (ImportError): - # Python 2 - from urlparse import urljoin - str_cls = unicode # noqa - -from .. 
import text from ..console_write import console_write -from .provider_exception import ProviderException -from .schema_compat import platforms_to_releases from ..download_manager import downloader, update_url from ..versions import version_sort +from .provider_exception import ProviderException +from .schema_compat import platforms_to_releases +from .schema_compat import SchemaVersion + + +class InvalidChannelFileException(ProviderException): + def __init__(self, channel, reason_message): + super().__init__( + 'Channel %s does not appear to be a valid channel file because' + ' %s' % (channel.url, reason_message)) -class ChannelProvider(): +class ChannelProvider: """ Retrieves a channel and provides an API into the information The current channel/repository infrastructure caches repository info into the channel to improve the Package Control client performance. This also - has the side effect of lessening the load on the GitHub, GitLab and - BitBucket APIs and getting around not-infrequent HTTP 503 errors from - those APIs. + has the side effect of lessening the load on the GitHub and BitBucket APIs + and getting around not-infrequent HTTP 503 errors from those APIs. - :param channel: + :param channel_url: The URL of the channel :param settings: @@ -48,16 +46,24 @@ class ChannelProvider(): `http_basic_auth` """ - def __init__(self, channel, settings): + __slots__ = [ + 'channel_info', + 'channel_url', + 'schema_version', + 'settings', + ] + + def __init__(self, channel_url, settings): self.channel_info = None - self.schema_version = '0.0' - self.schema_major_version = 0 - self.channel = channel + self.channel_url = channel_url + self.schema_version = None self.settings = settings @classmethod - def match_url(cls, channel): - """Indicates if this provider can handle the provided channel""" + def match_url(cls, channel_url): + """ + Indicates if this provider can handle the provided channel_url. 
+ """ return True @@ -77,67 +83,50 @@ def fetch(self): Retrieves and loads the JSON for other methods to use :raises: - ProviderException: when an error occurs with the channel contents + InvalidChannelFileException: when parsing or validation file content fails + ProviderException: when an error occurs trying to open a file DownloaderException: when an error occurs trying to open a URL """ if self.channel_info is not None: return - if re.match('https?://', self.channel, re.I): - with downloader(self.channel, self.settings) as manager: - channel_json = manager.fetch(self.channel, 'Error downloading channel.') + if re.match(r'https?://', self.channel_url, re.I): + with downloader(self.channel_url, self.settings) as manager: + json_string = manager.fetch(self.channel_url, 'Error downloading channel.') # All other channels are expected to be filesystem paths else: - if not os.path.exists(self.channel): - raise ProviderException(u'Error, file %s does not exist' % self.channel) + if not os.path.exists(self.channel_url): + raise ProviderException('Error, file %s does not exist' % self.channel_url) if self.settings.get('debug'): console_write( - u''' + ''' Loading %s as a channel ''', - self.channel + self.channel_url ) # We open as binary so we get bytes like the DownloadManager - with open(self.channel, 'rb') as f: - channel_json = f.read() + with open(self.channel_url, 'rb') as f: + json_string = f.read() try: - channel_info = json.loads(channel_json.decode('utf-8')) + channel_info = json.loads(json_string.decode('utf-8')) except (ValueError): - raise ProviderException(u'Error parsing JSON from channel %s.' % self.channel) - - schema_error = u'Channel %s does not appear to be a valid channel file because ' % self.channel - - if 'schema_version' not in channel_info: - raise ProviderException(u'%s the "schema_version" JSON key is missing.' 
% schema_error) + raise InvalidChannelFileException(self, 'parsing JSON failed.') try: - self.schema_version = channel_info.get('schema_version') - if isinstance(self.schema_version, int): - self.schema_version = float(self.schema_version) - if isinstance(self.schema_version, float): - self.schema_version = str_cls(self.schema_version) - except (ValueError): - raise ProviderException(u'%s the "schema_version" is not a valid number.' % schema_error) - - if self.schema_version not in ['1.0', '1.1', '1.2', '2.0', '3.0.0']: - raise ProviderException(text.format( - u''' - %s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2, 2.0 or 3.0.0. - ''', - schema_error - )) - - version_parts = self.schema_version.split('.') - self.schema_major_version = int(version_parts[0]) + schema_version = SchemaVersion(channel_info['schema_version']) + except KeyError: + raise InvalidChannelFileException(self, 'the "schema_version" JSON key is missing.') + except ValueError as e: + raise InvalidChannelFileException(self, e) # Fix any out-dated repository URLs in the package cache debug = self.settings.get('debug') - packages_key = 'packages_cache' if self.schema_major_version >= 2 else 'packages' + packages_key = 'packages_cache' if schema_version.major >= 2 else 'packages' if packages_key in channel_info: original_cache = channel_info[packages_key] new_cache = {} @@ -146,6 +135,7 @@ def fetch(self): channel_info[packages_key] = new_cache self.channel_info = channel_info + self.schema_version = schema_version def get_name_map(self): """ @@ -159,7 +149,7 @@ def get_name_map(self): self.fetch() - if self.schema_major_version >= 2: + if self.schema_version.major >= 2: return {} return self.channel_info.get('package_name_map', {}) @@ -176,7 +166,7 @@ def get_renamed_packages(self): self.fetch() - if self.schema_major_version >= 2: + if self.schema_version.major >= 2: output = {} if 'packages_cache' in self.channel_info: for repo in self.channel_info['packages_cache']: @@ 
-203,27 +193,21 @@ def get_repositories(self): self.fetch() if 'repositories' not in self.channel_info: - raise ProviderException(text.format( - u''' - Channel %s does not appear to be a valid channel file because - the "repositories" JSON key is missing. - ''', - self.channel - )) + raise InvalidChannelFileException( + self, 'the "repositories" JSON key is missing.') # Determine a relative root so repositories can be defined # relative to the location of the channel file. - scheme_match = re.match('(https?:)//', self.channel, re.I) + scheme_match = re.match(r'(https?:)//', self.channel_url, re.I) if scheme_match is None: - relative_base = os.path.dirname(self.channel) + relative_base = os.path.dirname(self.channel_url) is_http = False else: is_http = True debug = self.settings.get('debug') output = [] - repositories = self.channel_info.get('repositories', []) - for repository in repositories: + for repository in self.channel_info['repositories']: if repository.startswith('//'): if scheme_match is not None: repository = scheme_match.group(1) + repository @@ -234,7 +218,7 @@ def get_repositories(self): continue elif repository.startswith('./') or repository.startswith('../'): if is_http: - repository = urljoin(self.channel, repository) + repository = urljoin(self.channel_url, repository) else: repository = os.path.join(relative_base, repository) repository = os.path.normpath(repository) @@ -253,11 +237,11 @@ def get_sources(self): return self.get_repositories() - def get_packages(self, repo): + def get_packages(self, repo_url): """ Provides access to the repository info that is cached in a channel - :param repo: + :param repo_url: The URL of the repository to get the cached info of :raises: @@ -295,35 +279,36 @@ def get_packages(self, repo): self.fetch() - repo = update_url(repo, self.settings.get('debug')) + repo_url = update_url(repo_url, self.settings.get('debug')) # The 2.0 channel schema renamed the key cached package info was # stored under in order to be more 
clear to new users. - packages_key = 'packages_cache' if self.schema_major_version >= 2 else 'packages' - - if self.channel_info.get(packages_key, False) is False: - return {} - - if self.channel_info[packages_key].get(repo, False) is False: - return {} + packages_key = 'packages_cache' if self.schema_version.major >= 2 else 'packages' output = {} - for package in self.channel_info[packages_key][repo]: + for package in self.channel_info.get(packages_key, {}).get(repo_url, []): copy = package.copy() # In schema version 2.0, we store a list of dicts containing info # about all available releases. These include "version" and # "platforms" keys that are used to pick the download for the # current machine. - if self.schema_major_version < 2: + if self.schema_version.major < 2: copy['releases'] = platforms_to_releases(copy, self.settings.get('debug')) del copy['platforms'] else: last_modified = None + for release in copy.get('releases', []): date = release.get('date') if not last_modified or (date and date > last_modified): last_modified = date + + if self.schema_version.major < 4: + if 'dependencies' in release: + release['libraries'] = release['dependencies'] + del release['dependencies'] + copy['last_modified'] = last_modified defaults = { @@ -344,11 +329,11 @@ def get_packages(self, repo): return output - def get_dependencies(self, repo): + def get_libraries(self, repo_url): """ - Provides access to the dependency info that is cached in a channel + Provides access to the library info that is cached in a channel - :param repo: + :param repo_url: The URL of the repository to get the cached info of :raises: @@ -358,7 +343,7 @@ def get_dependencies(self, repo): :return: A dict in the format: { - 'Dependency Name': { + 'Library Name': { 'name': name, 'load_order': two digit string, 'description': description, @@ -381,17 +366,15 @@ def get_dependencies(self, repo): self.fetch() - repo = update_url(repo, self.settings.get('debug')) + repo_url = update_url(repo_url, 
self.settings.get('debug')) - if self.channel_info.get('dependencies_cache', False) is False: - return {} - - if self.channel_info['dependencies_cache'].get(repo, False) is False: - return {} + # The 4.0.0 channel schema renamed the key cached package info was + # stored under in order to be more clear to new users. + libraries_key = 'libraries_cache' if self.schema_version.major >= 4 else 'dependencies_cache' output = {} - for dependency in self.channel_info['dependencies_cache'][repo]: - dependency['releases'] = version_sort(dependency['releases'], 'platforms', reverse=True) - output[dependency['name']] = dependency + for library in self.channel_info.get(libraries_key, {}).get(repo_url, []): + library['releases'] = version_sort(library['releases'], 'platforms', reverse=True) + output[library['name']] = library return output diff --git a/app/lib/package_control/providers/github_repository_provider.py b/app/lib/package_control/providers/github_repository_provider.py index 59a0eaf..d9cfb7b 100644 --- a/app/lib/package_control/providers/github_repository_provider.py +++ b/app/lib/package_control/providers/github_repository_provider.py @@ -1,19 +1,19 @@ import re +from ..clients.client_exception import ClientException from ..clients.github_client import GitHubClient from ..downloaders.downloader_exception import DownloaderException -from ..clients.client_exception import ClientException +from .base_repository_provider import BaseRepositoryProvider from .provider_exception import ProviderException -class GitHubRepositoryProvider(): - +class GitHubRepositoryProvider(BaseRepositoryProvider): """ Allows using a public GitHub repository as the source for a single package. For legacy purposes, this can also be treated as the source for a Package Control "repository". - :param repo: + :param repo_url: The public web URL to the GitHub repository. 
Should be in the format `https://github.com/user/package` for the master branch, or `https://github.com/user/package/tree/{branch_name}` for any other @@ -34,61 +34,18 @@ class GitHubRepositoryProvider(): `http_basic_auth` """ - def __init__(self, repo, settings): - self.cache = {} + def __init__(self, repo_url, settings): # Clean off the trailing .git to be more forgiving - self.repo = re.sub(r'\.git$', '', repo) - self.settings = settings - self.failed_sources = {} + super().__init__(re.sub(r'\.git$', '', repo_url), settings) @classmethod - def match_url(cls, repo): - """Indicates if this provider can handle the provided repo""" + def match_url(cls, repo_url): + """Indicates if this provider can handle the provided repo_url""" - master = re.search('^https?://github.com/[^/]+/[^/]+/?$', repo) - branch = re.search('^https?://github.com/[^/]+/[^/]+/tree/[^/]+/?$', repo) + master = re.search('^https?://github.com/[^/]+/[^/]+/?$', repo_url) + branch = re.search('^https?://github.com/[^/]+/[^/]+/tree/[^/]+/?$', repo_url) return master is not None or branch is not None - def prefetch(self): - """ - Go out and perform HTTP operations, caching the result - - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - """ - - [name for name, info in self.get_packages()] - - def get_failed_sources(self): - """ - List of any URLs that could not be accessed while accessing this repository - - :return: - A generator of ("https://github.com/user/repo", Exception()) tuples - """ - - return self.failed_sources.items() - - def get_broken_packages(self): - """ - For API-compatibility with RepositoryProvider - """ - - return {}.items() - - def get_broken_dependencies(self): - """ - For API-compatibility with RepositoryProvider - """ - - return {}.items() - - def get_dependencies(self, ): - "For API-compatibility with RepositoryProvider" - - return {}.items() - def get_packages(self, 
invalid_sources=None): """ Uses the GitHub API to construct necessary info for a package @@ -136,16 +93,16 @@ def get_packages(self, invalid_sources=None): yield (key, value) return - client = GitHubClient(self.settings) - - if invalid_sources is not None and self.repo in invalid_sources: + if invalid_sources is not None and self.repo_url in invalid_sources: raise StopIteration() + client = GitHubClient(self.settings) + try: - repo_info = client.repo_info(self.repo) + repo_info = client.repo_info(self.repo_url) releases = [] - for download in client.download_info(self.repo): + for download in client.download_info(self.repo_url): download['sublime_text'] = '*' download['platforms'] = ['*'] releases.append(download) @@ -160,7 +117,7 @@ def get_packages(self, invalid_sources=None): 'releases': releases, 'previous_names': [], 'labels': [], - 'sources': [self.repo], + 'sources': [self.repo_url], 'readme': repo_info['readme'], 'issues': repo_info['issues'], 'donate': repo_info['donate'], @@ -170,21 +127,6 @@ def get_packages(self, invalid_sources=None): yield (name, details) except (DownloaderException, ClientException, ProviderException) as e: - self.failed_sources[self.repo] = e + self.failed_sources[self.repo_url] = e self.cache['get_packages'] = {} raise StopIteration() - - def get_sources(self): - """ - Return a list of current URLs that are directly referenced by the repo - - :return: - A list of URLs - """ - - return [self.repo] - - def get_renamed_packages(self): - """For API-compatibility with RepositoryProvider""" - - return {} diff --git a/app/lib/package_control/providers/github_user_provider.py b/app/lib/package_control/providers/github_user_provider.py index 81723c4..273cecc 100644 --- a/app/lib/package_control/providers/github_user_provider.py +++ b/app/lib/package_control/providers/github_user_provider.py @@ -1,18 +1,18 @@ import re +from ..clients.client_exception import ClientException from ..clients.github_client import GitHubClient from 
..downloaders.downloader_exception import DownloaderException -from ..clients.client_exception import ClientException +from .base_repository_provider import BaseRepositoryProvider from .provider_exception import ProviderException -class GitHubUserProvider(): - +class GitHubUserProvider(BaseRepositoryProvider): """ Allows using a GitHub user/organization as the source for multiple packages, or in Package Control terminology, a "repository". - :param repo: + :param repo_url: The public web URL to the GitHub user/org. Should be in the format `https://github.com/user`. @@ -31,57 +31,11 @@ class GitHubUserProvider(): `http_basic_auth` """ - def __init__(self, repo, settings): - self.cache = {} - self.repo = repo - self.settings = settings - self.failed_sources = {} - @classmethod - def match_url(cls, repo): - """Indicates if this provider can handle the provided repo""" - - return re.search('^https?://github.com/[^/]+/?$', repo) is not None - - def prefetch(self): - """ - Go out and perform HTTP operations, caching the result - """ - - [name for name, info in self.get_packages()] - - def get_failed_sources(self): - """ - List of any URLs that could not be accessed while accessing this repository - - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - - :return: - A generator of ("https://github.com/user/repo", Exception()) tuples - """ - - return self.failed_sources.items() - - def get_broken_packages(self): - """ - For API-compatibility with RepositoryProvider - """ + def match_url(cls, repo_url): + """Indicates if this provider can handle the provided repo_url""" - return {}.items() - - def get_broken_dependencies(self): - """ - For API-compatibility with RepositoryProvider - """ - - return {}.items() - - def get_dependencies(self, ): - "For API-compatibility with RepositoryProvider" - - return {}.items() + return re.search('^https?://github.com/[^/]+/?$', repo_url) is not None 
def get_packages(self, invalid_sources=None): """ @@ -130,24 +84,25 @@ def get_packages(self, invalid_sources=None): yield (key, value) return - client = GitHubClient(self.settings) - - if invalid_sources is not None and self.repo in invalid_sources: + if invalid_sources is not None and self.repo_url in invalid_sources: raise StopIteration() + client = GitHubClient(self.settings) + try: - user_repos = client.user_info(self.repo) - except (DownloaderException, ClientException, ProviderException) as e: - self.failed_sources = [self.repo] - self.cache['get_packages'] = e - raise e + user_repos = client.user_info(self.repo_url) + except (DownloaderException, ClientException) as e: + self.failed_sources[self.repo_url] = e + self.cache['get_packages'] = {} + raise output = {} for repo_info in user_repos: - try: - name = repo_info['name'] - repo_url = 'https://github.com/%s/%s' % (repo_info['author'], name) + author = repo_info['author'] + name = repo_info['name'] + repo_url = client.make_repo_url(author, name) + try: releases = [] for download in client.download_info(repo_url): download['sublime_text'] = '*' @@ -158,12 +113,12 @@ def get_packages(self, invalid_sources=None): 'name': name, 'description': repo_info['description'], 'homepage': repo_info['homepage'], - 'author': repo_info['author'], + 'author': author, 'last_modified': releases[0].get('date'), 'releases': releases, 'previous_names': [], 'labels': [], - 'sources': [self.repo], + 'sources': [self.repo_url], 'readme': repo_info['readme'], 'issues': repo_info['issues'], 'donate': repo_info['donate'], @@ -176,18 +131,3 @@ def get_packages(self, invalid_sources=None): self.failed_sources[repo_url] = e self.cache['get_packages'] = output - - def get_sources(self): - """ - Return a list of current URLs that are directly referenced by the repo - - :return: - A list of URLs - """ - - return [self.repo] - - def get_renamed_packages(self): - """For API-compatibility with RepositoryProvider""" - - return {} diff --git 
a/app/lib/package_control/providers/gitlab_repository_provider.py b/app/lib/package_control/providers/gitlab_repository_provider.py index 59c340d..30db514 100644 --- a/app/lib/package_control/providers/gitlab_repository_provider.py +++ b/app/lib/package_control/providers/gitlab_repository_provider.py @@ -1,18 +1,19 @@ import re +from ..clients.client_exception import ClientException from ..clients.gitlab_client import GitLabClient from ..downloaders.downloader_exception import DownloaderException -from ..clients.client_exception import ClientException +from .base_repository_provider import BaseRepositoryProvider from .provider_exception import ProviderException -class GitLabRepositoryProvider(): +class GitLabRepositoryProvider(BaseRepositoryProvider): """ Allows using a public GitLab repository as the source for a single package. For legacy purposes, this can also be treated as the source for a Package Control "repository". - :param repo: + :param repo_url: The public web URL to the GitLab repository. 
Should be in the format `https://gitlab.com/user/package` for the master branch, or `https://gitlab.com/user/package/-/tree/{branch_name}` for any other @@ -33,61 +34,18 @@ class GitLabRepositoryProvider(): `http_basic_auth` """ - def __init__(self, repo, settings): - self.cache = {} + def __init__(self, repo_url, settings): # Clean off the trailing .git to be more forgiving - self.repo = re.sub(r'\.git$', '', repo) - self.settings = settings - self.failed_sources = {} + super().__init__(re.sub(r'\.git$', '', repo_url), settings) @classmethod - def match_url(cls, repo): - """Indicates if this provider can handle the provided repo""" + def match_url(cls, repo_url): + """Indicates if this provider can handle the provided repo_url""" - master = re.search('^https?://gitlab.com/[^/]+/[^/]+/?$', repo) - branch = re.search('^https?://gitlab.com/[^/]+/[^/]+/-/tree/[^/]+/?$', repo) + master = re.search('^https?://gitlab.com/[^/]+/[^/]+/?$', repo_url) + branch = re.search('^https?://gitlab.com/[^/]+/[^/]+/-/tree/[^/]+/?$', repo_url) return master is not None or branch is not None - def prefetch(self): - """ - Go out and perform HTTP operations, caching the result - - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - """ - - [name for name, info in self.get_packages()] - - def get_failed_sources(self): - """ - List of any URLs that could not be accessed while accessing this repository - - :return: - A generator of ("https://gitlab.com/user/repo", Exception()) tuples - """ - - return self.failed_sources.items() - - def get_broken_packages(self): - """ - For API-compatibility with RepositoryProvider - """ - - return {}.items() - - def get_broken_dependencies(self): - """ - For API-compatibility with RepositoryProvider - """ - - return {}.items() - - def get_dependencies(self, ): - """For API-compatibility with RepositoryProvider""" - - return {}.items() - def get_packages(self, 
invalid_sources=None): """ Uses the GitLab API to construct necessary info for a package @@ -135,16 +93,16 @@ def get_packages(self, invalid_sources=None): yield (key, value) return - client = GitLabClient(self.settings) - - if invalid_sources is not None and self.repo in invalid_sources: + if invalid_sources is not None and self.repo_url in invalid_sources: raise StopIteration() + client = GitLabClient(self.settings) + try: - repo_info = client.repo_info(self.repo) + repo_info = client.repo_info(self.repo_url) releases = [] - for download in client.download_info(self.repo): + for download in client.download_info(self.repo_url): download['sublime_text'] = '*' download['platforms'] = ['*'] releases.append(download) @@ -159,7 +117,7 @@ def get_packages(self, invalid_sources=None): 'releases': releases, 'previous_names': [], 'labels': [], - 'sources': [self.repo], + 'sources': [self.repo_url], 'readme': repo_info['readme'], 'issues': repo_info['issues'], 'donate': repo_info['donate'], @@ -169,21 +127,6 @@ def get_packages(self, invalid_sources=None): yield (name, details) except (DownloaderException, ClientException, ProviderException) as e: - self.failed_sources[self.repo] = e + self.failed_sources[self.repo_url] = e self.cache['get_packages'] = {} raise StopIteration() - - def get_sources(self): - """ - Return a list of current URLs that are directly referenced by the repo - - :return: - A list of URLs - """ - - return [self.repo] - - def get_renamed_packages(self): - """For API-compatibility with RepositoryProvider""" - - return {} diff --git a/app/lib/package_control/providers/gitlab_user_provider.py b/app/lib/package_control/providers/gitlab_user_provider.py index acc1cb9..c583d7d 100644 --- a/app/lib/package_control/providers/gitlab_user_provider.py +++ b/app/lib/package_control/providers/gitlab_user_provider.py @@ -3,15 +3,16 @@ from ..clients.client_exception import ClientException from ..clients.gitlab_client import GitLabClient from 
..downloaders.downloader_exception import DownloaderException +from .base_repository_provider import BaseRepositoryProvider from .provider_exception import ProviderException -class GitLabUserProvider: +class GitLabUserProvider(BaseRepositoryProvider): """ Allows using a GitLab user/organization as the source for multiple packages, or in Package Control terminology, a 'repository'. - :param repo: + :param repo_url: The public web URL to the GitHub user/org. Should be in the format `https://gitlab.com/user`. @@ -30,59 +31,13 @@ class GitLabUserProvider: `http_basic_auth` """ - def __init__(self, repo, settings): - self.cache = {} - self.repo = repo - self.settings = settings - self.failed_sources = {} - @classmethod - def match_url(cls, repo): - """ - Indicates if this provider can handle the provided repo - """ - - return re.search('^https?://gitlab.com/[^/]+/?$', repo) is not None - - def prefetch(self): - """ - Go out and perform HTTP operations, caching the result - """ - - [name for name, info in self.get_packages()] - - def get_failed_sources(self): - """ - List of any URLs that could not be accessed while accessing this repository - - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - - :return: - A generator of ('https://gitlab.com/user/repo', Exception()) tuples - """ - - return self.failed_sources.items() - - def get_broken_packages(self): - """ - For API-compatibility with RepositoryProvider + def match_url(cls, repo_url): """ - - return {}.items() - - def get_broken_dependencies(self): - """ - For API-compatibility with RepositoryProvider + Indicates if this provider can handle the provided repo_url """ - return {}.items() - - def get_dependencies(self, ): - '''For API-compatibility with RepositoryProvider''' - - return {}.items() + return re.search('^https?://gitlab.com/[^/]+/?$', repo_url) is not None def get_packages(self, invalid_sources=None): """ @@ -131,25 
+86,25 @@ def get_packages(self, invalid_sources=None): yield (key, value) return - client = GitLabClient(self.settings) - - if invalid_sources is not None and self.repo in invalid_sources: + if invalid_sources is not None and self.repo_url in invalid_sources: raise StopIteration() + client = GitLabClient(self.settings) + try: - user_repos = client.user_info(self.repo) - except (DownloaderException, ClientException, ProviderException) as e: - self.failed_sources = [self.repo] - self.cache['get_packages'] = e - raise e + user_repos = client.user_info(self.repo_url) + except (DownloaderException, ClientException) as e: + self.failed_sources[self.repo_url] = e + self.cache['get_packages'] = {} + raise output = {} for repo_info in user_repos: - try: - name = repo_info['name'] - repo_url = 'https://gitlab.com/%s/%s' % (repo_info['author'], - name) + author = repo_info['author'] + name = repo_info['name'] + repo_url = client.make_repo_url(author, name) + try: releases = [] for download in client.download_info(repo_url): download['sublime_text'] = '*' @@ -160,12 +115,12 @@ def get_packages(self, invalid_sources=None): 'name': name, 'description': repo_info['description'], 'homepage': repo_info['homepage'], - 'author': repo_info['author'], + 'author': author, 'last_modified': releases[0].get('date'), 'releases': releases, 'previous_names': [], 'labels': [], - 'sources': [self.repo], + 'sources': [self.repo_url], 'readme': repo_info['readme'], 'issues': repo_info['issues'], 'donate': repo_info['donate'], @@ -179,18 +134,3 @@ def get_packages(self, invalid_sources=None): self.failed_sources[repo_url] = e self.cache['get_packages'] = output - - def get_sources(self): - """ - Return a list of current URLs that are directly referenced by the repo - - :return: - A list of URLs - """ - - return [self.repo] - - def get_renamed_packages(self): - """For API-compatibility with RepositoryProvider""" - - return {} diff --git a/app/lib/package_control/providers/provider_exception.py 
b/app/lib/package_control/providers/provider_exception.py index bb23d1e..cb669fd 100644 --- a/app/lib/package_control/providers/provider_exception.py +++ b/app/lib/package_control/providers/provider_exception.py @@ -1,17 +1,6 @@ -import sys - - class ProviderException(Exception): """If a provider could not return information""" - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - def __bytes__(self): - return self.__unicode__().encode('utf-8') + return self.__str__().encode('utf-8') diff --git a/app/lib/package_control/providers/release_selector.py b/app/lib/package_control/providers/release_selector.py index 672825b..00b8ce8 100644 --- a/app/lib/package_control/providers/release_selector.py +++ b/app/lib/package_control/providers/release_selector.py @@ -37,6 +37,7 @@ def filter_releases(package, settings, releases): releases = version_exclude_prerelease(releases) output = [] + st_version = int(sublime.version()) for release in releases: platforms = release.get('platforms', '*') if not isinstance(platforms, list): @@ -51,7 +52,7 @@ def filter_releases(package, settings, releases): continue # Default to '*' (for legacy reasons), see #604 - if not is_compatible_version(release.get('sublime_text', '*')): + if not is_compatible_version(release.get('sublime_text', '*'), st_version): continue output.append(release) @@ -59,36 +60,28 @@ def filter_releases(package, settings, releases): return output -def is_compatible_version(version_range): - min_version = float("-inf") - max_version = float("inf") - +def is_compatible_version(version_range, st_version): if version_range == '*': return True - gt_match = re.match(r'>(\d+)$', version_range) - ge_match = re.match(r'>=(\d+)$', version_range) - lt_match = re.match(r'<(\d+)$', version_range) - le_match = re.match(r'<=(\d+)$', version_range) - range_match = re.match(r'(\d+) - (\d+)$', version_range) - + gt_match = 
re.match(r'>(\d{4})$', version_range) if gt_match: - min_version = int(gt_match.group(1)) + 1 - elif ge_match: - min_version = int(ge_match.group(1)) - elif lt_match: - max_version = int(lt_match.group(1)) - 1 - elif le_match: - max_version = int(le_match.group(1)) - elif range_match: - min_version = int(range_match.group(1)) - max_version = int(range_match.group(2)) - else: - return None - - if min_version > int(sublime.version()): - return False - if max_version < int(sublime.version()): - return False - - return True + return st_version > int(gt_match.group(1)) + + ge_match = re.match(r'>=(\d{4})$', version_range) + if ge_match: + return st_version >= int(ge_match.group(1)) + + lt_match = re.match(r'<(\d{4})$', version_range) + if lt_match: + return st_version < int(lt_match.group(1)) + + le_match = re.match(r'<=(\d{4})$', version_range) + if le_match: + return st_version <= int(le_match.group(1)) + + range_match = re.match(r'(\d{4}) - (\d{4})$', version_range) + if range_match: + return st_version >= int(range_match.group(1)) and st_version <= int(range_match.group(2)) + + return None diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index 05fd797..4d1ffa2 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ b/app/lib/package_control/providers/repository_provider.py @@ -2,31 +2,32 @@ import re import os from itertools import chain - -try: - # Python 3 - from urllib.parse import urljoin, urlparse - str_cls = str -except (ImportError): - # Python 2 - from urlparse import urljoin, urlparse - str_cls = unicode # noqa +from urllib.parse import urljoin, urlparse from .. 
import text -from ..console_write import console_write -from .provider_exception import ProviderException -from .schema_compat import platforms_to_releases -from ..downloaders.downloader_exception import DownloaderException +from ..clients.bitbucket_client import BitBucketClient from ..clients.client_exception import ClientException from ..clients.github_client import GitHubClient from ..clients.gitlab_client import GitLabClient -from ..clients.bitbucket_client import BitBucketClient +from ..console_write import console_write from ..download_manager import downloader, update_url +from ..downloaders.downloader_exception import DownloaderException from ..versions import version_sort +from .base_repository_provider import BaseRepositoryProvider +from .provider_exception import ProviderException +from .schema_compat import platforms_to_releases +from .schema_compat import SchemaVersion + + +class InvalidRepoFileException(ProviderException): + def __init__(self, repo, reason_message): + super().__init__( + 'Repository %s does not appear to be a valid repository file because' + ' %s' % (repo.repo_url, reason_message)) -class RepositoryProvider(): +class RepositoryProvider(BaseRepositoryProvider): """ Generic repository downloader that fetches package info @@ -37,7 +38,7 @@ class RepositoryProvider(): The structure of the JSON a repository should contain is located in example-packages.json. 
- :param repo: + :param repo_url: The URL of the package repository :param settings: @@ -55,78 +56,47 @@ class RepositoryProvider(): `http_basic_auth` """ - def __init__(self, repo, settings): - self.cache = {} + def __init__(self, repo_url, settings): + super().__init__(repo_url, settings) self.repo_info = None - self.schema_version = '0.0' - self.schema_major_version = 0 - self.repo = repo - self.settings = settings - self.failed_sources = {} - self.broken_packages = {} - self.broken_dependencies = {} - - @classmethod - def match_url(cls, repo): - """Indicates if this provider can handle the provided repo""" - - return True - - def prefetch(self): - """ - Go out and perform HTTP operations, caching the result - - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - """ - - [name for name, info in self.get_packages()] - - def get_failed_sources(self): - """ - List of any URLs that could not be accessed while accessing this repository - - :return: - A generator of ("https://example.com", Exception()) tuples - """ - - return self.failed_sources.items() - - def get_broken_packages(self): - """ - List of package names for packages that are missing information - - :return: - A generator of ("Package Name", Exception()) tuples - """ - - return self.broken_packages.items() - - def get_broken_dependencies(self): - """ - List of dependency names for dependencies that are missing information - - :return: - A generator of ("Dependency Name", Exception()) tuples - """ - - return self.broken_dependencies.items() + self.schema_version = None def fetch(self): """ Retrieves and loads the JSON for other methods to use :raises: + InvalidChannelFileException: when parsing or validation file content fails ProviderException: when an error occurs trying to open a file DownloaderException: when an error occurs trying to open a URL """ + if self.repo_url in self.failed_sources: + return False + if 
self.repo_info is not None: - return + return True + + try: + self.fetch_repo() + except (DownloaderException, ProviderException) as e: + self.failed_sources[self.repo_url] = e + self.cache['get_libraries'] = {} + self.cache['get_packages'] = {} + return False + + return True + + def fetch_repo(self): + self.cache = {} + self.repo_info = self.fetch_json(self.repo_url) + self.schema_version = self.repo_info['schema_version'] - self.repo_info = self.fetch_location(self.repo) - for key in ['packages', 'dependencies']: + # The 4.0.0 repository schema renamed dependencies key to libraries. + if self.schema_version.major < 4: + self.repo_info['libraries'] = self.repo_info.pop('dependencies', []) + + for key in ('packages', 'libraries'): if key not in self.repo_info: self.repo_info[key] = [] @@ -134,16 +104,14 @@ def fetch(self): return # Allow repositories to include other repositories - scheme_match = re.match('(https?:)//', self.repo, re.I) + scheme_match = re.match(r'(https?:)//', self.repo_url, re.I) if scheme_match is None: - relative_base = os.path.dirname(self.repo) + relative_base = os.path.dirname(self.repo_url) is_http = False else: is_http = True - includes = self.repo_info.get('includes', []) - del self.repo_info['includes'] - for include in includes: + for include in self.repo_info.pop('includes', []): if include.startswith('//'): if scheme_match is not None: include = scheme_match.group(1) + include @@ -154,89 +122,26 @@ def fetch(self): continue elif include.startswith('./') or include.startswith('../'): if is_http: - include = urljoin(self.repo, include) + include = urljoin(self.repo_url, include) else: include = os.path.join(relative_base, include) include = os.path.normpath(include) - include_info = self.fetch_location(include) - included_packages = include_info.get('packages', []) - self.repo_info['packages'].extend(included_packages) - included_dependencies = include_info.get('dependencies', []) - 
self.repo_info['dependencies'].extend(included_dependencies) - - def fetch_and_validate(self): - """ - Fetch the repository and validates that it is parse-able - - :return: - Boolean if the repo was fetched and validated - """ - - if self.repo in self.failed_sources: - return False - if self.repo_info is not None: - return True + include_info = self.fetch_json(include) + include_version = include_info['schema_version'] + if include_version != self.schema_version: + raise ProviderException( + 'Scheme version of included repository %s doesn\'t match its parent.' % include) - try: - self.fetch() - except (DownloaderException, ProviderException) as e: - self.failed_sources[self.repo] = e - self.cache['get_packages'] = {} - return False - - def fail(message): - exception = ProviderException(message) - self.failed_sources[self.repo] = exception - self.cache['get_packages'] = {} - return - schema_error = u'Repository %s does not appear to be a valid repository file because ' % self.repo - - if 'schema_version' not in self.repo_info: - error_string = u'%s the "schema_version" JSON key is missing.' % schema_error - fail(error_string) - return False - - try: - self.schema_version = self.repo_info.get('schema_version') - if isinstance(self.schema_version, int): - self.schema_version = float(self.schema_version) - if isinstance(self.schema_version, float): - self.schema_version = str_cls(self.schema_version) - except (ValueError): - error_string = u'%s the "schema_version" is not a valid number.' % schema_error - fail(error_string) - return False - - if self.schema_version not in ['1.0', '1.1', '1.2', '2.0', '3.0.0']: - fail(text.format( - u''' - %s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2, 2.0 or 3.0.0. - ''', - schema_error - )) - return False - - version_parts = self.schema_version.split('.') - self.schema_major_version = int(version_parts[0]) - - if 'packages' not in self.repo_info: - error_string = u'%s the "packages" JSON key is missing.' 
% schema_error - fail(error_string) - return False - - if isinstance(self.repo_info['packages'], dict): - fail(text.format( - u''' - %s the "packages" key is an object, not an array. This indicates it is a channel not a repository. - ''', - schema_error - )) - return False + included_packages = include_info.get('packages', []) + self.repo_info['packages'].extend(included_packages) - return True + # The 4.0.0 repository schema renamed dependencies key to libraries. + libraries_key = 'libraries' if include_version.major >= 4 else 'dependencies' + included_libraries = include_info.get(libraries_key, []) + self.repo_info['libraries'].extend(included_libraries) - def fetch_location(self, location): + def fetch_json(self, location): """ Fetches the contents of a URL of file path @@ -251,18 +156,18 @@ def fetch_location(self, location): A dict of the parsed JSON """ - if re.match('https?://', self.repo, re.I): + if re.match(r'https?://', location, re.I): with downloader(location, self.settings) as manager: json_string = manager.fetch(location, 'Error downloading repository.') # Anything that is not a URL is expected to be a filesystem path else: if not os.path.exists(location): - raise ProviderException(u'Error, file %s does not exist' % location) + raise ProviderException('Error, file %s does not exist' % location) if self.settings.get('debug'): console_write( - u''' + ''' Loading %s as a repository ''', location @@ -273,13 +178,29 @@ def fetch_location(self, location): json_string = f.read() try: - return json.loads(json_string.decode('utf-8')) + repo_info = json.loads(json_string.decode('utf-8')) except (ValueError): - raise ProviderException(u'Error parsing JSON from repository %s.' 
% location) + raise InvalidRepoFileException(self, 'parsing JSON failed.') - def get_dependencies(self, invalid_sources=None): + try: + repo_info['schema_version'] = SchemaVersion(repo_info['schema_version']) + except KeyError: + raise InvalidRepoFileException( + self, 'the "schema_version" JSON key is missing.') + except ValueError as e: + raise InvalidRepoFileException(self, e) + + if isinstance(repo_info['packages'], dict): + raise InvalidRepoFileException( + self, + 'the "packages" key is an object, not an array. ' + 'This indicates it is a channel not a repository.') + + return repo_info + + def get_libraries(self, invalid_sources=None): """ - Provides access to the dependencies in this repository + Provides access to the libraries in this repository :param invalid_sources: A list of URLs that are permissible to fetch data from @@ -292,7 +213,7 @@ def get_dependencies(self, invalid_sources=None): :return: A generator of ( - 'Dependency Name', + 'Library Name', { 'name': name, 'load_order': two digit string, @@ -303,6 +224,7 @@ def get_dependencies(self, invalid_sources=None): { 'sublime_text': compatible version, 'platforms': [platform name, ...], + 'python_versions': ['3.3', '3.8'], 'url': url, 'version': version, 'sha256': hex hash @@ -314,198 +236,224 @@ def get_dependencies(self, invalid_sources=None): tuples """ - if 'get_dependencies' in self.cache: - for key, value in self.cache['get_dependencies'].items(): + if 'get_libraries' in self.cache: + for key, value in self.cache['get_libraries'].items(): yield (key, value) return - if invalid_sources is not None and self.repo in invalid_sources: + if invalid_sources is not None and self.repo_url in invalid_sources: raise StopIteration() - if not self.fetch_and_validate(): + if not self.fetch(): return - debug = self.settings.get('debug') + if self.schema_version.major >= 4: + allowed_library_keys = { + 'name', 'description', 'author', 'issues', 'releases' + } + allowed_release_keys = { # todo: remove 'branch' 
+ 'base', 'version', 'sublime_text', 'platforms', 'python_versions', 'branch', 'tags', 'url', 'sha256' + } + else: + allowed_library_keys = { + 'name', 'description', 'author', 'issues', 'load_order', 'releases' + } + allowed_release_keys = { + 'base', 'version', 'sublime_text', 'platforms', 'branch', 'tags', 'url', 'sha256' + } - github_client = GitHubClient(self.settings) - gitlab_client = GitLabClient(self.settings) - bitbucket_client = BitBucketClient(self.settings) + debug = self.settings.get('debug') - if self.schema_major_version < 3: - self.repo_info['dependencies'] = [] + clients = [ + Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient) + ] output = {} - for dependency in self.repo_info['dependencies']: + for library in self.repo_info['libraries']: info = { - 'sources': [self.repo] + 'releases': [], + 'sources': [self.repo_url] } - for field in ['name', 'description', 'author', 'issues', 'load_order']: - if dependency.get(field): - info[field] = dependency.get(field) + for field in ('name', 'description', 'author', 'issues'): + field_value = library.get(field) + if field_value: + info[field] = field_value if 'name' not in info: - self.failed_sources[self.repo] = ProviderException(text.format( - u''' - No "name" value for one of the dependencies in the repository %s. + self.failed_sources[self.repo_url] = ProviderException(text.format( + ''' + No "name" value for one of the libraries in the repository %s. ''', - self.repo + self.repo_url )) continue - releases = dependency.get('releases', []) + try: + unknown_keys = set(library) - allowed_library_keys + if unknown_keys: + raise ProviderException(text.format( + ''' + The "%s" key(s) in the library "%s" in the repository %s are not supported. 
+ ''', + ('", "'.join(sorted(unknown_keys)), info['name'], self.repo_url) + )) + + releases = library.get('releases', []) + if releases and not isinstance(releases, list): + raise ProviderException(text.format( + ''' + The "releases" value is not an array for the library "%s" in the repository %s. + ''', + (info['name'], self.repo_url) + )) - if releases and not isinstance(releases, list): - self.broken_dependencies[info['name']] = ProviderException(text.format( - u''' - The "releases" value is not an array for the dependency "%s" in the repository %s. - ''', - (info['name'], self.repo) - )) - continue + def assert_release_keys(download_info): + for key in ('version', 'url', 'sublime_text', 'platforms', 'python_versions'): + if key not in download_info: + raise ProviderException(text.format( + ''' + Missing "%s" key for one of the releases of the library "%s" in the repository %s. + ''', + (key, info['name'], self.repo_url) + )) - for release in releases: - if 'releases' not in info: - info['releases'] = [] + for release in releases: + download_info = {} - download_info = {} + unknown_keys = set(release) - allowed_release_keys + if unknown_keys: + raise ProviderException(text.format( + ''' + The "%s" key(s) in one of the releases of the library "%s" + in the repository %s are not supported. 
+ ''', + ('", "'.join(sorted(unknown_keys)), info['name'], self.repo_url) + )) - # Make sure that explicit fields are copied over - for field in ['platforms', 'sublime_text', 'version', 'url', 'sha256']: - if field in release: - value = release[field] - if field == 'url': - value = update_url(value, debug) - if field == 'platforms' and not isinstance(release['platforms'], list): - value = [value] - download_info[field] = value + # Make sure that explicit fields are copied over + for field in ('sublime_text', 'version', 'sha256'): + value = release.get(field) + if value: + download_info[field] = value - if 'platforms' not in download_info: - download_info['platforms'] = ['*'] + # Validate url + value = release.get('url') + if value: + download_info['url'] = update_url(value, debug) + + # Validate supported platforms + value = release.get('platforms', ['*']) + if not isinstance(value, list): + value = [value] + download_info['platforms'] = value + + # Validate supported python_versions + if self.schema_version.major < 4: + # Assume python 3.3 for backward compatibility with older schemes. + # Note: ST2 with python 2.6 are no longer supported + download_info['python_versions'] = ['3.3'] + else: + value = release.get('python_versions') + if value: + if not isinstance(value, list): + value = [value] + download_info['python_versions'] = value - tags = release.get('tags') - branch = release.get('branch') + tags = release.get('tags') + branch = release.get('branch') - if tags or branch: - try: + if tags or branch: base = None if 'base' in release: base = release['base'] if not base: raise ProviderException(text.format( - u''' + ''' Missing release-level "base" key for one of the releases of the - dependency "%s" in the repository %s. + library "%s" in the repository %s. 
''', - (info['name'], self.repo) + (info['name'], self.repo_url) )) - github_url = False - gitlab_url = False - bitbucket_url = False + client = None extra = None + url = None if tags: - github_url = github_client.make_tags_url(base) - gitlab_url = gitlab_client.make_tags_url(base) - bitbucket_url = bitbucket_client.make_tags_url(base) if tags is not True: extra = tags + for client in clients: + url = client.make_tags_url(base) + if url: + break if branch: - github_url = github_client.make_branch_url(base, branch) - gitlab_url = gitlab_client.make_branch_url(base, branch) - bitbucket_url = bitbucket_client.make_branch_url(base, branch) - - if github_url: - downloads = github_client.download_info(github_url, extra) - url = github_url - elif gitlab_url: - downloads = gitlab_client.download_info(gitlab_url, extra) - url = gitlab_url - elif bitbucket_url: - downloads = bitbucket_client.download_info(bitbucket_url, extra) - url = bitbucket_url - else: + for client in clients: + url = client.make_branch_url(base, branch) + if url: + break + + if not url: raise ProviderException(text.format( - u''' + ''' Invalid "base" value "%s" for one of the releases of the - dependency "%s" in the repository %s. + library "%s" in the repository %s. ''', - (base, info['name'], self.repo) + (base, info['name'], self.repo_url) )) + downloads = client.download_info(url, extra) if downloads is False: raise ProviderException(text.format( - u''' - No valid semver tags found at %s for the dependency + ''' + No valid semver tags found at %s for the library "%s" in the repository %s. 
''', - (url, info['name'], self.repo) + (url, info['name'], self.repo_url) )) for download in downloads: del download['date'] new_download = download_info.copy() new_download.update(download) + assert_release_keys(new_download) info['releases'].append(new_download) - except (DownloaderException, ClientException, ProviderException) as e: - self.broken_dependencies[info['name']] = e - continue - - elif download_info: - if 'url' in download_info: + elif 'url' in download_info: is_http = urlparse(download_info['url']).scheme == 'http' if is_http and 'sha256' not in download_info: - self.broken_dependencies[info['name']] = ProviderException(text.format( - u''' + raise ProviderException(text.format( + ''' No "sha256" key for the non-secure "url" value in one of the - releases of the dependency "%s" in the repository %s. + releases of the library "%s" in the repository %s. ''', - (info['name'], self.repo) + (info['name'], self.repo_url) )) - continue - info['releases'].append(download_info) + assert_release_keys(download_info) + info['releases'].append(download_info) - if info['name'] in self.broken_dependencies: - continue - - # Make sure the dependency has the appropriate keys. We use a - # function here so that we can break out of multiple loops. - def is_missing_keys(): - for key in ['author', 'releases', 'issues', 'description', 'load_order']: + # check required library keys + for key in ('author', 'releases', 'issues', 'description'): if key not in info: - self.broken_dependencies[info['name']] = ProviderException(text.format( - u''' - No "%s" key for the dependency "%s" in the repository %s. + raise ProviderException(text.format( + ''' + No "%s" key for the library "%s" in the repository %s. 
''', - (key, info['name'], self.repo) + (key, info['name'], self.repo_url) )) - return True - for release in info.get('releases', []): - for key in ['version', 'url', 'sublime_text', 'platforms']: - if key not in release: - self.broken_dependencies[info['name']] = ProviderException(text.format( - u''' - Missing "%s" key for one of the releases of the dependency "%s" in the repository %s. - ''', - (key, info['name'], self.repo) - )) - return True - return False - if is_missing_keys(): - continue + info['releases'] = version_sort(info['releases'], 'platforms', reverse=True) - info['releases'] = version_sort(info['releases'], 'platforms', reverse=True) + output[info['name']] = info + yield (info['name'], info) - output[info['name']] = info - yield (info['name'], info) + except (DownloaderException, ClientException, ProviderException) as e: + self.broken_libriaries[info['name']] = e - self.cache['get_dependencies'] = output + self.cache['get_libraries'] = output def get_packages(self, invalid_sources=None): """ @@ -536,7 +484,7 @@ def get_packages(self, invalid_sources=None): 'url': url, 'date': date, 'version': version, - 'dependencies': [dependency name, ...] + 'libraries': [library name, ...] }, ... 
] 'previous_names': [old_name, ...], @@ -556,21 +504,21 @@ def get_packages(self, invalid_sources=None): yield (key, value) return - if invalid_sources is not None and self.repo in invalid_sources: + if invalid_sources is not None and self.repo_url in invalid_sources: raise StopIteration() - if not self.fetch_and_validate(): + if not self.fetch(): return debug = self.settings.get('debug') - github_client = GitHubClient(self.settings) - gitlab_client = GitLabClient(self.settings) - bitbucket_client = BitBucketClient(self.settings) + clients = [ + Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient) + ] # Backfill the "previous_names" keys for old schemas previous_names = {} - if self.schema_major_version < 2: + if self.schema_version.major < 2: renamed = self.get_renamed_packages() for old_name in renamed: new_name = renamed[old_name] @@ -581,7 +529,7 @@ def get_packages(self, invalid_sources=None): output = {} for package in self.repo_info['packages']: info = { - 'sources': [self.repo] + 'sources': [self.repo_url] } copy_fields = [ @@ -602,13 +550,13 @@ def get_packages(self, invalid_sources=None): info[field] = package.get(field) # Schema version 2.0 allows for grabbing details about a package, or its - # download from "details" urls. See the GitHubClient, GitLabClient - # and BitBucketClient classes for valid URLs. - if self.schema_major_version >= 2: + # download from "details" urls. See the GitHubClient and BitBucketClient + # classes for valid URLs. 
+ if self.schema_version.major >= 2: details = package.get('details') releases = package.get('releases') - # Try to grab package-level details from GitHub, GitLab or BitBucket + # Try to grab package-level details from GitHub or BitBucket if details: if invalid_sources is not None and details in invalid_sources: continue @@ -616,26 +564,25 @@ def get_packages(self, invalid_sources=None): info['sources'].append(details) try: - github_repo_info = github_client.repo_info(details) - gitlab_repo_info = gitlab_client.repo_info(details) - bitbucket_repo_info = bitbucket_client.repo_info(details) + repo_info = None - # When grabbing details, prefer explicit field values over the values - # from the GitHub, GitLab or BitBucket API - if github_repo_info: - info = dict(chain(github_repo_info.items(), info.items())) - elif gitlab_repo_info: - info = dict(chain(gitlab_repo_info.items(), info.items())) - elif bitbucket_repo_info: - info = dict(chain(bitbucket_repo_info.items(), info.items())) - else: + for client in clients: + repo_info = client.repo_info(details) + if repo_info: + break + + if not repo_info: raise ProviderException(text.format( - u''' + ''' Invalid "details" value "%s" for one of the packages in the repository %s. ''', - (details, self.repo) + (details, self.repo_url) )) + # When grabbing details, prefer explicit field values over the values + # from the GitHub or BitBucket API + info = dict(chain(repo_info.items(), info.items())) + except (DownloaderException, ClientException, ProviderException) as e: if 'name' in info: self.broken_packages[info['name']] = e @@ -643,37 +590,37 @@ def get_packages(self, invalid_sources=None): continue if 'name' not in info: - self.failed_sources[self.repo] = ProviderException(text.format( - u''' + self.failed_sources[self.repo_url] = ProviderException(text.format( + ''' No "name" value for one of the packages in the repository %s. 
''', - self.repo + self.repo_url )) continue info['releases'] = [] - if self.schema_major_version == 2: + if self.schema_version.major == 2: # If no releases info was specified, also grab the download info from GH or BB if not releases and details: releases = [{'details': details}] - if self.schema_major_version >= 2: + if self.schema_version.major >= 2: if not releases: e = ProviderException(text.format( - u''' + ''' No "releases" value for the package "%s" in the repository %s. ''', - (info['name'], self.repo) + (info['name'], self.repo_url) )) self.broken_packages[info['name']] = e continue if not isinstance(releases, list): e = ProviderException(text.format( - u''' + ''' The "releases" value is not an array or the package "%s" in the repository %s. ''', - (info['name'], self.repo) + (info['name'], self.repo_url) )) self.broken_packages[info['name']] = e continue @@ -686,7 +633,7 @@ def get_packages(self, invalid_sources=None): download_info = {} # Make sure that explicit fields are copied over - for field in ['platforms', 'sublime_text', 'version', 'url', 'date', 'dependencies']: + for field in ['platforms', 'sublime_text', 'version', 'url', 'date', 'libraries']: if field in release: value = release[field] if field == 'url': @@ -695,10 +642,13 @@ def get_packages(self, invalid_sources=None): value = [value] download_info[field] = value + if self.schema_version.major < 4 and 'dependencies' in release: + download_info['libraries'] = release['dependencies'] + if 'platforms' not in download_info: download_info['platforms'] = ['*'] - if self.schema_major_version == 2: + if self.schema_version.major == 2: if 'sublime_text' not in download_info: download_info['sublime_text'] = '<3000' @@ -706,38 +656,25 @@ def get_packages(self, invalid_sources=None): download_details = release['details'] try: - github_downloads = github_client.download_info(download_details) - gitlab_downloads = gitlab_client.download_info(download_details) - bitbucket_downloads = 
bitbucket_client.download_info(download_details) + downloads = False - if github_downloads is False or gitlab_downloads is False \ - or bitbucket_downloads is False: - raise ProviderException(text.format( - u''' - No valid semver tags found at %s for the package "%s" in the repository %s. - ''', - (download_details, info['name'], self.repo) - )) + for client in clients: + downloads = client.download_info(download_details) + if downloads is not None: + break - if github_downloads: - downloads = github_downloads - elif gitlab_downloads: - downloads = gitlab_downloads - elif bitbucket_downloads: - downloads = bitbucket_downloads - else: + if downloads is False: raise ProviderException(text.format( - u''' + ''' Invalid "details" value "%s" under the "releases" key for the package "%s" in the repository %s. ''', - (download_details, info['name'], self.repo) + (download_details, info['name'], self.repo_url) )) for download in downloads: - new_download = download_info.copy() - new_download.update(download) - info['releases'].append(new_download) + download.update(download_info) + info['releases'].append(download) except (DownloaderException, ClientException, ProviderException) as e: self.broken_packages[info['name']] = e @@ -745,7 +682,7 @@ def get_packages(self, invalid_sources=None): elif download_info: info['releases'].append(download_info) - elif self.schema_major_version == 3: + elif self.schema_version.major >= 3: tags = release.get('tags') branch = release.get('branch') @@ -759,61 +696,53 @@ def get_packages(self, invalid_sources=None): if not base: raise ProviderException(text.format( - u''' + ''' Missing root-level "details" key, or release-level "base" key for one of the releases of the package "%s" in the repository %s. 
''', - (info['name'], self.repo) + (info['name'], self.repo_url) )) - github_url = False - gitlab_url = False - bitbucket_url = False + client = None extra = None + url = None if tags: - github_url = github_client.make_tags_url(base) - gitlab_url = gitlab_client.make_tags_url(base) - bitbucket_url = bitbucket_client.make_tags_url(base) if tags is not True: extra = tags + for client in clients: + url = client.make_tags_url(base) + if url: + break if branch: - github_url = github_client.make_branch_url(base, branch) - gitlab_url = gitlab_client.make_branch_url(base, branch) - bitbucket_url = bitbucket_client.make_branch_url(base, branch) - - if github_url: - downloads = github_client.download_info(github_url, extra) - url = github_url - elif gitlab_url: - downloads = gitlab_client.download_info(gitlab_url, extra) - url = gitlab_url - elif bitbucket_url: - downloads = bitbucket_client.download_info(bitbucket_url, extra) - url = bitbucket_url - else: + for client in clients: + url = client.make_branch_url(base, branch) + if url: + break + + if not url: raise ProviderException(text.format( - u''' + ''' Invalid "base" value "%s" for one of the releases of the package "%s" in the repository %s. ''', - (base, info['name'], self.repo) + (base, info['name'], self.repo_url) )) + downloads = client.download_info(url, extra) if downloads is False: raise ProviderException(text.format( - u''' + ''' No valid semver tags found at %s for the package "%s" in the repository %s. 
''', - (url, info['name'], self.repo) + (url, info['name'], self.repo_url) )) for download in downloads: - new_download = download_info.copy() - new_download.update(download) - info['releases'].append(new_download) + download.update(download_info) + info['releases'].append(download) except (DownloaderException, ClientException, ProviderException) as e: self.broken_packages[info['name']] = e @@ -833,19 +762,19 @@ def get_packages(self, invalid_sources=None): if 'author' not in info: self.broken_packages[info['name']] = ProviderException(text.format( - u''' + ''' No "author" key for the package "%s" in the repository %s. ''', - (info['name'], self.repo) + (info['name'], self.repo_url) )) continue if 'releases' not in info: self.broken_packages[info['name']] = ProviderException(text.format( - u''' + ''' No "releases" key for the package "%s" in the repository %s. ''', - (info['name'], self.repo) + (info['name'], self.repo_url) )) continue @@ -856,10 +785,10 @@ def has_broken_release(): for key in ['version', 'date', 'url', 'sublime_text', 'platforms']: if key not in release: self.broken_packages[info['name']] = ProviderException(text.format( - u''' + ''' Missing "%s" key for one of the releases of the package "%s" in the repository %s. 
''', - (key, info['name'], self.repo) + (key, info['name'], self.repo_url) )) return True return False @@ -879,7 +808,7 @@ def has_broken_release(): info[field] = None if 'homepage' not in info: - info['homepage'] = self.repo + info['homepage'] = self.repo_url if 'releases' in info and 'last_modified' not in info: # Extract a date from the newest release @@ -905,11 +834,11 @@ def get_sources(self): A list of URLs and/or file paths """ - if not self.fetch_and_validate(): + if not self.fetch(): return [] - output = [self.repo] - if self.schema_major_version >= 2: + output = [self.repo_url] + if self.schema_version.major >= 2: for package in self.repo_info['packages']: details = package.get('details') if details: @@ -919,10 +848,10 @@ def get_sources(self): def get_renamed_packages(self): """:return: A dict of the packages that have been renamed""" - if not self.fetch_and_validate(): + if not self.fetch(): return {} - if self.schema_major_version < 2: + if self.schema_version.major < 2: return self.repo_info.get('renamed_packages', {}) output = {} diff --git a/app/lib/package_control/providers/schema_compat.py b/app/lib/package_control/providers/schema_compat.py index 0f648fe..39dd80c 100644 --- a/app/lib/package_control/providers/schema_compat.py +++ b/app/lib/package_control/providers/schema_compat.py @@ -1,4 +1,5 @@ from ..download_manager import update_url +from ..semver import SemVer def platforms_to_releases(info, debug): @@ -45,3 +46,39 @@ def platforms_to_releases(info, debug): output.append(release) return output + + +class SchemaVersion(SemVer): + supported_versions = ('1.0', '1.1', '1.2', '2.0', '3.0.0', '4.0.0') + + @classmethod + def _parse(cls, ver): + """ + Custom version string parsing to maintain backward compatibility. + + SemVer needs all of major, minor and patch parts being present in `ver`. + + :param ver: + An integer, float or string containing a version string. 
+ + :returns: + List of (major, minor, patch) + """ + try: + if isinstance(ver, int): + ver = float(ver) + if isinstance(ver, float): + ver = str(ver) + except ValueError: + raise ValueError('the "schema_version" is not a valid number.') + + if ver not in cls.supported_versions: + raise ValueError( + 'the "schema_version" is not recognized. Must be one of: %s or %s.' + % (', '.join(cls.supported_versions[:-1]), cls.supported_versions[-1]) + ) + + if ver.count('.') == 1: + ver += '.0' + + return SemVer._parse(ver) diff --git a/app/lib/package_control/versions.py b/app/lib/package_control/versions.py index f00a2ac..3055601 100644 --- a/app/lib/package_control/versions.py +++ b/app/lib/package_control/versions.py @@ -149,7 +149,7 @@ def _version_sort_key(item): return sorted(sortable, key=_version_sort_key, **kwargs) except (ValueError) as e: console_write( - u''' + ''' Error sorting versions - %s ''', e diff --git a/app/tasks/generate_channel_v3_json.py b/app/tasks/generate_channel_v3_json.py index 452c4fd..19ea53e 100644 --- a/app/tasks/generate_channel_v3_json.py +++ b/app/tasks/generate_channel_v3_json.py @@ -5,7 +5,7 @@ import bz2 from urllib.parse import urlparse -from ..models import package, dependency +from ..models import package, library from ..lib.json_datetime_encoder import JsonDatetimeEncoder @@ -40,8 +40,8 @@ output['packages_cache'][repo].append(info) -dependency_info = dependency.all(limit_one_per_dependency=True) -for name, info in dependency_info.items(): +library_info = library.all(limit_one_per_library=True) +for name, info in library_info.items(): repo = info['repository'] del info['repository'] diff --git a/app/tasks/update_package_control_lib.py b/app/tasks/update_package_control_lib.py index ff35b24..59602a1 100644 --- a/app/tasks/update_package_control_lib.py +++ b/app/tasks/update_package_control_lib.py @@ -29,7 +29,6 @@ 'downloaders', 'file_not_found_error.py', 'http', - 'open_compat.py', 'providers', 'semver.py', 'text.py', diff --git 
a/setup/scripts/extract_package_control.py b/setup/scripts/extract_package_control.py index 137ebfe..e704342 100644 --- a/setup/scripts/extract_package_control.py +++ b/setup/scripts/extract_package_control.py @@ -106,7 +106,6 @@ 'download_manager.py', 'file_not_found_error.py', # CUSTOM 'http_cache.py', - 'open_compat.py', '../readme.md', 'semver.py', # CUSTOM 'show_error.py', From 1507a197162002f7d1f5c60f2032e3cbc6f23197 Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Fri, 19 Aug 2022 16:08:27 +0200 Subject: [PATCH 03/39] Add task to create channel_v4.json Scheme 4.0.0 is incompatible with 3.0.0 and will cause current PC 3.4.x releases to completely fail. They wouldn't even be able to fetch data to upgrade themselves. Hence a dummy channel_v3.json needs to be served for all clients not updated before new packagecontrol.io release goes online. PC 4.0 can ship with both channel_v3 and _v4 pre-configured to ensure a smooth transition to new scheme version. The only thing changing for end users is availability of libraries for python 3.8 then. 
--- app/tasks/generate_channel_v3_json.py | 15 +---- app/tasks/generate_channel_v4_json.py | 80 +++++++++++++++++++++++++++ tasks.md | 3 + 3 files changed, 84 insertions(+), 14 deletions(-) create mode 100644 app/tasks/generate_channel_v4_json.py diff --git a/app/tasks/generate_channel_v3_json.py b/app/tasks/generate_channel_v3_json.py index 19ea53e..0110cb3 100644 --- a/app/tasks/generate_channel_v3_json.py +++ b/app/tasks/generate_channel_v3_json.py @@ -26,7 +26,7 @@ 'dependencies_cache': {} } -package_info = package.find.all() +package_info = package.find.all(limit_one_per_package=True, only_package_control=True) for name, info in package_info.items(): repo = info['repository'] del info['repository'] @@ -40,19 +40,6 @@ output['packages_cache'][repo].append(info) -library_info = library.all(limit_one_per_library=True) -for name, info in library_info.items(): - repo = info['repository'] - del info['repository'] - - if repo not in output['repositories']: - output['repositories'].append(repo) - - if repo not in output['dependencies_cache']: - output['dependencies_cache'][repo] = [] - - output['dependencies_cache'][repo].append(info) - output['repositories'] = sorted(output['repositories']) new_json_path = os.path.join(assets_dir, 'channel_v3.json-new') diff --git a/app/tasks/generate_channel_v4_json.py b/app/tasks/generate_channel_v4_json.py new file mode 100644 index 0000000..58194ef --- /dev/null +++ b/app/tasks/generate_channel_v4_json.py @@ -0,0 +1,80 @@ +import re +import json +import os +import gzip +import bz2 +from urllib.parse import urlparse + +from ..models import package, library +from ..lib.json_datetime_encoder import JsonDatetimeEncoder + + +if 'PACKAGE_CONTROL_ASSETS' not in os.environ: + dirname = os.path.dirname(os.path.abspath(__file__)) + assets_dir = os.path.realpath(os.path.join(dirname, '..', '..', 'assets')) +else: + assets_dir = os.environ['PACKAGE_CONTROL_ASSETS'] + +if not os.path.exists(assets_dir): + raise Exception('The assets folder, 
"%s", does not exists. It can be ' +\ + 'overridden by the environmental variable PACKAGE_CONTROL_ASSETS.') + +output = { + 'schema_version': '4.0.0', + 'repositories': [], + 'packages_cache': {}, + 'libraries_cache': {} +} + +package_info = package.find.all(limit_one_per_package=True) +for name, info in package_info.items(): + repo = info['repository'] + del info['repository'] + del info['last_modified'] + + if repo not in output['repositories']: + output['repositories'].append(repo) + + if repo not in output['packages_cache']: + output['packages_cache'][repo] = [] + + output['packages_cache'][repo].append(info) + +library_info = library.all(limit_one_per_library=True) +for name, info in library_info.items(): + repo = info['repository'] + del info['repository'] + + if repo not in output['repositories']: + output['repositories'].append(repo) + + if repo not in output['libraries_cache']: + output['libraries_cache'][repo] = [] + + output['libraries_cache'][repo].append(info) + +output['repositories'] = sorted(output['repositories']) + +new_json_path = os.path.join(assets_dir, 'channel_v4.json-new') +new_json_gz_path = os.path.join(assets_dir, 'channel_v4.json.gz-new') +new_json_bz2_path = os.path.join(assets_dir, 'channel_v4.json.bz2-new') +json_path = os.path.join(assets_dir, 'channel_v4.json') +json_gz_path = os.path.join(assets_dir, 'channel_v4.json.gz') +json_bz2_path = os.path.join(assets_dir, 'channel_v4.json.bz2') + +encoded_json = json.dumps(output, cls=JsonDatetimeEncoder).encode('utf-8') + +with open(new_json_path, 'wb') as f: + f.write(encoded_json) + +os.rename(new_json_path, json_path) + +with gzip.open(new_json_gz_path, 'w') as f: + f.write(encoded_json) + +os.rename(new_json_gz_path, json_gz_path) + +with bz2.open(new_json_bz2_path, 'w') as f: + f.write(encoded_json) + +os.rename(new_json_bz2_path, json_bz2_path) diff --git a/tasks.md b/tasks.md index c4a5cf8..359c0fa 100644 --- a/tasks.md +++ b/tasks.md @@ -33,6 +33,9 @@ current list of valid tasks: - 
`generate_channel_v3_json` - builds the `channel_v3.json` file that contains the `3.0.0` schema version channel info used by Package Control 3.x. + - `generate_channel_v4_json` - builds the `channel_v4.json` file that contains + the `4.0.0` schema version channel info used by Package Control 4.x. + - `generate_legacy_channel_json` - build the `repositories.json` file that contains the `1.2` schema version channel info used by Package Control 1.x. From 76c10c1206dbb043c2216ca087e6b3ad859d864f Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Sun, 21 Aug 2022 12:24:23 +0200 Subject: [PATCH 04/39] Revert "Avoid some object copies" This change caused explicit data from JSON to be overwritten with fallback defaults. see: https://github.com/wbond/package_control/commit/a8262a919d5baa3262da7df02ba2d78652b71e7c --- .../package_control/providers/repository_provider.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index 4d1ffa2..92d26fa 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ b/app/lib/package_control/providers/repository_provider.py @@ -673,8 +673,9 @@ def get_packages(self, invalid_sources=None): )) for download in downloads: - download.update(download_info) - info['releases'].append(download) + new_download = download_info.copy() + new_download.update(download) + info['releases'].append(new_download) except (DownloaderException, ClientException, ProviderException) as e: self.broken_packages[info['name']] = e @@ -741,8 +742,9 @@ def get_packages(self, invalid_sources=None): )) for download in downloads: - download.update(download_info) - info['releases'].append(download) + new_download = download_info.copy() + new_download.update(download) + info['releases'].append(new_download) except (DownloaderException, ClientException, ProviderException) as e: self.broken_packages[info['name']] = e From 
9096f297b50ef987314fd23d8a6c182dc76aaf6e Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Sat, 20 Aug 2022 12:52:15 +0200 Subject: [PATCH 05/39] Fix __all__ values __all__ is a list of names (strings) of exported modules. --- app/models/package/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/models/package/__init__.py b/app/models/package/__init__.py index dfd50a3..bf262b4 100644 --- a/app/models/package/__init__.py +++ b/app/models/package/__init__.py @@ -4,4 +4,4 @@ from . import stats from . import usage -__all__ = [find, modify, sources, stats, usage] +__all__ = ["find", "modify", "sources", "stats", "usage"] From 92bbf7d778e760175f8c7fb3e246af9f3a945b0e Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Sat, 27 Aug 2022 16:36:03 +0200 Subject: [PATCH 06/39] Update package_control --- .../clients/bitbucket_client.py | 277 ++++++++-------- .../package_control/clients/github_client.py | 282 +++++++++------- .../package_control/clients/gitlab_client.py | 300 +++++++++++------- .../clients/json_api_client.py | 14 +- .../package_control/clients/readme_client.py | 2 +- app/lib/package_control/{ => deps}/semver.py | 5 +- app/lib/package_control/download_manager.py | 4 +- .../downloaders/basic_auth_downloader.py | 2 +- .../downloaders/caching_downloader.py | 2 +- .../downloaders/cli_downloader.py | 2 +- .../downloaders/curl_downloader.py | 2 +- .../downloaders/decoding_downloader.py | 2 +- .../downloaders/limiting_downloader.py | 2 +- .../downloaders/urllib_downloader.py | 2 +- .../downloaders/wget_downloader.py | 2 +- .../providers/base_repository_provider.py | 4 - .../bitbucket_repository_provider.py | 46 ++- .../providers/github_repository_provider.py | 46 ++- .../providers/github_user_provider.py | 48 +-- .../providers/gitlab_repository_provider.py | 46 ++- .../providers/gitlab_user_provider.py | 51 +-- .../providers/provider_exception.py | 48 +++ .../providers/release_selector.py | 13 +- .../providers/repository_provider.py | 83 +++-- 
.../providers/schema_compat.py | 2 +- app/lib/package_control/versions.py | 181 +++++------ 26 files changed, 857 insertions(+), 611 deletions(-) rename app/lib/package_control/{ => deps}/semver.py (99%) diff --git a/app/lib/package_control/clients/bitbucket_client.py b/app/lib/package_control/clients/bitbucket_client.py index 1518d0c..fb5fa5d 100644 --- a/app/lib/package_control/clients/bitbucket_client.py +++ b/app/lib/package_control/clients/bitbucket_client.py @@ -1,9 +1,9 @@ import re -from urllib.parse import quote +from urllib.parse import urlencode, quote -from ..versions import version_sort, version_process -from .json_api_client import JSONApiClient from ..downloaders.downloader_exception import DownloaderException +from ..versions import version_match_prefix +from .json_api_client import JSONApiClient # A predefined list of readme filenames to look for @@ -23,7 +23,33 @@ class BitBucketClient(JSONApiClient): @staticmethod - def make_repo_url(owner_name, repo_name): + def user_repo_branch(url): + """ + Extract the username, repo and branch name from the URL + + :param url: + The URL to extract the info from, in one of the forms: + https://bitbucket.org/{user} + https://bitbucket.org/{user}/{repo} + https://bitbucket.org/{user}/{repo}.git + https://bitbucket.org/{user}/{repo}/src/{branch} + + :return: + A tuple of + (user name, repo name, branch name) or + (user name, repo name, None) or + (user name, None, None) or + (None, None, None) if no match. 
+ """ + + match = re.match(r'^https?://bitbucket\.org/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/src/([^/#?]+)/?|/?)|/?)$', url) + if match: + return match.groups() + + return (None, None, None) + + @staticmethod + def repo_url(user_name, repo_name): """ Generate the tags URL for a GitHub repo if the value passed is a GitHub repository URL @@ -38,60 +64,94 @@ def make_repo_url(owner_name, repo_name): The repositoy URL of given owner and repo name """ - return 'https://bitbucket.com/%s/%s' % (quote(owner_name), quote(repo_name)) + return 'https://bitbucket.com/%s/%s' % (quote(user_name), quote(repo_name)) - @staticmethod - def make_tags_url(repo_url): + def download_info(self, url, tag_prefix=None): """ - Generate the tags URL for a BitBucket repo if the value passed is a BitBucket - repository URL + Retrieve information about downloading a package - :param repo_url: - The repository URL + :param url: + The URL of the repository, in one of the forms: + https://bitbucket.org/{user}/{repo} + https://bitbucket.org/{user}/{repo}/src/{branch} + https://bitbucket.org/{user}/{repo}/#tags + If the last option, grabs the info from the newest + tag that is a valid semver version. + + :param tag_prefix: + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. 
+ + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response :return: - The tags URL if repo_url was a BitBucket repo, otherwise False + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', repo_url) - if not match: - return False - - return 'https://bitbucket.org/%s#tags' % match.group(1) + output = self.download_info_from_branch(url) + if output is None: + output = self.download_info_from_tags(url, tag_prefix) + return output - @staticmethod - def make_branch_url(repo_url, branch): + def download_info_from_branch(self, url, default_branch=None): """ - Generate the branch URL for a BitBucket repo if the value passed is a BitBucket - repository URL + Retrieve information about downloading a package - :param repo_url: - The repository URL + :param url: + The URL of the repository, in one of the forms: + https://bitbucket.org/{user}/{repo} + https://bitbucket.org/{user}/{repo}/src/{branch} - :param branch: - The branch name + :param default_branch: + The branch to use, in case url is a repo url + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response :return: - The branch URL if repo_url was a BitBucket repo, otherwise False + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', repo_url) - if not match: - return False + user_name, repo_name, branch = self.user_repo_branch(url) + if 
not repo_name: + return None - return 'https://bitbucket.org/%s/src/%s' % (match.group(1), quote(branch)) + user_repo = "%s/%s" % (user_name, repo_name) - def download_info(self, url, tag_prefix=None): + if branch is None: + branch = default_branch + if branch is None: + repo_info = self.fetch_json(self._api_url(user_repo)) + branch = repo_info['mainbranch'].get('name', 'master') + + branch_url = self._api_url(user_repo, '/refs/branches/%s' % branch) + branch_info = self.fetch_json(branch_url) + + timestamp = branch_info['target']['date'][0:19].replace('T', ' ') + version = re.sub(r'[\-: ]', '.', timestamp) + + return [self._make_download_info(user_repo, branch, version, timestamp)] + + def download_info_from_tags(self, url, tag_prefix=None): """ Retrieve information about downloading a package :param url: The URL of the repository, in one of the forms: https://bitbucket.org/{user}/{repo} - https://bitbucket.org/{user}/{repo}/src/{branch} https://bitbucket.org/{user}/{repo}/#tags - If the last option, grabs the info from the newest - tag that is a valid semver version. + Grabs the info from the newest tag(s) that is a valid semver version. :param tag_prefix: If the URL is a tags URL, only match tags that have this prefix. 
@@ -109,67 +169,42 @@ def download_info(self, url, tag_prefix=None): `date` - the ISO-8601 timestamp string when the version was published """ - output = [] - - version = None - url_pattern = 'https://bitbucket.org/%s/get/%s.zip' - - # tag based releases - tags_match = re.match('https?://bitbucket.org/([^/]+/[^#/]+)/?#tags$', url) - if tags_match: - user_repo = tags_match.group(1) + tags_match = re.match(r'https?://bitbucket\.org/([^/#?]+/[^/#?]+)/?(?:#tags)?$', url) + if not tags_match: + return None - tags_list = {} - tags_url = self._make_api_url(user_repo, '/refs/tags?pagelen=100') + def _get_releases(user_repo, tag_prefix, page_size=100): + used_versions = set() + query_string = urlencode({'pagelen': page_size}) + tags_url = self._api_url(user_repo, '/refs/tags?%s' % query_string) while tags_url: tags_json = self.fetch_json(tags_url) for tag in tags_json['values']: - tags_list[tag['name']] = tag['target']['date'][0:19].replace('T', ' ') - tags_url = tags_json['next'] if 'next' in tags_json else None - - tag_info = version_process(tags_list.keys(), tag_prefix) - tag_info = version_sort(tag_info, reverse=True) - if not tag_info: - return False + version = version_match_prefix(tag['name'], tag_prefix) + if version and version not in used_versions: + used_versions.add(version) + yield ( + version, + tag['name'], + tag['target']['date'][0:19].replace('T', ' ') + ) - max_releases = self.settings.get('max_releases', 0) + tags_url = tags_json.get('next') - used_versions = set() - for info in tag_info: - version = info['version'] - if version in used_versions: - continue - - tag = info['prefix'] + version - output.append({ - 'url': url_pattern % (user_repo, tag), - 'version': version, - 'date': tags_list[tag] - }) - used_versions.add(version) - if max_releases > 0 and len(used_versions) >= max_releases: - break - - # branch based releases - else: - user_repo, branch = self._user_repo_branch(url) - if not user_repo: - return None + user_repo = tags_match.group(1) - if 
branch is None: - repo_info = self.fetch_json(self._make_api_url(user_repo)) - branch = repo_info['mainbranch'].get('name', 'master') + max_releases = self.settings.get('max_releases', 0) + num_releases = 0 - branch_url = self._make_api_url(user_repo, '/refs/branches/%s' % branch) - branch_info = self.fetch_json(branch_url) + output = [] + for release in sorted(_get_releases(user_repo, tag_prefix), reverse=True): + version, tag, timestamp = release - timestamp = branch_info['target']['date'][0:19].replace('T', ' ') + output.append(self._make_download_info(user_repo, tag, str(version), timestamp)) - output = [{ - 'url': url_pattern % (user_repo, branch), - 'version': re.sub(r'[\-: ]', '.', timestamp), - 'date': timestamp - }] + num_releases += not version.prerelease + if max_releases > 0 and num_releases >= max_releases: + break return output @@ -195,13 +230,15 @@ def repo_info(self, url): `readme` - URL of the readme `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_repo, branch = self._user_repo_branch(url) - if not user_repo: - return user_repo + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: + return None - api_url = self._make_api_url(user_repo) + user_repo = "%s/%s" % (user_name, repo_name) + api_url = self._api_url(user_repo) repo_info = self.fetch_json(api_url) if branch is None: @@ -220,7 +257,8 @@ def repo_info(self, url): 'author': author, 'donate': None, 'readme': self._readme_url(user_repo, branch), - 'issues': issues_url if repo_info['has_issues'] else None + 'issues': issues_url if repo_info['has_issues'] else None, + 'default_branch': branch } def user_info(self, url): @@ -236,26 +274,40 @@ def user_info(self, url): """ return None - def _main_branch_name(self, user_repo): + def _make_download_info(self, user_repo, ref_name, version, timestamp): """ - Fetch the name of the default branch + Generate a download_info record :param user_repo: - The user/repo name to get the main 
branch for + The user/repo of the repository + + :param ref_name: + The git reference (branch, commit, tag) + + :param version: + The prefixed version to add to the record + + :param timestamp: + The timestamp the revision was created :raises: DownloaderException: when there is an error downloading ClientException: when there is an error parsing the response :return: - The name of the main branch - `master` or `default` + A dictionary with following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - main_branch_url = self._make_api_url(user_repo) - main_branch_info = self.fetch_json(main_branch_url, True) - return main_branch_info['mainbranch']['name'] + return { + 'url': 'https://bitbucket.org/%s/get/%s.zip' % (user_repo, ref_name), + 'version': version, + 'date': timestamp + } - def _make_api_url(self, user_repo, suffix=''): + def _api_url(self, user_repo, suffix=''): """ Generate a URL for the BitBucket API @@ -293,7 +345,7 @@ def _readme_url(self, user_repo, branch, prefer_cached=False): The URL to the readme file, or None """ - listing_url = self._make_api_url(user_repo, '/src/%s/?pagelen=100' % branch) + listing_url = self._api_url(user_repo, '/src/%s/?pagelen=100' % branch) try: while listing_url: @@ -310,30 +362,3 @@ def _readme_url(self, user_repo, branch, prefer_cached=False): raise return None - - def _user_repo_branch(self, url): - """ - Extract the username/repo and branch name from the URL - - :param url: - The URL to extract the info from, in one of the forms: - https://bitbucket.org/{user}/{repo} - https://bitbucket.org/{user}/{repo}/src/{branch} - - :raises: - DownloaderException: when there is an error downloading - ClientException: when there is an error parsing the response - - :return: - A tuple of (user/repo, branch name) or (None, None) if not matching - """ - - branch_match = 
re.match('https?://bitbucket.org/([^/]+/[^/]+)/src/([^/]+)/?$', url) - if branch_match: - return branch_match.groups() - - repo_match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', url) - if repo_match: - return (repo_match.group(1), None) - - return (None, None) diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py index 4ec1ee2..9952db2 100644 --- a/app/lib/package_control/clients/github_client.py +++ b/app/lib/package_control/clients/github_client.py @@ -1,15 +1,40 @@ import re from urllib.parse import urlencode, quote -from ..versions import version_sort, version_process -from .json_api_client import JSONApiClient from ..downloaders.downloader_exception import DownloaderException +from ..versions import version_match_prefix +from .json_api_client import JSONApiClient class GitHubClient(JSONApiClient): @staticmethod - def make_repo_url(owner_name, repo_name): + def user_repo_branch(url): + """ + Extract the username, repo and branch name from the URL + + :param url: + The URL to extract the info from, in one of the forms: + https://github.com/{user} + https://github.com/{user}/{repo} + https://github.com/{user}/{repo}.git + https://github.com/{user}/{repo}/tree/{branch} + + :return: + A tuple of + (user name, repo name, branch name) or + (user name, repo name, None) or + (user name, None, None) or + (None, None, None) if no match. 
+ """ + match = re.match(r'^https?://github\.com/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/tree/([^/#?]+)/?|/?)|/?)$', url) + if match: + return match.groups() + + return (None, None, None) + + @staticmethod + def repo_url(user_name, repo_name): """ Generate the tags URL for a GitHub repo if the value passed is a GitHub repository URL @@ -24,60 +49,94 @@ def make_repo_url(owner_name, repo_name): The repositoy URL of given owner and repo name """ - return 'https://github.com/%s/%s' % (quote(owner_name), quote(repo_name)) + return 'https://github.com/%s/%s' % (quote(user_name), quote(repo_name)) - @staticmethod - def make_tags_url(repo_url): + def download_info(self, url, tag_prefix=None): """ - Generate the tags URL for a GitHub repo if the value passed is a GitHub - repository URL + Retrieve information about downloading a package - :param repo_url: - The repository URL + :param url: + The URL of the repository, in one of the forms: + https://github.com/{user}/{repo} + https://github.com/{user}/{repo}/tree/{branch} + https://github.com/{user}/{repo}/tags + If the last option, grabs the info from the newest + tag that is a valid semver version. + + :param tag_prefix: + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. 
+ + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response :return: - The tags URL if repo was a GitHub repo_url, otherwise False + None if no match, False if no commits, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - match = re.match('https?://github.com/([^/]+/[^/]+)/?$', repo_url) - if not match: - return False - - return 'https://github.com/%s/tags' % match.group(1) + output = self.download_info_from_branch(url) + if output is None: + output = self.download_info_from_tags(url, tag_prefix) + return output - @staticmethod - def make_branch_url(repo_url, branch): + def download_info_from_branch(self, url, default_branch=None): """ - Generate the branch URL for a GitHub repo if the value passed is a GitHub - repository URL + Retrieve information about downloading a package - :param repo_url: - The repository URL + :param url: + The URL of the repository, in one of the forms: + https://github.com/{user}/{repo} + https://github.com/{user}/{repo}/tree/{branch} - :param branch: - The branch name + :param default_branch: + The branch to use, in case url is a repo url + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response :return: - The branch URL if repo_url was a GitHub repo, otherwise False + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - match = re.match('https?://github.com/([^/]+/[^/]+)/?$', repo_url) - if not match: - return False + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: + return 
None - return 'https://github.com/%s/tree/%s' % (match.group(1), quote(branch)) + user_repo = "%s/%s" % (user_name, repo_name) - def download_info(self, url, tag_prefix=None): + if branch is None: + branch = default_branch + if branch is None: + repo_info = self.fetch_json(self._api_url(user_repo)) + branch = repo_info.get('default_branch', 'master') + + branch_url = self._api_url(user_repo, '/branches/%s' % branch) + branch_info = self.fetch_json(branch_url) + + timestamp = branch_info['commit']['commit']['committer']['date'][0:19].replace('T', ' ') + version = re.sub(r'[\-: ]', '.', timestamp) + + return [self._make_download_info(user_repo, branch, version, timestamp)] + + def download_info_from_tags(self, url, tag_prefix=None): """ Retrieve information about downloading a package :param url: The URL of the repository, in one of the forms: https://github.com/{user}/{repo} - https://github.com/{user}/{repo}/tree/{branch} https://github.com/{user}/{repo}/tags - If the last option, grabs the info from the newest - tag that is a valid semver version. + Grabs the info from the newest tag(s) that is a valid semver version. :param tag_prefix: If the URL is a tags URL, only match tags that have this prefix. 
@@ -88,71 +147,49 @@ def download_info(self, url, tag_prefix=None): ClientException: when there is an error parsing the response :return: - None if no match, False if no commits, or a list of dicts with the + None if no match, False if no commit, or a list of dicts with the following keys: `version` - the version number of the download `url` - the download URL of a zip file of the package `date` - the ISO-8601 timestamp string when the version was published """ - output = [] + tags_match = re.match(r'https?://github\.com/([^/#?]+/[^/#?]+)(?:/tags)?/?$', url) + if not tags_match: + return None - version = None - url_pattern = 'https://codeload.github.com/%s/zip/%s' + def _get_releases(user_repo, tag_prefix=None, page_size=100): + used_versions = set() + for page in range(100): + query_string = urlencode({'page': page * page_size, 'per_page': page_size}) + tags_url = self._api_url(user_repo, '/tags?%s' % query_string) + tags_json = self.fetch_json(tags_url) - # tag based releases - tags_match = re.match('https?://github.com/([^/]+/[^/]+)/tags/?$', url) - if tags_match: - user_repo = tags_match.group(1) - tags_url = self._make_api_url(user_repo, '/tags?per_page=100') - tags_json = self.fetch_json(tags_url) - tag_urls = {tag['name']: tag['commit']['url'] for tag in tags_json} - tag_info = version_process(tag_urls.keys(), tag_prefix) - tag_info = version_sort(tag_info, reverse=True) - if not tag_info: - return False + for tag in tags_json: + version = version_match_prefix(tag['name'], tag_prefix) + if version and version not in used_versions: + used_versions.add(version) + yield (version, tag['name'], tag['commit']['url']) - max_releases = self.settings.get('max_releases', 0) + if len(tags_json) < page_size: + return - used_versions = set() - for info in tag_info: - version = info['version'] - if version in used_versions: - continue - - tag = info['prefix'] + version - tag_info = self.fetch_json(tag_urls[tag]) - timestamp = 
tag_info['commit']['committer']['date'][0:19].replace('T', ' ') - - output.append({ - 'url': url_pattern % (user_repo, tag), - 'version': version, - 'date': timestamp - }) - used_versions.add(version) - if max_releases > 0 and len(used_versions) >= max_releases: - break - - # branch based releases - else: - user_repo, branch = self._user_repo_branch(url) - if not user_repo: - return None + user_repo = tags_match.group(1) + max_releases = self.settings.get('max_releases', 0) + num_releases = 0 - if branch is None: - repo_info = self.fetch_json(self._make_api_url(user_repo)) - branch = repo_info.get('default_branch', 'master') + output = [] + for release in sorted(_get_releases(user_repo, tag_prefix), reverse=True): + version, tag, tag_url = release - branch_url = self._make_api_url(user_repo, '/branches/%s' % branch) - branch_info = self.fetch_json(branch_url) + tag_info = self.fetch_json(tag_url) + timestamp = tag_info['commit']['committer']['date'][0:19].replace('T', ' ') - timestamp = branch_info['commit']['commit']['committer']['date'][0:19].replace('T', ' ') + output.append(self._make_download_info(user_repo, tag, str(version), timestamp)) - output = [{ - 'url': url_pattern % (user_repo, branch), - 'version': re.sub(r'[\-: ]', '.', timestamp), - 'date': timestamp - }] + num_releases += not version.prerelease + if max_releases > 0 and num_releases >= max_releases: + break return output @@ -178,13 +215,15 @@ def repo_info(self, url): `readme` - URL of the readme `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_repo, branch = self._user_repo_branch(url) - if not user_repo: - return user_repo + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: + return None - api_url = self._make_api_url(user_repo) + user_repo = "%s/%s" % (user_name, repo_name) + api_url = self._api_url(user_repo) repo_info = self.fetch_json(api_url) if branch is None: @@ -214,9 +253,10 @@ def user_info(self, url): `readme` - 
URL of the readme `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_match = re.match('https?://github.com/([^/]+)/?$', url) + user_match = re.match(r'https?://github\.com/([^/#?]+)/?$', url) if user_match is None: return None @@ -249,6 +289,7 @@ def _extract_repo_info(self, branch, result): `readme` - URL of the homepage `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ user_name = result['owner']['login'] @@ -266,10 +307,44 @@ def _extract_repo_info(self, branch, result): 'author': user_name, 'readme': self._readme_url(user_repo, branch), 'issues': issues_url, - 'donate': None + 'donate': None, + 'default_branch': branch + } + + def _make_download_info(self, user_repo, ref_name, version, timestamp): + """ + Generate a download_info record + + :param user_repo: + The user/repo of the repository + + :param ref_name: + The git reference (branch, commit, tag) + + :param version: + The prefixed version to add to the record + + :param timestamp: + The timestamp the revision was created + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + A dictionary with following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published + """ + + return { + 'url': 'https://codeload.github.com/%s/zip/%s' % (user_repo, ref_name), + 'version': version, + 'date': timestamp } - def _make_api_url(self, user_repo, suffix=''): + def _api_url(self, user_repo, suffix=''): """ Generate a URL for the BitBucket API @@ -307,7 +382,7 @@ def _readme_url(self, user_repo, branch, prefer_cached=False): """ query_string = urlencode({'ref': branch}) - readme_url = self._make_api_url(user_repo, '/readme?%s' % query_string) + readme_url = self._api_url(user_repo, '/readme?%s' % query_string) try: readme_file = 
self.fetch_json(readme_url, prefer_cached).get('path') @@ -319,26 +394,3 @@ def _readme_url(self, user_repo, branch, prefer_cached=False): raise return None - - def _user_repo_branch(self, url): - """ - Extract the username/repo and branch name from the URL - - :param url: - The URL to extract the info from, in one of the forms: - https://github.com/{user}/{repo} - https://github.com/{user}/{repo}/tree/{branch} - - :return: - A tuple of (user/repo, branch name) or (None, None) if no match - """ - - branch_match = re.match('https?://github.com/([^/]+/[^/]+)/tree/([^/]+)/?$', url) - if branch_match: - return branch_match.groups() - - repo_match = re.match('https?://github.com/([^/]+/[^/]+)(?:$|/.*$)', url) - if repo_match: - return (repo_match.group(1), None) - - return (None, None) diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py index 3fa1d21..dc1a816 100644 --- a/app/lib/package_control/clients/gitlab_client.py +++ b/app/lib/package_control/clients/gitlab_client.py @@ -1,15 +1,43 @@ import re -from urllib.parse import quote +from urllib.parse import urlencode, quote from ..downloaders.downloader_exception import DownloaderException -from ..versions import version_process, version_sort +from ..versions import version_match_prefix from .json_api_client import JSONApiClient class GitLabClient(JSONApiClient): @staticmethod - def make_repo_url(owner_name, repo_name): + def user_repo_branch(url): + """ + Extract the username, repo and branch name from the URL + + :param url: + The URL to extract the info from, in one of the forms: + https://gitlab.com/{user} + https://gitlab.com/{user}/{repo} + https://gitlab.com/{user}/{repo}.git + https://gitlab.com/{user}/{repo}/-/tree/{branch} + + :return: + A tuple of + (user name, repo name, branch name) or + (user name, repo name, None) or + (user name, None, None) or + (None, None, None) if no match. 
+ + The branch name may be a branch name or a commit + """ + + match = re.match(r'^https?://gitlab\.com/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/-/tree/([^/#?]+)/?|/?)|/?)$', url) + if match: + return match.groups() + + return (None, None, None) + + @staticmethod + def repo_url(user_name, repo_name): """ Generate the tags URL for a GitHub repo if the value passed is a GitHub repository URL @@ -24,60 +52,94 @@ def make_repo_url(owner_name, repo_name): The repositoy URL of given owner and repo name """ - return 'https://gitlab.com/%s/%s' % (quote(owner_name), quote(repo_name)) + return 'https://gitlab.com/%s/%s' % (quote(user_name), quote(repo_name)) - @staticmethod - def make_tags_url(repo_url): + def download_info(self, url, tag_prefix=None): """ - Generate the tags URL for a GitLab repo if the value passed is a GitLab - repository URL + Retrieve information about downloading a package - :param repo_url: - The repository URL + :param url: + The URL of the repository, in one of the forms: + https://gitlab.com/{user}/{repo} + https://gitlab.com/{user}/{repo}/-/tree/{branch} + https://gitlab.com/{user}/{repo}/-/tags + If the last option, grabs the info from the newest + tag that is a valid semver version. + + :param tag_prefix: + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. 
+ + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response :return: - The tags URL if repo_url was a GitLab repo, otherwise False + None if no match, False if no commits, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - match = re.match('https?://gitlab.com/([^/]+/[^/]+)/?$', repo_url) - if not match: - return False - - return 'https://gitlab.com/%s/-/tags' % match.group(1) + output = self.download_info_from_branch(url) + if output is None: + output = self.download_info_from_tags(url, tag_prefix) + return output - @staticmethod - def make_branch_url(repo_url, branch): + def download_info_from_branch(self, url, default_branch=None): """ - Generate the branch URL for a GitLab repo if the value passed is a GitLab - repository URL + Retrieve information about downloading a package - :param repo_url: - The repository URL + :param url: + The URL of the repository, in one of the forms: + https://gitlab.com/{user}/{repo} + https://gitlab.com/{user}/{repo}/-/tree/{branch} - :param branch: - The branch name + :param default_branch: + The branch to use, in case url is a repo url + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response :return: - The branch URL if repo_url was a GitLab repo, otherwise False + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - match = re.match('https?://gitlab.com/([^/]+/[^/]+)/?$', repo_url) - if not match: - return False + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: + 
return None + + repo_id = '%s%%2F%s' % (user_name, repo_name) + + if branch is None: + branch = default_branch + if branch is None: + repo_info = self.fetch_json(self._api_url(repo_id)) + branch = repo_info.get('default_branch', 'master') + + branch_url = self._api_url(repo_id, '/repository/branches/%s' % branch) + branch_info = self.fetch_json(branch_url) - return 'https://gitlab.com/%s/-/tree/%s' % (match.group(1), quote(branch)) + timestamp = branch_info['commit']['committed_date'][0:19].replace('T', ' ') + version = re.sub(r'[\-: ]', '.', timestamp) - def download_info(self, url, tag_prefix=None): + return [self._make_download_info(user_name, repo_name, branch, version, timestamp)] + + def download_info_from_tags(self, url, tag_prefix=None): """ Retrieve information about downloading a package :param url: The URL of the repository, in one of the forms: https://gitlab.com/{user}/{repo} - https://gitlab.com/{user}/{repo}/-/tree/{branch} https://gitlab.com/{user}/{repo}/-/tags - If the last option, grabs the info from the newest - tag that is a valid semver version. + Grabs the info from the newest tag(s) that is a valid semver version. :param tag_prefix: If the URL is a tags URL, only match tags that have this prefix. 
@@ -88,76 +150,52 @@ def download_info(self, url, tag_prefix=None): ClientException: when there is an error parsing the response :return: - None if no match, False if no commits, or a list of dicts with the + None if no match, False if no commit, or a list of dicts with the following keys: `version` - the version number of the download `url` - the download URL of a zip file of the package `date` - the ISO-8601 timestamp string when the version was published """ - output = [] - - version = None - url_pattern = 'https://gitlab.com/%s/%s/-/archive/%s/%s-%s.zip' - - # tag based releases - tags_match = re.match('https?://gitlab.com/([^/]+)/([^/]+)/-/tags/?$', url) - if tags_match: - user_name, repo_name = tags_match.groups() - repo_id = '%s%%2F%s' % (user_name, repo_name) - tags_url = self._make_api_url(repo_id, '/repository/tags?per_page=100') - tags_json = self.fetch_json(tags_url) - tags_list = { - tag['name']: tag['commit']['committed_date'][0:19].replace('T', ' ') - for tag in tags_json - } - - tag_info = version_process(tags_list.keys(), tag_prefix) - tag_info = version_sort(tag_info, reverse=True) - if not tag_info: - return False - - max_releases = self.settings.get('max_releases', 0) + tags_match = re.match(r'https?://gitlab\.com/([^/#?]+)/([^/#?]+)(?:/-/tags)?/?$', url) + if not tags_match: + return None + def _get_releases(user_repo, tag_prefix=None, page_size=100): used_versions = set() - for info in tag_info: - version = info['version'] - if version in used_versions: - continue - - tag = info['prefix'] + version - output.append({ - 'url': url_pattern % (user_name, repo_name, tag, repo_name, tag), - 'version': version, - 'date': tags_list[tag] - }) - used_versions.add(version) - if max_releases > 0 and len(used_versions) >= max_releases: - break - - # branch based releases - else: - user_repo, branch = self._user_repo_branch(url) - if not user_repo: - return None - - user_name, repo_name = user_repo.split('/') - repo_id = '%s%%2F%s' % (user_name, repo_name) 
- - if branch is None: - repo_info = self.fetch_json(self._make_api_url(repo_id)) - branch = repo_info.get('default_branch', 'master') + for page in range(100): + query_string = urlencode({'page': page * page_size, 'per_page': page_size}) + tags_url = self._api_url(user_repo, '/repository/tags?%s' % query_string) + tags_json = self.fetch_json(tags_url) + + for tag in tags_json: + version = version_match_prefix(tag['name'], tag_prefix) + if version and version not in used_versions: + used_versions.add(version) + yield ( + version, + tag['name'], + tag['commit']['committed_date'][0:19].replace('T', ' ') + ) + + if len(tags_json) < page_size: + return + + user_name, repo_name = tags_match.groups() + user_repo = '%s%%2F%s' % (user_name, repo_name) + + max_releases = self.settings.get('max_releases', 0) + num_releases = 0 - branch_url = self._make_api_url(repo_id, '/repository/branches/%s' % branch) - branch_info = self.fetch_json(branch_url) + output = [] + for release in sorted(_get_releases(user_repo, tag_prefix), reverse=True): + version, tag, timestamp = release - timestamp = branch_info['commit']['committed_date'][0:19].replace('T', ' ') + output.append(self._make_download_info(user_name, repo_name, tag, str(version), timestamp)) - output = [{ - 'url': url_pattern % (user_name, repo_name, branch, repo_name, branch), - 'version': re.sub(r'[\-: ]', '.', timestamp), - 'date': timestamp - }] + num_releases += not version.prerelease + if max_releases > 0 and num_releases >= max_releases: + break return output @@ -168,9 +206,11 @@ def repo_info(self, url): The URL to the repository, in one of the forms: https://gitlab.com/{user}/{repo} https://gitlab.com/{user}/{repo}/-/tree/{branch} + :raises: DownloaderException: when there is an error downloading ClientException: when there is an error parsing the response + :return: None if no match, or a dict with the following keys: `name` @@ -180,15 +220,15 @@ def repo_info(self, url): `readme` - URL of the readme `issues` - URL 
of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_repo, branch = self._user_repo_branch(url) - if not user_repo: + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: return None - user_name, repo_name = user_repo.split('/') repo_id = '%s%%2F%s' % (user_name, repo_name) - repo_url = self._make_api_url(repo_id) + repo_url = self._api_url(repo_id) repo_info = self.fetch_json(repo_url) if not branch: @@ -218,9 +258,10 @@ def user_info(self, url): `readme` - URL of the readme `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_match = re.match('https?://gitlab.com/([^/]+)/?$', url) + user_match = re.match(r'https?://gitlab\.com/([^/#?]+)/?$', url) if user_match is None: return None @@ -256,6 +297,7 @@ def _extract_repo_info(self, branch, result): `readme` - URL of the homepage `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ user_name = result['owner']['username'] if result.get('owner') else result['namespace']['name'] @@ -276,47 +318,61 @@ def _extract_repo_info(self, branch, result): 'readme': readme_url, 'issues': result.get('issues', None) if result.get('_links') else None, 'donate': None, + 'default_branch': branch } - def _make_api_url(self, project_id, suffix=''): + def _make_download_info(self, user_name, repo_name, ref_name, version, timestamp): """ - Generate a URL for the GitLab API + Generate a download_info record - :param user_repo: - The user/repo of the repository + :param user_name: + The owner of the repository - :param suffix: - The extra API path info to add to the URL + :param repo_name: + The name of the repository + + :param ref_name: + The git reference (branch, commit, tag) + + :param version: + The prefixed version to add to the record + + :param timestamp: + The timestamp the revision was created + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing 
the response :return: - The API URL + A dictionary with following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - return 'https://gitlab.com/api/v4/projects/%s%s' % (project_id, suffix) + return { + 'url': 'https://gitlab.com/%s/%s/-/archive/%s/%s-%s.zip' % ( + user_name, repo_name, ref_name, repo_name, ref_name), + 'version': version, + 'date': timestamp + } - def _user_repo_branch(self, url): + def _api_url(self, project_id, suffix=''): """ - Extract the username/repo and branch name from the URL + Generate a URL for the GitLab API - :param url: - The URL to extract the info from, in one of the forms: - https://gitlab.com/{user}/{repo} - https://gitlab.com/{user}/{repo}/-/tree/{branch} + :param user_repo: + The user/repo of the repository + + :param suffix: + The extra API path info to add to the URL :return: - A tuple of (user/repo, branch name) or (None, None) if no match. 
- The branch name may be a branch name or a commit + The API URL """ - branch_match = re.match('https?://gitlab.com/([^/]+/[^/]+)/-/tree/([^/]+)/?$', url) - if branch_match: - return branch_match.groups() - - repo_match = re.match('https?://gitlab.com/([^/]+/[^/]+)(?:$|/.*$)', url) - if repo_match: - return (repo_match.group(1), None) - - return (None, None) + return 'https://gitlab.com/api/v4/projects/%s%s' % (project_id, suffix) def _extract_user_id(self, username): """ diff --git a/app/lib/package_control/clients/json_api_client.py b/app/lib/package_control/clients/json_api_client.py index 889f131..ac8e00e 100644 --- a/app/lib/package_control/clients/json_api_client.py +++ b/app/lib/package_control/clients/json_api_client.py @@ -5,7 +5,7 @@ from ..download_manager import downloader -class JSONApiClient(): +class JSONApiClient: def __init__(self, settings): self.settings = settings @@ -20,7 +20,11 @@ def fetch(self, url, prefer_cached=False): :param prefer_cached: If a cached copy of the content is preferred - :return: The bytes/string + :raises: + DownloaderException: when there is an error downloading + + :return: + The bytes/string """ # If there are extra params for the domain name, add them @@ -45,7 +49,11 @@ def fetch_json(self, url, prefer_cached=False): :param prefer_cached: If a cached copy of the JSON is preferred - :return: A dict or list from the JSON + :raises: + ClientException: when there is an error parsing the response + + :return: + A dict or list from the JSON """ repository_json = self.fetch(url, prefer_cached) diff --git a/app/lib/package_control/clients/readme_client.py b/app/lib/package_control/clients/readme_client.py index a2e2980..d3f09dc 100644 --- a/app/lib/package_control/clients/readme_client.py +++ b/app/lib/package_control/clients/readme_client.py @@ -43,7 +43,7 @@ def readme_info(self, url): # Try to grab the contents of a GitHub-based readme by grabbing the cached # content of the readme API call github_match = re.match( - 
r'https://raw\.github(?:usercontent)?\.com/([^/]+/[^/]+)/([^/]+)/' + r'https://raw\.github(?:usercontent)?\.com/([^/#?]+/[^/#?]+)/([^/#?]+)/' r'readme(\.(md|mkd|mdown|markdown|textile|creole|rst|txt))?$', url, re.I diff --git a/app/lib/package_control/semver.py b/app/lib/package_control/deps/semver.py similarity index 99% rename from app/lib/package_control/semver.py rename to app/lib/package_control/deps/semver.py index 20baf16..5d59d49 100644 --- a/app/lib/package_control/semver.py +++ b/app/lib/package_control/deps/semver.py @@ -246,6 +246,9 @@ def __repr__(self): return 'SemVer("%s")' % str(self) # return 'SemVer(%s)' % ', '.join('%s=%r' % (k, getattr(self, k)) for k in self._fields) + def __hash__(self): + return hash(str(self)) + def __len__(self): return 3 + (self.build is not None and 2 or self.prerelease is not None) @@ -397,7 +400,7 @@ def cp_len(t, i=0): return 0 -class SemComparator(object): +class SemComparator: """Holds a SemVer object and a comparing operator and can match these against a given version. 
diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py index a1cbe1a..db10559 100644 --- a/app/lib/package_control/download_manager.py +++ b/app/lib/package_control/download_manager.py @@ -129,7 +129,7 @@ def update_url(url, debug): original_url = url url = url.replace('://raw.github.com/', '://raw.githubusercontent.com/') url = url.replace('://nodeload.github.com/', '://codeload.github.com/') - url = re.sub('^(https://codeload.github.com/[^/]+/[^/]+/)zipball(/.*)$', '\\1zip\\2', url) + url = re.sub(r'^(https://codeload\.github\.com/[^/#?]+/[^/#?]+/)zipball(/.*)$', '\\1zip\\2', url) # Fix URLs from old versions of Package Control since we are going to # remove all packages but Package Control from them to force upgrades @@ -147,7 +147,7 @@ def update_url(url, debug): return url -class DownloadManager(object): +class DownloadManager: def __init__(self, settings): # Cache the downloader for re-use diff --git a/app/lib/package_control/downloaders/basic_auth_downloader.py b/app/lib/package_control/downloaders/basic_auth_downloader.py index 78678ed..9150127 100644 --- a/app/lib/package_control/downloaders/basic_auth_downloader.py +++ b/app/lib/package_control/downloaders/basic_auth_downloader.py @@ -3,7 +3,7 @@ from urllib.parse import urlparse -class BasicAuthDownloader(object): +class BasicAuthDownloader: """ A base for downloaders to add an HTTP basic auth header diff --git a/app/lib/package_control/downloaders/caching_downloader.py b/app/lib/package_control/downloaders/caching_downloader.py index dc0db8e..14191f8 100644 --- a/app/lib/package_control/downloaders/caching_downloader.py +++ b/app/lib/package_control/downloaders/caching_downloader.py @@ -5,7 +5,7 @@ from ..console_write import console_write -class CachingDownloader(object): +class CachingDownloader: """ A base downloader that will use a caching backend to cache HTTP requests diff --git a/app/lib/package_control/downloaders/cli_downloader.py 
b/app/lib/package_control/downloaders/cli_downloader.py index 53d06ee..31365a0 100644 --- a/app/lib/package_control/downloaders/cli_downloader.py +++ b/app/lib/package_control/downloaders/cli_downloader.py @@ -7,7 +7,7 @@ from .binary_not_found_error import BinaryNotFoundError -class CliDownloader(object): +class CliDownloader: """ Base for downloaders that use a command line program diff --git a/app/lib/package_control/downloaders/curl_downloader.py b/app/lib/package_control/downloaders/curl_downloader.py index 7f6e0fa..5fda6e2 100644 --- a/app/lib/package_control/downloaders/curl_downloader.py +++ b/app/lib/package_control/downloaders/curl_downloader.py @@ -101,7 +101,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): for name, value in request_headers.items(): command.extend(['--header', "%s: %s" % (name, value)]) - secure_url_match = re.match('^https://([^/]+)', url) + secure_url_match = re.match(r'^https://([^/#?]+)', url) if secure_url_match is not None: bundle_path = get_ca_bundle_path(self.settings) command.extend(['--cacert', bundle_path]) diff --git a/app/lib/package_control/downloaders/decoding_downloader.py b/app/lib/package_control/downloaders/decoding_downloader.py index 257ba6f..6434c9b 100644 --- a/app/lib/package_control/downloaders/decoding_downloader.py +++ b/app/lib/package_control/downloaders/decoding_downloader.py @@ -10,7 +10,7 @@ from .downloader_exception import DownloaderException -class DecodingDownloader(object): +class DecodingDownloader: """ A base for downloaders that provides the ability to decode bzip2ed, gzipped diff --git a/app/lib/package_control/downloaders/limiting_downloader.py b/app/lib/package_control/downloaders/limiting_downloader.py index c1cadcb..61056b6 100644 --- a/app/lib/package_control/downloaders/limiting_downloader.py +++ b/app/lib/package_control/downloaders/limiting_downloader.py @@ -3,7 +3,7 @@ from .rate_limit_exception import RateLimitException -class LimitingDownloader(object): 
+class LimitingDownloader: """ A base for downloaders that checks for rate limiting headers. diff --git a/app/lib/package_control/downloaders/urllib_downloader.py b/app/lib/package_control/downloaders/urllib_downloader.py index 888b1e2..ecfb81b 100644 --- a/app/lib/package_control/downloaders/urllib_downloader.py +++ b/app/lib/package_control/downloaders/urllib_downloader.py @@ -289,7 +289,7 @@ def setup_opener(self, url, timeout): (http_proxy, https_proxy, proxy_username, proxy_password) ) - secure_url_match = re.match('^https://([^/]+)', url) + secure_url_match = re.match(r'^https://([^/#?]+)', url) if secure_url_match is not None: bundle_path = get_ca_bundle_path(self.settings) bundle_path = bundle_path.encode(sys.getfilesystemencoding()) diff --git a/app/lib/package_control/downloaders/wget_downloader.py b/app/lib/package_control/downloaders/wget_downloader.py index c135a0e..d4e89b7 100644 --- a/app/lib/package_control/downloaders/wget_downloader.py +++ b/app/lib/package_control/downloaders/wget_downloader.py @@ -105,7 +105,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): for name, value in request_headers.items(): command.extend(['--header', "%s: %s" % (name, value)]) - secure_url_match = re.match('^https://([^/]+)', url) + secure_url_match = re.match(r'^https://([^/#?]+)', url) if secure_url_match is not None: bundle_path = get_ca_bundle_path(self.settings) command.append('--ca-certificate=' + bundle_path) diff --git a/app/lib/package_control/providers/base_repository_provider.py b/app/lib/package_control/providers/base_repository_provider.py index 4e0b8d1..adf5b00 100644 --- a/app/lib/package_control/providers/base_repository_provider.py +++ b/app/lib/package_control/providers/base_repository_provider.py @@ -53,10 +53,6 @@ def match_url(cls, repo_url): def prefetch(self): """ Go out and perform HTTP operations, caching the result - - :raises: - DownloaderException: when there is an issue download package info - 
ClientException: when there is an issue parsing package info
         """
 
         [name for name, info in self.get_packages()]
diff --git a/app/lib/package_control/providers/bitbucket_repository_provider.py b/app/lib/package_control/providers/bitbucket_repository_provider.py
index 73c6605..24b7b46 100644
--- a/app/lib/package_control/providers/bitbucket_repository_provider.py
+++ b/app/lib/package_control/providers/bitbucket_repository_provider.py
@@ -1,10 +1,12 @@
-import re
-
 from ..clients.bitbucket_client import BitBucketClient
 from ..clients.client_exception import ClientException
 from ..downloaders.downloader_exception import DownloaderException
 from .base_repository_provider import BaseRepositoryProvider
-from .provider_exception import ProviderException
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderRepoInfoException,
+    ProviderException,
+)
 
 
 class BitBucketRepositoryProvider(BaseRepositoryProvider):
@@ -34,9 +36,22 @@ class BitBucketRepositoryProvider(BaseRepositoryProvider):
 
     @classmethod
     def match_url(cls, repo_url):
-        """Indicates if this provider can handle the provided repo_url"""
+        """
+        Indicates if this provider can handle the provided repo_url
 
-        return re.search('^https?://bitbucket.org/([^/]+/[^/]+)/?$', repo_url) is not None
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+              https://bitbucket.org/{user}/{repo}.git
+              https://bitbucket.org/{user}/{repo}
+              https://bitbucket.org/{user}/{repo}/
+              https://bitbucket.org/{user}/{repo}/src/{branch}
+              https://bitbucket.org/{user}/{repo}/src/{branch}/
+
+        :return:
+            True if repo_url matches a supported scheme.
+ """ + user, repo, _ = BitBucketClient.user_repo_branch(repo_url) + return bool(user and repo) def get_packages(self, invalid_sources=None): """ @@ -45,10 +60,6 @@ def get_packages(self, invalid_sources=None): :param invalid_sources: A list of URLs that should be ignored - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - :return: A generator of ( @@ -86,18 +97,22 @@ def get_packages(self, invalid_sources=None): return if invalid_sources is not None and self.repo_url in invalid_sources: - raise StopIteration() + return client = BitBucketClient(self.settings) try: repo_info = client.repo_info(self.repo_url) + if not repo_info: + raise GitProviderRepoInfoException(self) + + downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch']) + if not downloads: + raise GitProviderDownloadInfoException(self) - releases = [] - for download in client.download_info(self.repo_url): + for download in downloads: download['sublime_text'] = '*' download['platforms'] = ['*'] - releases.append(download) name = repo_info['name'] details = { @@ -105,8 +120,8 @@ def get_packages(self, invalid_sources=None): 'description': repo_info['description'], 'homepage': repo_info['homepage'], 'author': repo_info['author'], - 'last_modified': releases[0].get('date'), - 'releases': releases, + 'last_modified': downloads[0].get('date'), + 'releases': downloads, 'previous_names': [], 'labels': [], 'sources': [self.repo_url], @@ -121,4 +136,3 @@ def get_packages(self, invalid_sources=None): except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e self.cache['get_packages'] = {} - raise StopIteration() diff --git a/app/lib/package_control/providers/github_repository_provider.py b/app/lib/package_control/providers/github_repository_provider.py index d9cfb7b..ae51fb2 100644 --- 
a/app/lib/package_control/providers/github_repository_provider.py
+++ b/app/lib/package_control/providers/github_repository_provider.py
@@ -4,7 +4,11 @@
 from ..clients.github_client import GitHubClient
 from ..downloaders.downloader_exception import DownloaderException
 from .base_repository_provider import BaseRepositoryProvider
-from .provider_exception import ProviderException
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderRepoInfoException,
+    ProviderException,
+)
 
 
 class GitHubRepositoryProvider(BaseRepositoryProvider):
@@ -40,11 +44,22 @@ def __init__(self, repo_url, settings):
 
     @classmethod
     def match_url(cls, repo_url):
-        """Indicates if this provider can handle the provided repo_url"""
+        """
+        Indicates if this provider can handle the provided repo_url
+
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+              https://github.com/{user}/{repo}.git
+              https://github.com/{user}/{repo}
+              https://github.com/{user}/{repo}/
+              https://github.com/{user}/{repo}/tree/{branch}
+              https://github.com/{user}/{repo}/tree/{branch}/
 
-        master = re.search('^https?://github.com/[^/]+/[^/]+/?$', repo_url)
-        branch = re.search('^https?://github.com/[^/]+/[^/]+/tree/[^/]+/?$', repo_url)
-        return master is not None or branch is not None
+        :return:
+            True if repo_url matches a supported scheme.
+ """ + user, repo, _ = GitHubClient.user_repo_branch(repo_url) + return bool(user and repo) def get_packages(self, invalid_sources=None): """ @@ -53,10 +68,6 @@ def get_packages(self, invalid_sources=None): :param invalid_sources: A list of URLs that should be ignored - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - :return: A generator of ( @@ -94,18 +105,22 @@ def get_packages(self, invalid_sources=None): return if invalid_sources is not None and self.repo_url in invalid_sources: - raise StopIteration() + return client = GitHubClient(self.settings) try: repo_info = client.repo_info(self.repo_url) + if not repo_info: + raise GitProviderRepoInfoException(self) + + downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch']) + if not downloads: + raise GitProviderDownloadInfoException(self) - releases = [] - for download in client.download_info(self.repo_url): + for download in downloads: download['sublime_text'] = '*' download['platforms'] = ['*'] - releases.append(download) name = repo_info['name'] details = { @@ -113,8 +128,8 @@ def get_packages(self, invalid_sources=None): 'description': repo_info['description'], 'homepage': repo_info['homepage'], 'author': repo_info['author'], - 'last_modified': releases[0].get('date'), - 'releases': releases, + 'last_modified': downloads[0].get('date'), + 'releases': downloads, 'previous_names': [], 'labels': [], 'sources': [self.repo_url], @@ -129,4 +144,3 @@ def get_packages(self, invalid_sources=None): except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e self.cache['get_packages'] = {} - raise StopIteration() diff --git a/app/lib/package_control/providers/github_user_provider.py b/app/lib/package_control/providers/github_user_provider.py index 273cecc..f04689f 100644 --- a/app/lib/package_control/providers/github_user_provider.py +++ 
b/app/lib/package_control/providers/github_user_provider.py
@@ -1,10 +1,12 @@
-import re
-
 from ..clients.client_exception import ClientException
 from ..clients.github_client import GitHubClient
 from ..downloaders.downloader_exception import DownloaderException
 from .base_repository_provider import BaseRepositoryProvider
-from .provider_exception import ProviderException
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderUserInfoException,
+    ProviderException,
+)
 
 
 class GitHubUserProvider(BaseRepositoryProvider):
@@ -33,9 +35,19 @@ class GitHubUserProvider(BaseRepositoryProvider):
 
     @classmethod
     def match_url(cls, repo_url):
-        """Indicates if this provider can handle the provided repo_url"""
+        """
+        Indicates if this provider can handle the provided repo_url
+
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+              https://github.com/{user}
+              https://github.com/{user}/
 
-        return re.search('^https?://github.com/[^/]+/?$', repo_url) is not None
+        :return:
+            True if repo_url matches a supported scheme.
+ """ + user, repo, _ = GitHubClient.user_repo_branch(repo_url) + return bool(user and not repo) def get_packages(self, invalid_sources=None): """ @@ -44,10 +56,6 @@ def get_packages(self, invalid_sources=None): :param invalid_sources: A list of URLs that should be ignored - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - :return: A generator of ( @@ -85,37 +93,41 @@ def get_packages(self, invalid_sources=None): return if invalid_sources is not None and self.repo_url in invalid_sources: - raise StopIteration() + return client = GitHubClient(self.settings) try: user_repos = client.user_info(self.repo_url) - except (DownloaderException, ClientException) as e: + if not user_repos: + raise GitProviderUserInfoException(self) + except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e self.cache['get_packages'] = {} - raise + return output = {} for repo_info in user_repos: author = repo_info['author'] name = repo_info['name'] - repo_url = client.make_repo_url(author, name) + repo_url = client.repo_url(author, name) try: - releases = [] - for download in client.download_info(repo_url): + downloads = client.download_info_from_branch(repo_url, repo_info['default_branch']) + if not downloads: + raise GitProviderDownloadInfoException(self) + + for download in downloads: download['sublime_text'] = '*' download['platforms'] = ['*'] - releases.append(download) details = { 'name': name, 'description': repo_info['description'], 'homepage': repo_info['homepage'], 'author': author, - 'last_modified': releases[0].get('date'), - 'releases': releases, + 'last_modified': downloads[0].get('date'), + 'releases': downloads, 'previous_names': [], 'labels': [], 'sources': [self.repo_url], diff --git a/app/lib/package_control/providers/gitlab_repository_provider.py b/app/lib/package_control/providers/gitlab_repository_provider.py index 
30db514..7521268 100644
--- a/app/lib/package_control/providers/gitlab_repository_provider.py
+++ b/app/lib/package_control/providers/gitlab_repository_provider.py
@@ -4,7 +4,11 @@
 from ..clients.gitlab_client import GitLabClient
 from ..downloaders.downloader_exception import DownloaderException
 from .base_repository_provider import BaseRepositoryProvider
-from .provider_exception import ProviderException
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderRepoInfoException,
+    ProviderException,
+)
 
 
 class GitLabRepositoryProvider(BaseRepositoryProvider):
@@ -40,11 +44,22 @@ def __init__(self, repo_url, settings):
 
     @classmethod
     def match_url(cls, repo_url):
-        """Indicates if this provider can handle the provided repo_url"""
+        """
+        Indicates if this provider can handle the provided repo_url
+
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+              https://gitlab.com/{user}/{repo}.git
+              https://gitlab.com/{user}/{repo}
+              https://gitlab.com/{user}/{repo}/
+              https://gitlab.com/{user}/{repo}/-/tree/{branch}
+              https://gitlab.com/{user}/{repo}/-/tree/{branch}/
 
-        master = re.search('^https?://gitlab.com/[^/]+/[^/]+/?$', repo_url)
-        branch = re.search('^https?://gitlab.com/[^/]+/[^/]+/-/tree/[^/]+/?$', repo_url)
-        return master is not None or branch is not None
+        :return:
+            True if repo_url matches a supported scheme.
+ """ + user, repo, _ = GitLabClient.user_repo_branch(repo_url) + return bool(user and repo) def get_packages(self, invalid_sources=None): """ @@ -53,10 +68,6 @@ def get_packages(self, invalid_sources=None): :param invalid_sources: A list of URLs that should be ignored - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - :return: A generator of ( @@ -94,18 +105,22 @@ def get_packages(self, invalid_sources=None): return if invalid_sources is not None and self.repo_url in invalid_sources: - raise StopIteration() + return client = GitLabClient(self.settings) try: repo_info = client.repo_info(self.repo_url) + if not repo_info: + raise GitProviderRepoInfoException(self) + + downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch']) + if not downloads: + raise GitProviderDownloadInfoException(self) - releases = [] - for download in client.download_info(self.repo_url): + for download in downloads: download['sublime_text'] = '*' download['platforms'] = ['*'] - releases.append(download) name = repo_info['name'] details = { @@ -113,8 +128,8 @@ def get_packages(self, invalid_sources=None): 'description': repo_info['description'], 'homepage': repo_info['homepage'], 'author': repo_info['author'], - 'last_modified': releases[0].get('date'), - 'releases': releases, + 'last_modified': downloads[0].get('date'), + 'releases': downloads, 'previous_names': [], 'labels': [], 'sources': [self.repo_url], @@ -129,4 +144,3 @@ def get_packages(self, invalid_sources=None): except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e self.cache['get_packages'] = {} - raise StopIteration() diff --git a/app/lib/package_control/providers/gitlab_user_provider.py b/app/lib/package_control/providers/gitlab_user_provider.py index c583d7d..26a659d 100644 --- a/app/lib/package_control/providers/gitlab_user_provider.py +++ 
b/app/lib/package_control/providers/gitlab_user_provider.py @@ -1,10 +1,12 @@ -import re - from ..clients.client_exception import ClientException from ..clients.gitlab_client import GitLabClient from ..downloaders.downloader_exception import DownloaderException from .base_repository_provider import BaseRepositoryProvider -from .provider_exception import ProviderException +from .provider_exception import ( + GitProviderDownloadInfoException, + GitProviderUserInfoException, + ProviderException, +) class GitLabUserProvider(BaseRepositoryProvider): @@ -35,9 +37,17 @@ class GitLabUserProvider(BaseRepositoryProvider): def match_url(cls, repo_url): """ Indicates if this provider can handle the provided repo_url - """ - return re.search('^https?://gitlab.com/[^/]+/?$', repo_url) is not None + :param repo_url: + The URL to the repository, in one of the forms: + https://gitlab.com/{user} + https://gitlab.com/{user}/ + + :return: + True if repo_url matches an supported scheme. + """ + user, repo, _ = GitLabClient.user_repo_branch(repo_url) + return bool(user and not repo) def get_packages(self, invalid_sources=None): """ @@ -46,10 +56,6 @@ def get_packages(self, invalid_sources=None): :param invalid_sources: A list of URLs that should be ignored - :raises: - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - :return: A generator of ( @@ -87,50 +93,53 @@ def get_packages(self, invalid_sources=None): return if invalid_sources is not None and self.repo_url in invalid_sources: - raise StopIteration() + return client = GitLabClient(self.settings) try: user_repos = client.user_info(self.repo_url) - except (DownloaderException, ClientException) as e: + if not user_repos: + raise GitProviderUserInfoException(self) + except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e self.cache['get_packages'] = {} - raise + return output = {} for repo_info in 
user_repos: author = repo_info['author'] name = repo_info['name'] - repo_url = client.make_repo_url(author, name) + repo_url = client.repo_url(author, name) try: - releases = [] - for download in client.download_info(repo_url): + downloads = client.download_info_from_branch(repo_url, repo_info['default_branch']) + if not downloads: + raise GitProviderDownloadInfoException(self) + + for download in downloads: download['sublime_text'] = '*' download['platforms'] = ['*'] - releases.append(download) details = { 'name': name, 'description': repo_info['description'], 'homepage': repo_info['homepage'], 'author': author, - 'last_modified': releases[0].get('date'), - 'releases': releases, + 'last_modified': downloads[0].get('date'), + 'releases': downloads, 'previous_names': [], 'labels': [], 'sources': [self.repo_url], 'readme': repo_info['readme'], 'issues': repo_info['issues'], 'donate': repo_info['donate'], - 'buy': None, + 'buy': None } output[name] = details yield (name, details) - except (DownloaderException, ClientException, - ProviderException) as e: + except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[repo_url] = e self.cache['get_packages'] = output diff --git a/app/lib/package_control/providers/provider_exception.py b/app/lib/package_control/providers/provider_exception.py index cb669fd..17db1ab 100644 --- a/app/lib/package_control/providers/provider_exception.py +++ b/app/lib/package_control/providers/provider_exception.py @@ -4,3 +4,51 @@ class ProviderException(Exception): def __bytes__(self): return self.__str__().encode('utf-8') + + +class GitProviderUserInfoException(ProviderException): + """ + Exception for signaling user information download error. + + The exception is used to indicate a given URL not being in expected form + to be used by given provider to download user info from. 
+ """ + + def __init__(self, provider): + self.provider_name = provider.__class__.__name__ + self.url = provider.repo_url + + def __str__(self): + return '%s unable to fetch user information from "%s".' % (self.provider_name, self.url) + + +class GitProviderRepoInfoException(ProviderException): + """ + Exception for signaling repository information download error. + + The exception is used to indicate a given URL not being in expected form + to be used by given provider to download repo info from. + """ + + def __init__(self, provider): + self.provider_name = provider.__class__.__name__ + self.url = provider.repo_url + + def __str__(self): + return '%s unable to fetch repo information from "%s".' % (self.provider_name, self.url) + + +class GitProviderDownloadInfoException(ProviderException): + """ + Exception for signaling download information download error. + + The exception is used to indicate a given URL not being in expected form + to be used by given provider to download release information from. + """ + + def __init__(self, provider, url=None): + self.provider_name = provider.__class__.__name__ + self.url = url or provider.repo_url + + def __str__(self): + return '%s unable to fetch download information from "%s".' 
% (self.provider_name, self.url) diff --git a/app/lib/package_control/providers/release_selector.py b/app/lib/package_control/providers/release_selector.py index 00b8ce8..e55c191 100644 --- a/app/lib/package_control/providers/release_selector.py +++ b/app/lib/package_control/providers/release_selector.py @@ -27,6 +27,7 @@ def filter_releases(package, settings, releases): sublime.platform(), '*' ] + st_version = int(sublime.version()) install_prereleases = settings.get('install_prereleases') allow_prereleases = install_prereleases is True @@ -37,22 +38,16 @@ def filter_releases(package, settings, releases): releases = version_exclude_prerelease(releases) output = [] - st_version = int(sublime.version()) for release in releases: - platforms = release.get('platforms', '*') + platforms = release.get('platforms') or ['*'] if not isinstance(platforms, list): platforms = [platforms] - matched = False - for selector in platform_selectors: - if selector in platforms: - matched = True - break - if not matched: + if not any(selector in platforms for selector in platform_selectors): continue # Default to '*' (for legacy reasons), see #604 - if not is_compatible_version(release.get('sublime_text', '*'), st_version): + if not is_compatible_version(release.get('sublime_text') or '*', st_version): continue output.append(release) diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index 92d26fa..0fa194e 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ b/app/lib/package_control/providers/repository_provider.py @@ -205,11 +205,6 @@ def get_libraries(self, invalid_sources=None): :param invalid_sources: A list of URLs that are permissible to fetch data from - :raises: - ProviderException: when an error occurs trying to open a file - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - :return: A generator of 
( @@ -242,7 +237,7 @@ def get_libraries(self, invalid_sources=None): return if invalid_sources is not None and self.repo_url in invalid_sources: - raise StopIteration() + return if not self.fetch(): return @@ -377,25 +372,23 @@ def assert_release_keys(download_info): (info['name'], self.repo_url) )) - client = None - extra = None - url = None + downloads = None if tags: + extra = None if tags is not True: extra = tags for client in clients: - url = client.make_tags_url(base) - if url: + downloads = client.download_info_from_tags(base, extra) + if downloads is not None: break - - if branch: + else: for client in clients: - url = client.make_branch_url(base, branch) - if url: + downloads = client.download_info_from_branch(base, branch) + if downloads is not None: break - if not url: + if downloads is None: raise ProviderException(text.format( ''' Invalid "base" value "%s" for one of the releases of the @@ -404,14 +397,13 @@ def assert_release_keys(download_info): (base, info['name'], self.repo_url) )) - downloads = client.download_info(url, extra) if downloads is False: raise ProviderException(text.format( ''' - No valid semver tags found at %s for the library - "%s" in the repository %s. + No valid semver tags found at %s for the + library "%s" in the repository %s. 
''', - (url, info['name'], self.repo_url) + (base, info['name'], self.repo_url) )) for download in downloads: @@ -462,11 +454,6 @@ def get_packages(self, invalid_sources=None): :param invalid_sources: A list of URLs that are permissible to fetch data from - :raises: - ProviderException: when an error occurs trying to open a file - DownloaderException: when there is an issue download package info - ClientException: when there is an issue parsing package info - :return: A generator of ( @@ -505,7 +492,7 @@ def get_packages(self, invalid_sources=None): return if invalid_sources is not None and self.repo_url in invalid_sources: - raise StopIteration() + return if not self.fetch(): return @@ -549,6 +536,9 @@ def get_packages(self, invalid_sources=None): if package.get(field): info[field] = package.get(field) + details = None + releases = None + # Schema version 2.0 allows for grabbing details about a package, or its # download from "details" urls. See the GitHubClient and BitBucketClient # classes for valid URLs. @@ -570,8 +560,7 @@ def get_packages(self, invalid_sources=None): repo_info = client.repo_info(details) if repo_info: break - - if not repo_info: + else: raise ProviderException(text.format( ''' Invalid "details" value "%s" for one of the packages in the repository %s. 
@@ -579,6 +568,8 @@ def get_packages(self, invalid_sources=None): (details, self.repo_url) )) + del repo_info['default_branch'] + # When grabbing details, prefer explicit field values over the values # from the GitHub or BitBucket API info = dict(chain(repo_info.items(), info.items())) @@ -656,18 +647,27 @@ def get_packages(self, invalid_sources=None): download_details = release['details'] try: - downloads = False + downloads = None for client in clients: downloads = client.download_info(download_details) if downloads is not None: break + if downloads is None: + raise ProviderException(text.format( + ''' + Invalid "details" value "%s" for one of the releases of the + package "%s" in the repository %s. + ''', + (download_details, info['name'], self.repo_url) + )) + if downloads is False: raise ProviderException(text.format( ''' - Invalid "details" value "%s" under the "releases" key - for the package "%s" in the repository %s. + No valid semver tags found at %s for the + package "%s" in the repository %s. 
''', (download_details, info['name'], self.repo_url) )) @@ -704,25 +704,23 @@ def get_packages(self, invalid_sources=None): (info['name'], self.repo_url) )) - client = None - extra = None - url = None + downloads = None if tags: + extra = None if tags is not True: extra = tags for client in clients: - url = client.make_tags_url(base) - if url: + downloads = client.download_info_from_tags(base, extra) + if downloads is not None: break - - if branch: + else: for client in clients: - url = client.make_branch_url(base, branch) - if url: + downloads = client.download_info_from_branch(base, branch) + if downloads is not None: break - if not url: + if downloads is None: raise ProviderException(text.format( ''' Invalid "base" value "%s" for one of the releases of the @@ -731,14 +729,13 @@ def get_packages(self, invalid_sources=None): (base, info['name'], self.repo_url) )) - downloads = client.download_info(url, extra) if downloads is False: raise ProviderException(text.format( ''' No valid semver tags found at %s for the package "%s" in the repository %s. 
''', - (url, info['name'], self.repo_url) + (base, info['name'], self.repo_url) )) for download in downloads: diff --git a/app/lib/package_control/providers/schema_compat.py b/app/lib/package_control/providers/schema_compat.py index 39dd80c..064d980 100644 --- a/app/lib/package_control/providers/schema_compat.py +++ b/app/lib/package_control/providers/schema_compat.py @@ -1,5 +1,5 @@ from ..download_manager import update_url -from ..semver import SemVer +from ..deps.semver import SemVer def platforms_to_releases(info, debug): diff --git a/app/lib/package_control/versions.py b/app/lib/package_control/versions.py index 3055601..e965c0b 100644 --- a/app/lib/package_control/versions.py +++ b/app/lib/package_control/versions.py @@ -1,63 +1,83 @@ import re -from .semver import SemVer +from .deps.semver import SemVer from .console_write import console_write -def semver_compat(v): - """ - Converts a string version number into SemVer. If the version is based on - a date, converts to 0.0.1+yyyy.mm.dd.hh.mm.ss. - - :param v: - A string, dict with 'version' key, or a SemVer object - - :return: - A string that is a valid semantic version number - """ - - if isinstance(v, SemVer): - # SemVer only defined __str__, not __unicode__, so we always use str() - return str(v) - - # Allowing passing in a dict containing info about a package - if isinstance(v, dict): - if 'version' not in v: - return '0' - v = v['version'] - - # Trim v off of the front - v = re.sub('^v', '', v) - - # We prepend 0 to all date-based version numbers so that developers - # may switch to explicit versioning from GitHub/BitBucket - # versioning based on commit dates. - # - # When translating dates into semver, the way to get each date - # segment into the version is to treat the year and month as - # minor and patch, and then the rest as a numeric build version - # with four different parts. 
The result looks like: - # 0.2012.11+10.31.23.59 - date_match = re.match(r'(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$', v) - if date_match: - v = '0.0.1+%s.%s.%s.%s.%s.%s' % date_match.groups() - - # This handles version that were valid pre-semver with 4+ dotted - # groups, such as 1.6.9.0 - four_plus_match = re.match(r'(\d+\.\d+\.\d+)[T\.](\d+(\.\d+)*)$', v) - if four_plus_match: - v = '%s+%s' % (four_plus_match.group(1), four_plus_match.group(2)) - - # Semver must have major, minor, patch - elif re.match(r'^\d+$', v): - v += '.0.0' - elif re.match(r'^\d+\.\d+$', v): - v += '.0' - return v - - -def version_comparable(string): - return SemVer(semver_compat(string)) +class PackageVersion(SemVer): + + _date_pattern = re.compile(r'^(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$') + _pre_semver_pattern = re.compile(r'^(\d+)(?:\.(\d+)(?:\.(\d+)(?:[T\.](\d+(\.\d+)*))?)?)?$') + + @classmethod + def _parse(cls, ver): + """ + Converts a string version number into SemVer. If the version is based on + a date, converts to 0.0.1+yyyy.mm.dd.hh.mm.ss. 
+ + :param ver: + A string, dict with 'version' key, or a SemVer object + + :raises: + TypeError, if ver is not one of: str, dict with version, SemVer + ValueError, if ver is no valid version string + + :return: + A list of 5 items representing a valid semantic version number + """ + + # Allowing passing in a dict containing info about a package + if isinstance(ver, dict): + if 'version' not in ver: + raise TypeError("%s is not a package or library release" % ver) + ver = ver['version'] + + if isinstance(ver, SemVer): + return ver + + if not isinstance(ver, str): + raise TypeError("%r is not a string" % ver) + + # Trim v off of the front + if ver.startswith('v'): + ver = ver[1:] + + # Match semver compatible strings + match = cls._match_regex.match(ver) + if match: + g = list(match.groups()) + for i in range(3): + g[i] = int(g[i]) + + return g + + # We prepend 0 to all date-based version numbers so that developers + # may switch to explicit versioning from GitHub/GitLab/BitBucket + # versioning based on commit dates. + # + # The resulting semver is alwass 0.0.1 with timestamp being used + # as build number, so any explicitly choosen version (via tags) will + # be greater, once a package moves from branch to tag based releases. 
+ # + # The result looks like: + # 0.0.1+2020.07.15.10.50.38 + match = cls._date_pattern.match(ver) + if match: + return [0, 0, 1, None, '.'.join(match.groups())] + + # This handles versions that were valid pre-semver with 1 to 4+ dotted + # groups, such as 1, 1.6, or 1.6.9.0 + match = cls._pre_semver_pattern.match(ver) + if match: + return [ + int(match.group(1) or 0), + int(match.group(2) or 0), + int(match.group(3) or 0), + None, + match.group(4) + ] + + raise ValueError("'%s' is not a valid SemVer string" % ver) def version_exclude_prerelease(versions): @@ -71,50 +91,33 @@ def version_exclude_prerelease(versions): The list of versions with pre-releases removed """ - output = [] - for version in versions: - if SemVer(semver_compat(version)).prerelease is not None: - continue - output.append(version) - return output + return [v for v in versions if not PackageVersion(v).prerelease] -def version_process(versions, filter_prefix): +def version_match_prefix(version, filter_prefix): """ - Filter a list of versions to ones that are valid SemVers, if a prefix - is provided, only match versions starting with the prefix and split + Create a SemVer for a given version, if it matches filter_prefix. - :param versions: - The list of versions to filter + :param version: + The version string to match :param filter_prefix: - Remove this prefix from the version before checking if it is a valid - SemVer. If this prefix is not present, skip the version. 
+ The prefix to match versions against - :return: - A list of dicts, each of which has the keys "version" and "prefix" + :returns: + SemVer, if version is valid and matches given filter_prefix + None, if version is invalid or doesn't match filter_prefix """ - output = [] - for version in versions: - prefix = '' - + try: if filter_prefix: - if version[0:len(filter_prefix)] != filter_prefix: - continue - check_version = version[len(filter_prefix):] - prefix = filter_prefix - + if version.startswith(filter_prefix): + return PackageVersion(version[len(filter_prefix):]) else: - check_version = re.sub('^v', '', version) - if check_version != version: - prefix = 'v' - - if not SemVer.valid(check_version): - continue - - output.append({'version': check_version, 'prefix': prefix}) - return output + return PackageVersion(version) + except ValueError: + pass + return None def version_sort(sortable, *fields, **kwargs): @@ -137,7 +140,7 @@ def version_sort(sortable, *fields, **kwargs): """ def _version_sort_key(item): - result = SemVer(semver_compat(item)) + result = PackageVersion(item) if fields: values = [result] for field in fields: From ab039dba7051d99cc0e3ca26072fb515e83c5935 Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Sun, 21 Aug 2022 16:47:25 +0200 Subject: [PATCH 07/39] Fix possible unbound variable warning --- app/lib/refresh_packages.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/app/lib/refresh_packages.py b/app/lib/refresh_packages.py index aa2d3f0..01895b1 100644 --- a/app/lib/refresh_packages.py +++ b/app/lib/refresh_packages.py @@ -104,12 +104,11 @@ def resolve_path(path): if not invalid_library_sources: invalid_library_sources = None + repositories = [] for provider_cls in CHANNEL_PROVIDERS: - if not provider_cls.match_url(channel): - continue - provider = provider_cls(channel, settings) - repositories = provider.get_repositories() - break + if provider_cls.match_url(channel): + repositories = provider_cls(channel, 
settings).get_repositories() + break affected_packages = [] affected_libraries = [] From b8845de08975b98e788daa93830eff2031b60d95 Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Sat, 27 Aug 2022 17:51:27 +0200 Subject: [PATCH 08/39] Fix marking packages missing due to hitting rate limits Fixes #158 --- app/lib/package_control/download_manager.py | 11 ++++------- .../downloaders/rate_limit_exception.py | 18 ++++++++++++++++-- app/lib/refresh_packages.py | 9 +++++++++ 3 files changed, 29 insertions(+), 9 deletions(-) diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py index db10559..a4ab569 100644 --- a/app/lib/package_control/download_manager.py +++ b/app/lib/package_control/download_manager.py @@ -16,6 +16,7 @@ from .downloaders.urllib_downloader import UrlLibDownloader from .downloaders.binary_not_found_error import BinaryNotFoundError from .downloaders.rate_limit_exception import RateLimitException +from .downloaders.rate_limit_exception import RateLimitSkipException from .downloaders.downloader_exception import DownloaderException from .downloaders.win_downloader_exception import WinDownloaderException from .downloaders.oscrypto_downloader_exception import OscryptoDownloaderException @@ -318,14 +319,10 @@ def fetch(self, url, error_message, prefer_cached=False): ) if hostname in rate_limited_domains: - error_string = 'Skipping due to hitting rate limit for %s' % hostname + exception = RateLimitSkipException(hostname) if self.settings.get('debug'): - console_write( - ' %s', - error_string, - prefix=False - ) - raise DownloaderException(error_string) + console_write(' %s' % exception, prefix=False) + raise exception try: return self.downloader.download(url, error_message, timeout, 3, prefer_cached) diff --git a/app/lib/package_control/downloaders/rate_limit_exception.py b/app/lib/package_control/downloaders/rate_limit_exception.py index de64a3a..0f506ee 100644 --- 
a/app/lib/package_control/downloaders/rate_limit_exception.py +++ b/app/lib/package_control/downloaders/rate_limit_exception.py @@ -10,5 +10,19 @@ class RateLimitException(DownloaderException): def __init__(self, domain, limit): self.domain = domain self.limit = limit - message = 'Rate limit of %s exceeded for %s' % (limit, domain) - super(RateLimitException, self).__init__(message) + + def __str__(self): + return 'Rate limit of %s exceeded for %s' % (self.limit, self.domain) + + +class RateLimitSkipException(DownloaderException): + + """ + An exception for when skipping requests due to rate limit of an API has been exceeded. + """ + + def __init__(self, domain): + self.domain = domain + + def __str__(self): + return 'Skipping due to hitting rate limit for %s' % self.domain diff --git a/app/lib/refresh_packages.py b/app/lib/refresh_packages.py index 01895b1..6bf6101 100644 --- a/app/lib/refresh_packages.py +++ b/app/lib/refresh_packages.py @@ -4,6 +4,7 @@ import traceback from .package_control.providers import REPOSITORY_PROVIDERS, CHANNEL_PROVIDERS +from .package_control.downloaders.rate_limit_exception import RateLimitException, RateLimitSkipException from .package_control.download_manager import close_all_connections from .package_control.clients.readme_client import ReadmeClient from .. 
import config @@ -110,6 +111,8 @@ def resolve_path(path): repositories = provider_cls(channel, settings).get_repositories() break + accepted_errors = (RateLimitException, RateLimitSkipException) + affected_packages = [] affected_libraries = [] for repository in repositories: @@ -173,13 +176,19 @@ def resolve_path(path): print('-' * 60, file=sys.stderr) for source, exception in provider.get_failed_sources(): + if isinstance(exception, accepted_errors): + continue package.modify.mark_missing(source, clean_url(exception), needs_review(exception)) library.mark_missing(source, clean_url(exception), needs_review(exception)) for package_name, exception in provider.get_broken_packages(): + if isinstance(exception, accepted_errors): + continue package.modify.mark_missing_by_name(package_name, clean_url(exception), needs_review(exception)) for library_name, exception in provider.get_broken_libraries(): + if isinstance(exception, accepted_errors): + continue library.mark_missing_by_name(library_name, clean_url(exception), needs_review(exception)) break From a069c61c8b0fd1967afbf55d21a4b584ef37521e Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Tue, 30 Aug 2022 12:26:27 +0200 Subject: [PATCH 09/39] Upgrade package_control/four-point-oh --- app/lib/package_control/cache.py | 2 +- .../clients/json_api_client.py | 6 +- .../package_control/deps/asn1crypto/algos.py | 43 +- .../package_control/deps/asn1crypto/cms.py | 31 +- .../package_control/deps/asn1crypto/core.py | 4 + .../package_control/deps/asn1crypto/csr.py | 39 +- .../package_control/deps/asn1crypto/keys.py | 34 +- .../package_control/deps/asn1crypto/parser.py | 67 ++-- .../package_control/deps/asn1crypto/tsp.py | 2 +- .../deps/asn1crypto/version.py | 4 +- .../package_control/deps/asn1crypto/x509.py | 7 +- .../deps/oscrypto/_asymmetric.py | 10 +- .../deps/oscrypto/_mac/_common_crypto_cffi.py | 29 ++ .../oscrypto/_mac/_core_foundation_cffi.py | 375 ++++++++++++++++++ .../deps/oscrypto/_mac/_security.py | 1 + 
.../deps/oscrypto/_mac/_security_cffi.py | 238 +++++++++++ .../deps/oscrypto/_mac/asymmetric.py | 29 +- .../package_control/deps/oscrypto/_mac/tls.py | 11 +- .../deps/oscrypto/_openssl/_libcrypto.py | 26 +- .../deps/oscrypto/_openssl/_libcrypto_cffi.py | 278 +++++++++++++ .../oscrypto/_openssl/_libcrypto_ctypes.py | 23 +- .../deps/oscrypto/_openssl/_libssl_cffi.py | 99 +++++ .../deps/oscrypto/_openssl/asymmetric.py | 156 +++++--- .../deps/oscrypto/_openssl/symmetric.py | 42 +- .../deps/oscrypto/_openssl/tls.py | 37 +- .../package_control/deps/oscrypto/_pkcs1.py | 4 +- .../package_control/deps/oscrypto/_pkcs12.py | 2 +- app/lib/package_control/deps/oscrypto/_tls.py | 16 + .../deps/oscrypto/_win/_advapi32.py | 2 +- .../deps/oscrypto/_win/_advapi32_cffi.py | 145 +++++++ .../deps/oscrypto/_win/_cng_cffi.py | 120 ++++++ .../deps/oscrypto/_win/_crypt32_cffi.py | 188 +++++++++ .../deps/oscrypto/_win/_kernel32_cffi.py | 44 ++ .../deps/oscrypto/_win/_secur32_cffi.py | 129 ++++++ .../deps/oscrypto/_win/asymmetric.py | 62 ++- .../deps/oscrypto/_win/symmetric.py | 6 +- .../package_control/deps/oscrypto/_win/tls.py | 2 +- .../package_control/deps/oscrypto/version.py | 4 +- app/lib/package_control/download_manager.py | 146 +++++-- .../package_control/downloaders/__init__.py | 31 +- .../downloaders/rate_limit_exception.py | 4 +- .../providers/channel_provider.py | 5 +- .../providers/release_selector.py | 82 ---- .../providers/repository_provider.py | 5 +- 44 files changed, 2270 insertions(+), 320 deletions(-) create mode 100644 app/lib/package_control/deps/oscrypto/_mac/_common_crypto_cffi.py create mode 100644 app/lib/package_control/deps/oscrypto/_mac/_core_foundation_cffi.py create mode 100644 app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py create mode 100644 app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py create mode 100644 app/lib/package_control/deps/oscrypto/_openssl/_libssl_cffi.py create mode 100644 
app/lib/package_control/deps/oscrypto/_win/_advapi32_cffi.py create mode 100644 app/lib/package_control/deps/oscrypto/_win/_cng_cffi.py create mode 100644 app/lib/package_control/deps/oscrypto/_win/_crypt32_cffi.py create mode 100644 app/lib/package_control/deps/oscrypto/_win/_kernel32_cffi.py create mode 100644 app/lib/package_control/deps/oscrypto/_win/_secur32_cffi.py delete mode 100644 app/lib/package_control/providers/release_selector.py diff --git a/app/lib/package_control/cache.py b/app/lib/package_control/cache.py index 5ea1b9b..1d84540 100644 --- a/app/lib/package_control/cache.py +++ b/app/lib/package_control/cache.py @@ -28,7 +28,7 @@ def get_cache(key, default=None): struct = _channel_repository_cache.get(key, {}) expires = struct.get('expires') if expires and expires > time.time(): - return struct.get('data') + return struct.get('data', default) return default diff --git a/app/lib/package_control/clients/json_api_client.py b/app/lib/package_control/clients/json_api_client.py index ac8e00e..73ff561 100644 --- a/app/lib/package_control/clients/json_api_client.py +++ b/app/lib/package_control/clients/json_api_client.py @@ -2,7 +2,7 @@ from urllib.parse import urlencode, urlparse from .client_exception import ClientException -from ..download_manager import downloader +from ..download_manager import http_get class JSONApiClient: @@ -35,9 +35,7 @@ def fetch(self, url, prefer_cached=False): joiner = '?%s' if url.find('?') == -1 else '&%s' url += joiner % params - with downloader(url, self.settings) as manager: - content = manager.fetch(url, 'Error downloading repository.', prefer_cached) - return content + return http_get(url, self.settings, 'Error downloading repository.', prefer_cached) def fetch_json(self, url, prefer_cached=False): """ diff --git a/app/lib/package_control/deps/asn1crypto/algos.py b/app/lib/package_control/deps/asn1crypto/algos.py index d49be26..fc25e4d 100644 --- a/app/lib/package_control/deps/asn1crypto/algos.py +++ 
b/app/lib/package_control/deps/asn1crypto/algos.py @@ -260,6 +260,9 @@ class SignedDigestAlgorithmId(ObjectIdentifier): '1.2.840.113549.1.1.1': 'rsassa_pkcs1v15', '1.2.840.10040.4.1': 'dsa', '1.2.840.10045.4': 'ecdsa', + # RFC 8410 -- https://tools.ietf.org/html/rfc8410 + '1.3.101.112': 'ed25519', + '1.3.101.113': 'ed448', } _reverse_map = { @@ -286,6 +289,8 @@ class SignedDigestAlgorithmId(ObjectIdentifier): 'sha3_256_ecdsa': '2.16.840.1.101.3.4.3.10', 'sha3_384_ecdsa': '2.16.840.1.101.3.4.3.11', 'sha3_512_ecdsa': '2.16.840.1.101.3.4.3.12', + 'ed25519': '1.3.101.112', + 'ed448': '1.3.101.113', } @@ -304,8 +309,8 @@ class SignedDigestAlgorithm(_ForceNullParameters, Sequence): def signature_algo(self): """ :return: - A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa" or - "ecdsa" + A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa", + "ecdsa", "ed25519" or "ed448" """ algorithm = self['algorithm'].native @@ -334,6 +339,8 @@ def signature_algo(self): 'sha3_384_ecdsa': 'ecdsa', 'sha3_512_ecdsa': 'ecdsa', 'ecdsa': 'ecdsa', + 'ed25519': 'ed25519', + 'ed448': 'ed448', } if algorithm in algo_map: return algo_map[algorithm] @@ -350,7 +357,7 @@ def hash_algo(self): """ :return: A unicode string of "md2", "md5", "sha1", "sha224", "sha256", - "sha384", "sha512", "sha512_224", "sha512_256" + "sha384", "sha512", "sha512_224", "sha512_256" or "shake256" """ algorithm = self['algorithm'].native @@ -371,6 +378,8 @@ def hash_algo(self): 'sha256_ecdsa': 'sha256', 'sha384_ecdsa': 'sha384', 'sha512_ecdsa': 'sha512', + 'ed25519': 'sha512', + 'ed448': 'shake256', } if algorithm in algo_map: return algo_map[algorithm] @@ -402,9 +411,21 @@ class Pbkdf2Params(Sequence): ] +class ScryptParams(Sequence): + # https://tools.ietf.org/html/rfc7914#section-7 + _fields = [ + ('salt', OctetString), + ('cost_parameter', Integer), + ('block_size', Integer), + ('parallelization_parameter', Integer), + ('key_length', Integer, {'optional': True}), + ] + + class 
KdfAlgorithmId(ObjectIdentifier): _map = { - '1.2.840.113549.1.5.12': 'pbkdf2' + '1.2.840.113549.1.5.12': 'pbkdf2', + '1.3.6.1.4.1.11591.4.11': 'scrypt', } @@ -415,7 +436,8 @@ class KdfAlgorithm(Sequence): ] _oid_pair = ('algorithm', 'parameters') _oid_specs = { - 'pbkdf2': Pbkdf2Params + 'pbkdf2': Pbkdf2Params, + 'scrypt': ScryptParams, } @@ -738,6 +760,8 @@ def kdf_hmac(self): encryption_algo = self['algorithm'].native if encryption_algo == 'pbes2': + if self.kdf == 'scrypt': + return None return self['parameters']['key_derivation_func']['parameters']['prf']['algorithm'].native if encryption_algo.find('.') == -1: @@ -818,6 +842,8 @@ def kdf_iterations(self): encryption_algo = self['algorithm'].native if encryption_algo == 'pbes2': + if self.kdf == 'scrypt': + return None return self['parameters']['key_derivation_func']['parameters']['iteration_count'].native if encryption_algo.find('.') == -1: @@ -874,8 +900,7 @@ def key_length(self): return cipher_lengths[encryption_algo] if encryption_algo == 'rc2': - rc2_params = self['parameters'].parsed['encryption_scheme']['parameters'].parsed - rc2_parameter_version = rc2_params['rc2_parameter_version'].native + rc2_parameter_version = self['parameters']['rc2_parameter_version'].native # See page 24 of # http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf @@ -1042,7 +1067,7 @@ def encryption_block_size(self): return cipher_map[encryption_algo] if encryption_algo == 'rc5': - return self['parameters'].parsed['block_size_in_bits'].native / 8 + return self['parameters']['block_size_in_bits'].native // 8 if encryption_algo == 'pbes2': return self['parameters']['encryption_scheme'].encryption_block_size @@ -1084,7 +1109,7 @@ def encryption_iv(self): encryption_algo = self['algorithm'].native if encryption_algo in set(['rc2', 'rc5']): - return self['parameters'].parsed['iv'].native + return self['parameters']['iv'].native # For DES/Triple DES and AES the IV is the entirety of 
the parameters octet_string_iv_oids = set([ diff --git a/app/lib/package_control/deps/asn1crypto/cms.py b/app/lib/package_control/deps/asn1crypto/cms.py index 2115aed..c395b22 100644 --- a/app/lib/package_control/deps/asn1crypto/cms.py +++ b/app/lib/package_control/deps/asn1crypto/cms.py @@ -30,6 +30,7 @@ _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm, + EncryptionAlgorithmId, HmacAlgorithm, KdfAlgorithm, RSAESOAEPParams, @@ -100,6 +101,8 @@ class CMSAttributeType(ObjectIdentifier): '1.2.840.113549.1.9.4': 'message_digest', '1.2.840.113549.1.9.5': 'signing_time', '1.2.840.113549.1.9.6': 'counter_signature', + # https://datatracker.ietf.org/doc/html/rfc2633#section-2.5.2 + '1.2.840.113549.1.9.15': 'smime_capabilities', # https://tools.ietf.org/html/rfc2633#page-26 '1.2.840.113549.1.9.16.2.11': 'encrypt_key_pref', # https://tools.ietf.org/html/rfc3161#page-20 @@ -273,7 +276,7 @@ class V2Form(Sequence): class AttCertIssuer(Choice): _alternatives = [ ('v1_form', GeneralNames), - ('v2_form', V2Form, {'explicit': 0}), + ('v2_form', V2Form, {'implicit': 0}), ] @@ -315,7 +318,7 @@ class SetOfSvceAuthInfo(SetOf): class RoleSyntax(Sequence): _fields = [ ('role_authority', GeneralNames, {'implicit': 0, 'optional': True}), - ('role_name', GeneralName, {'implicit': 1}), + ('role_name', GeneralName, {'explicit': 1}), ] @@ -337,7 +340,7 @@ class ClassList(BitString): class SecurityCategory(Sequence): _fields = [ ('type', ObjectIdentifier, {'implicit': 0}), - ('value', Any, {'implicit': 1}), + ('value', Any, {'explicit': 1}), ] @@ -347,9 +350,9 @@ class SetOfSecurityCategory(SetOf): class Clearance(Sequence): _fields = [ - ('policy_id', ObjectIdentifier, {'implicit': 0}), - ('class_list', ClassList, {'implicit': 1, 'default': 'unclassified'}), - ('security_categories', SetOfSecurityCategory, {'implicit': 2, 'optional': True}), + ('policy_id', ObjectIdentifier), + ('class_list', ClassList, {'default': set(['unclassified'])}), + ('security_categories', 
SetOfSecurityCategory, {'optional': True}), ] @@ -946,6 +949,21 @@ class SMIMEEncryptionKeyPreferences(SetOf): _child_spec = SMIMEEncryptionKeyPreference +class SMIMECapabilityIdentifier(Sequence): + _fields = [ + ('capability_id', EncryptionAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + +class SMIMECapabilites(SequenceOf): + _child_spec = SMIMECapabilityIdentifier + + +class SetOfSMIMECapabilites(SetOf): + _child_spec = SMIMECapabilites + + ContentInfo._oid_specs = { 'data': OctetString, 'signed_data': SignedData, @@ -981,4 +999,5 @@ class SMIMEEncryptionKeyPreferences(SetOf): 'microsoft_nested_signature': SetOfContentInfo, 'microsoft_time_stamp_token': SetOfContentInfo, 'encrypt_key_pref': SMIMEEncryptionKeyPreferences, + 'smime_capabilities': SetOfSMIMECapabilites, } diff --git a/app/lib/package_control/deps/asn1crypto/core.py b/app/lib/package_control/deps/asn1crypto/core.py index 7133367..364c6b5 100644 --- a/app/lib/package_control/deps/asn1crypto/core.py +++ b/app/lib/package_control/deps/asn1crypto/core.py @@ -4113,6 +4113,10 @@ def dump(self, force=False): if self._header is not None and self._header[-1:] == b'\x80': force = True + # We can't force encoding if we don't have a spec + if force and self._fields == [] and self.__class__ is Sequence: + force = False + if force: self._set_contents(force=force) diff --git a/app/lib/package_control/deps/asn1crypto/csr.py b/app/lib/package_control/deps/asn1crypto/csr.py index 7ea2848..7d5ba44 100644 --- a/app/lib/package_control/deps/asn1crypto/csr.py +++ b/app/lib/package_control/deps/asn1crypto/csr.py @@ -4,7 +4,7 @@ ASN.1 type classes for certificate signing requests (CSR). Exports the following items: - - CertificatationRequest() + - CertificationRequest() Other type classes are defined that help compose the types listed above. 
""" @@ -14,11 +14,14 @@ from .algos import SignedDigestAlgorithm from .core import ( Any, + BitString, + BMPString, Integer, ObjectIdentifier, OctetBitString, Sequence, SetOf, + UTF8String ) from .keys import PublicKeyInfo from .x509 import DirectoryString, Extensions, Name @@ -39,6 +42,12 @@ class CSRAttributeType(ObjectIdentifier): '1.2.840.113549.1.9.7': 'challenge_password', '1.2.840.113549.1.9.9': 'extended_certificate_attributes', '1.2.840.113549.1.9.14': 'extension_request', + # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/a5eaae36-e9f3-4dc5-a687-bfa7115954f1 + '1.3.6.1.4.1.311.13.2.2': 'microsoft_enrollment_csp_provider', + # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/7c677cba-030d-48be-ba2b-01e407705f34 + '1.3.6.1.4.1.311.13.2.3': 'microsoft_os_version', + # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/64e5ff6d-c6dd-4578-92f7-b3d895f9b9c7 + '1.3.6.1.4.1.311.21.20': 'microsoft_request_client_info', } @@ -61,6 +70,31 @@ class SetOfExtensions(SetOf): _child_spec = Extensions +class MicrosoftEnrollmentCSProvider(Sequence): + _fields = [ + ('keyspec', Integer), + ('cspname', BMPString), # cryptographic service provider name + ('signature', BitString), + ] + + +class SetOfMicrosoftEnrollmentCSProvider(SetOf): + _child_spec = MicrosoftEnrollmentCSProvider + + +class MicrosoftRequestClientInfo(Sequence): + _fields = [ + ('clientid', Integer), + ('machinename', UTF8String), + ('username', UTF8String), + ('processname', UTF8String), + ] + + +class SetOfMicrosoftRequestClientInfo(SetOf): + _child_spec = MicrosoftRequestClientInfo + + class CRIAttribute(Sequence): _fields = [ ('type', CSRAttributeType), @@ -72,6 +106,9 @@ class CRIAttribute(Sequence): 'challenge_password': SetOfDirectoryString, 'extended_certificate_attributes': SetOfAttributes, 'extension_request': SetOfExtensions, + 'microsoft_enrollment_csp_provider': SetOfMicrosoftEnrollmentCSProvider, + 'microsoft_os_version': 
SetOfDirectoryString, + 'microsoft_request_client_info': SetOfMicrosoftRequestClientInfo, } diff --git a/app/lib/package_control/deps/asn1crypto/keys.py b/app/lib/package_control/deps/asn1crypto/keys.py index 96b763e..b4a87ae 100644 --- a/app/lib/package_control/deps/asn1crypto/keys.py +++ b/app/lib/package_control/deps/asn1crypto/keys.py @@ -666,6 +666,11 @@ class PrivateKeyAlgorithmId(ObjectIdentifier): '1.2.840.10040.4.1': 'dsa', # https://tools.ietf.org/html/rfc3279#page-13 '1.2.840.10045.2.1': 'ec', + # https://tools.ietf.org/html/rfc8410#section-9 + '1.3.101.110': 'x25519', + '1.3.101.111': 'x448', + '1.3.101.112': 'ed25519', + '1.3.101.113': 'ed448', } @@ -707,6 +712,12 @@ def _private_key_spec(self): 'rsassa_pss': RSAPrivateKey, 'dsa': Integer, 'ec': ECPrivateKey, + # These should be treated as opaque octet strings according + # to RFC 8410 + 'x25519': OctetString, + 'x448': OctetString, + 'ed25519': OctetString, + 'ed448': OctetString, }[algorithm] _spec_callbacks = { @@ -741,7 +752,7 @@ def wrap(cls, private_key, algorithm): type_name(private_key) )) - if algorithm == 'rsa': + if algorithm == 'rsa' or algorithm == 'rsassa_pss': if not isinstance(private_key, RSAPrivateKey): private_key = RSAPrivateKey.load(private_key) params = Null() @@ -882,7 +893,7 @@ def hash_algo(self): def algorithm(self): """ :return: - A unicode string of "rsa", "dsa" or "ec" + A unicode string of "rsa", "rsassa_pss", "dsa" or "ec" """ if self._algorithm is None: @@ -897,7 +908,7 @@ def bit_size(self): """ if self._bit_size is None: - if self.algorithm == 'rsa': + if self.algorithm == 'rsa' or self.algorithm == 'rsassa_pss': prime = self['private_key'].parsed['modulus'].native elif self.algorithm == 'dsa': prime = self['private_key_algorithm']['parameters']['p'].native @@ -1017,6 +1028,11 @@ class PublicKeyAlgorithmId(ObjectIdentifier): '1.2.840.10045.2.1': 'ec', # https://tools.ietf.org/html/rfc3279#page-10 '1.2.840.10046.2.1': 'dh', + # 
https://tools.ietf.org/html/rfc8410#section-9 + '1.3.101.110': 'x25519', + '1.3.101.111': 'x448', + '1.3.101.112': 'ed25519', + '1.3.101.113': 'ed448', } @@ -1063,6 +1079,12 @@ def _public_key_spec(self): # decompose the byte string into the constituent X and Y coords 'ec': (ECPointBitString, None), 'dh': Integer, + # These should be treated as opaque bit strings according + # to RFC 8410, and need not even be valid ASN.1 + 'x25519': (OctetBitString, None), + 'x448': (OctetBitString, None), + 'ed25519': (OctetBitString, None), + 'ed448': (OctetBitString, None), }[algorithm] _spec_callbacks = { @@ -1098,7 +1120,7 @@ def wrap(cls, public_key, algorithm): type_name(public_key) )) - if algorithm != 'rsa': + if algorithm != 'rsa' and algorithm != 'rsassa_pss': raise ValueError(unwrap( ''' algorithm must "rsa", not %s @@ -1200,7 +1222,7 @@ def hash_algo(self): def algorithm(self): """ :return: - A unicode string of "rsa", "dsa" or "ec" + A unicode string of "rsa", "rsassa_pss", "dsa" or "ec" """ if self._algorithm is None: @@ -1218,7 +1240,7 @@ def bit_size(self): if self.algorithm == 'ec': self._bit_size = int(((len(self['public_key'].native) - 1) / 2) * 8) else: - if self.algorithm == 'rsa': + if self.algorithm == 'rsa' or self.algorithm == 'rsassa_pss': prime = self['public_key'].parsed['modulus'].native elif self.algorithm == 'dsa': prime = self['algorithm']['parameters']['p'].native diff --git a/app/lib/package_control/deps/asn1crypto/parser.py b/app/lib/package_control/deps/asn1crypto/parser.py index c4f91f6..2f5a63e 100644 --- a/app/lib/package_control/deps/asn1crypto/parser.py +++ b/app/lib/package_control/deps/asn1crypto/parser.py @@ -20,6 +20,7 @@ _PY2 = sys.version_info <= (3,) _INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available' +_MAX_DEPTH = 10 def emit(class_, method, tag, contents): @@ -136,7 +137,7 @@ def peek(contents): return consumed -def _parse(encoded_data, data_len, pointer=0, lengths_only=False): +def 
_parse(encoded_data, data_len, pointer=0, lengths_only=False, depth=0): """ Parses a byte string into component parts @@ -154,83 +155,89 @@ def _parse(encoded_data, data_len, pointer=0, lengths_only=False): number of bytes in the header and the integer number of bytes in the contents. Internal use only. + :param depth: + The recursion depth when evaluating indefinite-length encoding. + :return: A 2-element tuple: - 0: A tuple of (class_, method, tag, header, content, trailer) - 1: An integer indicating how many bytes were consumed """ - if data_len < pointer + 2: - raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (2, data_len - pointer)) + if depth > _MAX_DEPTH: + raise ValueError('Indefinite-length recursion limit exceeded') start = pointer + + if data_len < pointer + 1: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer)) first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] + pointer += 1 tag = first_octet & 31 + constructed = (first_octet >> 5) & 1 # Base 128 length using 8th bit as continuation indicator if tag == 31: tag = 0 while True: + if data_len < pointer + 1: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer)) num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] pointer += 1 + if num == 0x80 and tag == 0: + raise ValueError('Non-minimal tag encoding') tag *= 128 tag += num & 127 if num >> 7 == 0: break + if tag < 31: + raise ValueError('Non-minimal tag encoding') + if data_len < pointer + 1: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer)) length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] pointer += 1 + trailer = b'' if length_octet >> 7 == 0: - if lengths_only: - return (pointer, pointer + (length_octet & 127)) contents_end = pointer + (length_octet & 127) else: length_octets = length_octet & 127 if length_octets: + if data_len < pointer + length_octets: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (length_octets, data_len - 
pointer)) pointer += length_octets contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False) - if lengths_only: - return (pointer, contents_end) else: # To properly parse indefinite length values, we need to scan forward # parsing headers until we find a value with a length of zero. If we # just scanned looking for \x00\x00, nested indefinite length values # would not work. + if not constructed: + raise ValueError('Indefinite-length element must be constructed') contents_end = pointer - while contents_end < data_len: - sub_header_end, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True) - if contents_end == sub_header_end and encoded_data[contents_end - 2:contents_end] == b'\x00\x00': - break - if lengths_only: - return (pointer, contents_end) - if contents_end > data_len: - raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len)) - return ( - ( - first_octet >> 6, - (first_octet >> 5) & 1, - tag, - encoded_data[start:pointer], - encoded_data[pointer:contents_end - 2], - b'\x00\x00' - ), - contents_end - ) + while data_len < contents_end + 2 or encoded_data[contents_end:contents_end+2] != b'\x00\x00': + _, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True, depth=depth+1) + contents_end += 2 + trailer = b'\x00\x00' if contents_end > data_len: - raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len)) + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end - pointer, data_len - pointer)) + + if lengths_only: + return (pointer, contents_end) + return ( ( first_octet >> 6, - (first_octet >> 5) & 1, + constructed, tag, encoded_data[start:pointer], - encoded_data[pointer:contents_end], - b'' + encoded_data[pointer:contents_end-len(trailer)], + trailer ), contents_end ) diff --git a/app/lib/package_control/deps/asn1crypto/tsp.py b/app/lib/package_control/deps/asn1crypto/tsp.py index bd40810..f006da9 100644 --- 
a/app/lib/package_control/deps/asn1crypto/tsp.py +++ b/app/lib/package_control/deps/asn1crypto/tsp.py @@ -169,7 +169,7 @@ class MetaData(Sequence): ] -class TimeStampAndCRL(SequenceOf): +class TimeStampAndCRL(Sequence): _fields = [ ('time_stamp', EncapsulatedContentInfo), ('crl', CertificateList, {'optional': True}), diff --git a/app/lib/package_control/deps/asn1crypto/version.py b/app/lib/package_control/deps/asn1crypto/version.py index 3cf4892..966b57a 100644 --- a/app/lib/package_control/deps/asn1crypto/version.py +++ b/app/lib/package_control/deps/asn1crypto/version.py @@ -2,5 +2,5 @@ from __future__ import unicode_literals, division, absolute_import, print_function -__version__ = '1.4.0' -__version_info__ = (1, 4, 0) +__version__ = '1.5.1' +__version_info__ = (1, 5, 1) diff --git a/app/lib/package_control/deps/asn1crypto/x509.py b/app/lib/package_control/deps/asn1crypto/x509.py index 16f7deb..8cfb2c7 100644 --- a/app/lib/package_control/deps/asn1crypto/x509.py +++ b/app/lib/package_control/deps/asn1crypto/x509.py @@ -987,7 +987,7 @@ def build(cls, name_dict, use_printable=False): :param name_dict: A dict of name information, e.g. 
{"common_name": "Will Bond", - "country_name": "US", "organization": "Codex Non Sufficit LC"} + "country_name": "US", "organization_name": "Codex Non Sufficit LC"} :param use_printable: A bool - if PrintableString should be used for encoding instead of @@ -2079,6 +2079,8 @@ class ExtensionId(ObjectIdentifier): '2.16.840.1.113730.1.1': 'netscape_certificate_type', # https://tools.ietf.org/html/rfc6962.html#page-14 '1.3.6.1.4.1.11129.2.4.2': 'signed_certificate_timestamp_list', + # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/3aec3e50-511a-42f9-a5d5-240af503e470 + '1.3.6.1.4.1.311.20.2': 'microsoft_enroll_certtype', } @@ -2114,6 +2116,9 @@ class Extension(Sequence): 'entrust_version_extension': EntrustVersionInfo, 'netscape_certificate_type': NetscapeCertificateType, 'signed_certificate_timestamp_list': OctetString, + # Not UTF8String as Microsofts docs claim, see: + # https://www.alvestrand.no/objectid/1.3.6.1.4.1.311.20.2.html + 'microsoft_enroll_certtype': BMPString, } diff --git a/app/lib/package_control/deps/oscrypto/_asymmetric.py b/app/lib/package_control/deps/oscrypto/_asymmetric.py index 1e7cca4..aec0385 100644 --- a/app/lib/package_control/deps/oscrypto/_asymmetric.py +++ b/app/lib/package_control/deps/oscrypto/_asymmetric.py @@ -241,10 +241,12 @@ def _unwrap_private_key_info(key_info): - asn1crypto.keys.ECPrivateKey """ - if key_info.algorithm == 'rsa': + key_alg = key_info.algorithm + + if key_alg == 'rsa' or key_alg == 'rsassa_pss': return key_info['private_key'].parsed - if key_info.algorithm == 'dsa': + if key_alg == 'dsa': params = key_info['private_key_algorithm']['parameters'] parsed = key_info['private_key'].parsed return DSAPrivateKey({ @@ -260,7 +262,7 @@ def _unwrap_private_key_info(key_info): 'private_key': parsed, }) - if key_info.algorithm == 'ec': + if key_alg == 'ec': parsed = key_info['private_key'].parsed parsed['parameters'] = key_info['private_key_algorithm']['parameters'] return parsed @@ -660,7 +662,7 @@ def 
_unarmor_pem(data, password=None): data = data.strip() # RSA private keys are encrypted after being DER-encoded, but before base64 - # encoding, so they need to be hanlded specially + # encoding, so they need to be handled specially if pem_header in set(['RSA PRIVATE KEY', 'DSA PRIVATE KEY', 'EC PRIVATE KEY']): algo = armor_type.group(2).lower() return ('private key', algo, _unarmor_pem_openssl_private(headers, der_bytes, password)) diff --git a/app/lib/package_control/deps/oscrypto/_mac/_common_crypto_cffi.py b/app/lib/package_control/deps/oscrypto/_mac/_common_crypto_cffi.py new file mode 100644 index 0000000..30c768b --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_mac/_common_crypto_cffi.py @@ -0,0 +1,29 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .._ffi import register_ffi + +from cffi import FFI + + +__all__ = [ + 'CommonCrypto', +] + + +ffi = FFI() +ffi.cdef(""" + typedef uint32_t CCPBKDFAlgorithm; + + typedef uint32_t CCPseudoRandomAlgorithm; + typedef unsigned int uint; + + int CCKeyDerivationPBKDF(CCPBKDFAlgorithm algorithm, const char *password, size_t passwordLen, + const char *salt, size_t saltLen, CCPseudoRandomAlgorithm prf, uint rounds, + char *derivedKey, size_t derivedKeyLen); +""") + +common_crypto_path = '/usr/lib/system/libcommonCrypto.dylib' + +CommonCrypto = ffi.dlopen(common_crypto_path) +register_ffi(CommonCrypto, ffi) diff --git a/app/lib/package_control/deps/oscrypto/_mac/_core_foundation_cffi.py b/app/lib/package_control/deps/oscrypto/_mac/_core_foundation_cffi.py new file mode 100644 index 0000000..d0c7951 --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_mac/_core_foundation_cffi.py @@ -0,0 +1,375 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .._ffi import ( + buffer_from_bytes, + byte_string_from_buffer, + deref, + is_null, + new, + register_ffi, +) + +from cffi import FFI + + 
+__all__ = [ + 'CFHelpers', + 'CoreFoundation', +] + + +ffi = FFI() +ffi.cdef(""" + typedef bool Boolean; + typedef long CFIndex; + typedef unsigned long CFStringEncoding; + typedef unsigned long CFNumberType; + typedef unsigned long CFTypeID; + + typedef void *CFTypeRef; + typedef CFTypeRef CFArrayRef; + typedef CFTypeRef CFDataRef; + typedef CFTypeRef CFStringRef; + typedef CFTypeRef CFNumberRef; + typedef CFTypeRef CFBooleanRef; + typedef CFTypeRef CFDictionaryRef; + typedef CFTypeRef CFErrorRef; + typedef CFTypeRef CFAllocatorRef; + + typedef struct { + CFIndex version; + void *retain; + void *release; + void *copyDescription; + void *equal; + void *hash; + } CFDictionaryKeyCallBacks; + + typedef struct { + CFIndex version; + void *retain; + void *release; + void *copyDescription; + void *equal; + } CFDictionaryValueCallBacks; + + typedef struct { + CFIndex version; + void *retain; + void *release; + void *copyDescription; + void *equal; + } CFArrayCallBacks; + + CFIndex CFDataGetLength(CFDataRef theData); + const char *CFDataGetBytePtr(CFDataRef theData); + CFDataRef CFDataCreate(CFAllocatorRef allocator, const char *bytes, CFIndex length); + + CFDictionaryRef CFDictionaryCreate(CFAllocatorRef allocator, const void **keys, const void **values, + CFIndex numValues, const CFDictionaryKeyCallBacks *keyCallBacks, + const CFDictionaryValueCallBacks *valueCallBacks); + CFIndex CFDictionaryGetCount(CFDictionaryRef theDict); + + const char *CFStringGetCStringPtr(CFStringRef theString, CFStringEncoding encoding); + Boolean CFStringGetCString(CFStringRef theString, char *buffer, CFIndex bufferSize, CFStringEncoding encoding); + CFStringRef CFStringCreateWithCString(CFAllocatorRef alloc, const char *cStr, CFStringEncoding encoding); + + CFNumberRef CFNumberCreate(CFAllocatorRef allocator, CFNumberType theType, const void *valuePtr); + + CFStringRef CFCopyTypeIDDescription(CFTypeID type_id); + + void CFRelease(CFTypeRef cf); + void CFRetain(CFTypeRef cf); + + CFStringRef 
CFErrorCopyDescription(CFErrorRef err); + CFStringRef CFErrorGetDomain(CFErrorRef err); + CFIndex CFErrorGetCode(CFErrorRef err); + + Boolean CFBooleanGetValue(CFBooleanRef boolean); + + CFTypeID CFDictionaryGetTypeID(void); + CFTypeID CFNumberGetTypeID(void); + CFTypeID CFStringGetTypeID(void); + CFTypeID CFDataGetTypeID(void); + + CFArrayRef CFArrayCreate(CFAllocatorRef allocator, const void **values, CFIndex numValues, + const CFArrayCallBacks *callBacks); + CFIndex CFArrayGetCount(CFArrayRef theArray); + CFTypeRef CFArrayGetValueAtIndex(CFArrayRef theArray, CFIndex idx); + CFNumberType CFNumberGetType(CFNumberRef number); + Boolean CFNumberGetValue(CFNumberRef number, CFNumberType theType, void *valuePtr); + CFIndex CFDictionaryGetKeysAndValues(CFDictionaryRef theDict, const void **keys, const void **values); + CFTypeID CFGetTypeID(CFTypeRef cf); + + extern CFAllocatorRef kCFAllocatorDefault; + extern CFArrayCallBacks kCFTypeArrayCallBacks; + extern CFBooleanRef kCFBooleanTrue; + extern CFDictionaryKeyCallBacks kCFTypeDictionaryKeyCallBacks; + extern CFDictionaryValueCallBacks kCFTypeDictionaryValueCallBacks; +""") + +core_foundation_path = '/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation' + +CoreFoundation = ffi.dlopen(core_foundation_path) +register_ffi(CoreFoundation, ffi) + +kCFNumberCFIndexType = 14 +kCFStringEncodingUTF8 = 0x08000100 + + +class CFHelpers(): + """ + Namespace for core foundation helpers + """ + + _native_map = {} + + @classmethod + def register_native_mapping(cls, type_id, callback): + """ + Register a function to convert a core foundation data type into its + equivalent in python + + :param type_id: + The CFTypeId for the type + + :param callback: + A callback to pass the CFType object to + """ + + cls._native_map[int(type_id)] = callback + + @staticmethod + def cf_number_to_number(value): + """ + Converts a CFNumber object to a python float or integer + + :param value: + The CFNumber object + + :return: + A python 
number (float or integer) + """ + + type_ = CoreFoundation.CFNumberGetType(value) + type_name_ = { + 1: 'int8_t', # kCFNumberSInt8Type + 2: 'in16_t', # kCFNumberSInt16Type + 3: 'int32_t', # kCFNumberSInt32Type + 4: 'int64_t', # kCFNumberSInt64Type + 5: 'float', # kCFNumberFloat32Type + 6: 'double', # kCFNumberFloat64Type + 7: 'char', # kCFNumberCharType + 8: 'short', # kCFNumberShortType + 9: 'int', # kCFNumberIntType + 10: 'long', # kCFNumberLongType + 11: 'long long', # kCFNumberLongLongType + 12: 'float', # kCFNumberFloatType + 13: 'double', # kCFNumberDoubleType + 14: 'long', # kCFNumberCFIndexType + 15: 'int', # kCFNumberNSIntegerType + 16: 'double', # kCFNumberCGFloatType + }[type_] + output = new(CoreFoundation, type_name_ + ' *') + CoreFoundation.CFNumberGetValue(value, type_, output) + return deref(output) + + @staticmethod + def cf_dictionary_to_dict(dictionary): + """ + Converts a CFDictionary object into a python dictionary + + :param dictionary: + The CFDictionary to convert + + :return: + A python dict + """ + + dict_length = CoreFoundation.CFDictionaryGetCount(dictionary) + + keys = new(CoreFoundation, 'CFTypeRef[%s]' % dict_length) + values = new(CoreFoundation, 'CFTypeRef[%s]' % dict_length) + CoreFoundation.CFDictionaryGetKeysAndValues( + dictionary, + keys, + values + ) + + output = {} + for index in range(0, dict_length): + output[CFHelpers.native(keys[index])] = CFHelpers.native(values[index]) + + return output + + @classmethod + def native(cls, value): + """ + Converts a CF* object into its python equivalent + + :param value: + The CF* object to convert + + :return: + The native python object + """ + + type_id = CoreFoundation.CFGetTypeID(value) + if type_id in cls._native_map: + return cls._native_map[type_id](value) + else: + return value + + @staticmethod + def cf_string_to_unicode(value): + """ + Creates a python unicode string from a CFString object + + :param value: + The CFString to convert + + :return: + A python unicode string + """ + 
+ string_ptr = CoreFoundation.CFStringGetCStringPtr( + value, + kCFStringEncodingUTF8 + ) + string = None if is_null(string_ptr) else ffi.string(string_ptr) + if string is None: + buffer = buffer_from_bytes(1024) + result = CoreFoundation.CFStringGetCString( + value, + buffer, + 1024, + kCFStringEncodingUTF8 + ) + if not result: + raise OSError('Error copying C string from CFStringRef') + string = byte_string_from_buffer(buffer) + if string is not None: + string = string.decode('utf-8') + return string + + @staticmethod + def cf_string_from_unicode(string): + """ + Creates a CFStringRef object from a unicode string + + :param string: + The unicode string to create the CFString object from + + :return: + A CFStringRef + """ + + return CoreFoundation.CFStringCreateWithCString( + CoreFoundation.kCFAllocatorDefault, + string.encode('utf-8'), + kCFStringEncodingUTF8 + ) + + @staticmethod + def cf_data_to_bytes(value): + """ + Extracts a bytestring from a CFData object + + :param value: + A CFData object + + :return: + A byte string + """ + + start = CoreFoundation.CFDataGetBytePtr(value) + num_bytes = CoreFoundation.CFDataGetLength(value) + return ffi.buffer(start, num_bytes)[:] + + @staticmethod + def cf_data_from_bytes(bytes_): + """ + Creates a CFDataRef object from a byte string + + :param bytes_: + The data to create the CFData object from + + :return: + A CFDataRef + """ + + return CoreFoundation.CFDataCreate( + CoreFoundation.kCFAllocatorDefault, + bytes_, + len(bytes_) + ) + + @staticmethod + def cf_dictionary_from_pairs(pairs): + """ + Creates a CFDictionaryRef object from a list of 2-element tuples + representing the key and value. Each key should be a CFStringRef and each + value some sort of CF* type. 
+ + :param pairs: + A list of 2-element tuples + + :return: + A CFDictionaryRef + """ + + length = len(pairs) + keys = [] + values = [] + for pair in pairs: + key, value = pair + keys.append(key) + values.append(value) + return CoreFoundation.CFDictionaryCreate( + CoreFoundation.kCFAllocatorDefault, + keys, + values, + length, + ffi.addressof(CoreFoundation.kCFTypeDictionaryKeyCallBacks), + ffi.addressof(CoreFoundation.kCFTypeDictionaryValueCallBacks) + ) + + @staticmethod + def cf_array_from_list(values): + """ + Creates a CFArrayRef object from a list of CF* type objects. + + :param values: + A list of CF* type object + + :return: + A CFArrayRef + """ + + length = len(values) + return CoreFoundation.CFArrayCreate( + CoreFoundation.kCFAllocatorDefault, + values, + length, + ffi.addressof(CoreFoundation.kCFTypeArrayCallBacks) + ) + + @staticmethod + def cf_number_from_integer(integer): + """ + Creates a CFNumber object from an integer + + :param integer: + The integer to create the CFNumber for + + :return: + A CFNumber + """ + + integer_as_long = ffi.new('long *', integer) + return CoreFoundation.CFNumberCreate( + CoreFoundation.kCFAllocatorDefault, + kCFNumberCFIndexType, + integer_as_long + ) diff --git a/app/lib/package_control/deps/oscrypto/_mac/_security.py b/app/lib/package_control/deps/oscrypto/_mac/_security.py index 03ef22a..0d2e08d 100644 --- a/app/lib/package_control/deps/oscrypto/_mac/_security.py +++ b/app/lib/package_control/deps/oscrypto/_mac/_security.py @@ -119,6 +119,7 @@ class SecurityConst(): CSSMERR_TP_CERT_NOT_VALID_YET = -2147409653 CSSMERR_TP_CERT_REVOKED = -2147409652 CSSMERR_TP_NOT_TRUSTED = -2147409622 + CSSMERR_TP_CERT_SUSPENDED = -2147409651 CSSM_CERT_X_509v3 = 0x00000004 diff --git a/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py b/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py new file mode 100644 index 0000000..d277d98 --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py @@ 
-0,0 +1,238 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import platform + +from .._ffi import register_ffi + +from cffi import FFI + + +__all__ = [ + 'Security', + 'version', + 'version_info', +] + + +version = platform.mac_ver()[0] +version_info = tuple(map(int, version.split('.'))) + +if version_info < (10, 7): + raise OSError('Only OS X 10.7 and newer are supported, not %s.%s' % (version_info[0], version_info[1])) + +ffi = FFI() +ffi.cdef(""" + typedef bool Boolean; + typedef long CFIndex; + typedef int32_t OSStatus; + typedef unsigned long CFTypeID; + typedef uint32_t SecTrustSettingsDomain; + typedef uint32_t SecPadding; + typedef uint32_t SecItemImportExportFlags; + typedef uint32_t SecKeyImportExportFlags; + typedef uint32_t SecExternalFormat; + typedef uint32_t SecExternalItemType; + typedef uint32_t CSSM_ALGORITHMS; + typedef uint64_t CSSM_CC_HANDLE; + typedef uint32_t CSSM_KEYUSE; + typedef uint32_t CSSM_CERT_TYPE; + typedef uint32_t SSLProtocol; + typedef uint32_t SSLCipherSuite; + typedef uint32_t SecTrustResultType; + + typedef void *CFTypeRef; + typedef CFTypeRef CFArrayRef; + typedef CFTypeRef CFDataRef; + typedef CFTypeRef CFStringRef; + typedef CFTypeRef CFDictionaryRef; + typedef CFTypeRef CFErrorRef; + typedef CFTypeRef CFAllocatorRef; + + typedef ... *SecKeyRef; + typedef ... *SecCertificateRef; + typedef ... *SecTransformRef; + typedef ... *SecRandomRef; + typedef ... *SecPolicyRef; + typedef ... *SecPolicySearchRef; + typedef ... *SecAccessRef; + typedef struct + { + uint32_t version; + SecKeyImportExportFlags flags; + CFTypeRef passphrase; + CFStringRef alertTitle; + CFStringRef alertPrompt; + SecAccessRef accessRef; + CFArrayRef keyUsage; + CFArrayRef keyAttributes; + } SecItemImportExportKeyParameters; + typedef ... *SecKeychainRef; + typedef ... *SSLContextRef; + typedef ... 
*SecTrustRef; + typedef uint32_t SSLConnectionRef; + + typedef struct { + uint32_t Length; + char *Data; + } CSSM_DATA, CSSM_OID; + + typedef struct { + uint32_t Version; + uint32_t Flags; + CSSM_DATA *LocalResponder; + CSSM_DATA *LocalResponderCert; + } CSSM_APPLE_TP_OCSP_OPTIONS; + + typedef struct { + uint32_t Version; + uint32_t CrlFlags; + void *crlStore; + } CSSM_APPLE_TP_CRL_OPTIONS; + + OSStatus SecKeychainCreate(char *path, uint32_t pass_len, void *pass, + Boolean prompt, SecAccessRef initialAccess, SecKeychainRef *keychain); + OSStatus SecKeychainDelete(SecKeychainRef keychain); + int SecRandomCopyBytes(SecRandomRef rnd, size_t count, char *bytes); + SecKeyRef SecKeyCreateFromData(CFDictionaryRef parameters, CFDataRef keyData, CFErrorRef *error); + SecTransformRef SecEncryptTransformCreate(SecKeyRef keyRef, CFErrorRef *error); + SecTransformRef SecDecryptTransformCreate(SecKeyRef keyRef, CFErrorRef *error); + Boolean SecTransformSetAttribute(SecTransformRef transformRef, CFStringRef key, CFTypeRef value, CFErrorRef *error); + CFTypeRef SecTransformExecute(SecTransformRef transformRef, CFErrorRef *errorRef); + SecTransformRef SecVerifyTransformCreate(SecKeyRef key, CFDataRef signature, CFErrorRef *error); + SecTransformRef SecSignTransformCreate(SecKeyRef key, CFErrorRef *error); + SecCertificateRef SecCertificateCreateWithData(CFAllocatorRef allocator, CFDataRef data); + OSStatus SecCertificateCopyPublicKey(SecCertificateRef certificate, SecKeyRef *key); + CFStringRef SecCopyErrorMessageString(OSStatus status, void *reserved); + OSStatus SecTrustCopyAnchorCertificates(CFArrayRef *anchors); + CFDataRef SecCertificateCopyData(SecCertificateRef certificate); + OSStatus SecTrustSettingsCopyCertificates(SecTrustSettingsDomain domain, CFArrayRef *certArray); + OSStatus SecTrustSettingsCopyTrustSettings(SecCertificateRef certRef, SecTrustSettingsDomain domain, + CFArrayRef *trustSettings); + CFDictionaryRef SecPolicyCopyProperties(SecPolicyRef policyRef); + 
CFTypeID SecPolicyGetTypeID(void); + OSStatus SecKeyEncrypt(SecKeyRef key, SecPadding padding, const char *plainText, size_t plainTextLen, + char *cipherText, size_t *cipherTextLen); + OSStatus SecKeyDecrypt(SecKeyRef key, SecPadding padding, const char *cipherText, size_t cipherTextLen, + char *plainText, size_t *plainTextLen); + OSStatus SecKeyRawSign(SecKeyRef key, SecPadding padding, const char *dataToSign, size_t dataToSignLen, + char *sig, size_t * sigLen); + OSStatus SecKeyRawVerify(SecKeyRef key, SecPadding padding, const char *signedData, size_t signedDataLen, + const char *sig, size_t sigLen); + OSStatus SecItemImport(CFDataRef importedData, CFStringRef fileNameOrExtension, + SecExternalFormat *inputFormat, SecExternalItemType *itemType, + SecItemImportExportFlags flags, const SecItemImportExportKeyParameters *keyParams, + SecKeychainRef importKeychain, CFArrayRef *outItems); + OSStatus SecItemExport(CFTypeRef secItemOrArray, SecExternalFormat outputFormat, SecItemImportExportFlags flags, + const SecItemImportExportKeyParameters *keyParams, CFDataRef *exportedData); + OSStatus SecAccessCreate(CFStringRef descriptor, CFArrayRef trustedlist, SecAccessRef *accessRef); + OSStatus SecKeyCreatePair(SecKeychainRef keychainRef, CSSM_ALGORITHMS algorithm, uint32_t keySizeInBits, + CSSM_CC_HANDLE contextHandle, CSSM_KEYUSE publicKeyUsage, uint32_t publicKeyAttr, + CSSM_KEYUSE privateKeyUsage, uint32_t privateKeyAttr, SecAccessRef initialAccess, + SecKeyRef* publicKeyRef, SecKeyRef* privateKeyRef); + OSStatus SecKeychainItemDelete(SecKeyRef itemRef); + + typedef OSStatus (*SSLReadFunc)(SSLConnectionRef connection, char *data, size_t *dataLength); + typedef OSStatus (*SSLWriteFunc)(SSLConnectionRef connection, const char *data, size_t *dataLength); + OSStatus SSLSetIOFuncs(SSLContextRef context, SSLReadFunc readFunc, SSLWriteFunc writeFunc); + + OSStatus SSLSetPeerID(SSLContextRef context, const char *peerID, size_t peerIDLen); + + OSStatus 
SSLSetConnection(SSLContextRef context, SSLConnectionRef connection); + OSStatus SSLSetPeerDomainName(SSLContextRef context, const char *peerName, size_t peerNameLen); + OSStatus SSLHandshake(SSLContextRef context); + OSStatus SSLGetBufferedReadSize(SSLContextRef context, size_t *bufSize); + OSStatus SSLRead(SSLContextRef context, char *data, size_t dataLength, size_t *processed); + OSStatus SSLWrite(SSLContextRef context, const char *data, size_t dataLength, size_t *processed); + OSStatus SSLClose(SSLContextRef context); + + OSStatus SSLGetNumberSupportedCiphers(SSLContextRef context, size_t *numCiphers); + OSStatus SSLGetSupportedCiphers(SSLContextRef context, SSLCipherSuite *ciphers, size_t *numCiphers); + OSStatus SSLSetEnabledCiphers(SSLContextRef context, const SSLCipherSuite *ciphers, size_t numCiphers); + OSStatus SSLGetNumberEnabledCiphers(SSLContextRef context, size_t *numCiphers); + OSStatus SSLGetEnabledCiphers(SSLContextRef context, SSLCipherSuite *ciphers, size_t *numCiphers); + + OSStatus SSLGetNegotiatedCipher(SSLContextRef context, SSLCipherSuite *cipherSuite); + OSStatus SSLGetNegotiatedProtocolVersion(SSLContextRef context, SSLProtocol *protocol); + + OSStatus SSLCopyPeerTrust(SSLContextRef context, SecTrustRef *trust); + OSStatus SecTrustGetCssmResultCode(SecTrustRef trust, OSStatus *resultCode); + CFIndex SecTrustGetCertificateCount(SecTrustRef trust); + SecCertificateRef SecTrustGetCertificateAtIndex(SecTrustRef trust, CFIndex ix); + OSStatus SecTrustSetAnchorCertificates(SecTrustRef trust, CFArrayRef anchorCertificates); + OSStatus SecTrustSetAnchorCertificatesOnly(SecTrustRef trust, Boolean anchorCertificatesOnly); + OSStatus SecTrustSetPolicies(SecTrustRef trust, CFArrayRef policies); + SecPolicyRef SecPolicyCreateSSL(Boolean server, CFStringRef hostname); + OSStatus SecPolicySearchCreate(CSSM_CERT_TYPE certType, const CSSM_OID *policyOID, const CSSM_DATA *value, + SecPolicySearchRef *searchRef); + OSStatus 
SecPolicySearchCopyNext(SecPolicySearchRef searchRef, SecPolicyRef *policyRef); + OSStatus SecPolicySetValue(SecPolicyRef policyRef, const CSSM_DATA *value); + OSStatus SecTrustEvaluate(SecTrustRef trust, SecTrustResultType *result); + + extern SecRandomRef kSecRandomDefault; + + extern CFStringRef kSecPaddingKey; + extern CFStringRef kSecPaddingPKCS7Key; + extern CFStringRef kSecPaddingPKCS5Key; + extern CFStringRef kSecPaddingPKCS1Key; + extern CFStringRef kSecPaddingOAEPKey; + extern CFStringRef kSecPaddingNoneKey; + extern CFStringRef kSecModeCBCKey; + extern CFStringRef kSecTransformInputAttributeName; + extern CFStringRef kSecDigestTypeAttribute; + extern CFStringRef kSecDigestLengthAttribute; + extern CFStringRef kSecIVKey; + + extern CFStringRef kSecAttrIsExtractable; + + extern CFStringRef kSecDigestSHA1; + extern CFStringRef kSecDigestSHA2; + extern CFStringRef kSecDigestMD5; + + extern CFStringRef kSecAttrKeyType; + + extern CFTypeRef kSecAttrKeyTypeRSA; + extern CFTypeRef kSecAttrKeyTypeDSA; + extern CFTypeRef kSecAttrKeyTypeECDSA; + + extern CFStringRef kSecAttrKeySizeInBits; + extern CFStringRef kSecAttrLabel; + + extern CFTypeRef kSecAttrCanSign; + extern CFTypeRef kSecAttrCanVerify; + + extern CFTypeRef kSecAttrKeyTypeAES; + extern CFTypeRef kSecAttrKeyTypeRC4; + extern CFTypeRef kSecAttrKeyTypeRC2; + extern CFTypeRef kSecAttrKeyType3DES; + extern CFTypeRef kSecAttrKeyTypeDES; +""") + +if version_info < (10, 8): + ffi.cdef(""" + OSStatus SSLNewContext(Boolean isServer, SSLContextRef *contextPtr); + OSStatus SSLDisposeContext(SSLContextRef context); + + OSStatus SSLSetEnableCertVerify(SSLContextRef context, Boolean enableVerify); + + OSStatus SSLSetProtocolVersionEnabled(SSLContextRef context, SSLProtocol protocol, Boolean enable); + """) +else: + ffi.cdef(""" + typedef uint32_t SSLProtocolSide; + typedef uint32_t SSLConnectionType; + typedef uint32_t SSLSessionOption; + + SSLContextRef SSLCreateContext(CFAllocatorRef alloc, SSLProtocolSide 
protocolSide, + SSLConnectionType connectionType); + + OSStatus SSLSetSessionOption(SSLContextRef context, SSLSessionOption option, Boolean value); + + OSStatus SSLSetProtocolVersionMin(SSLContextRef context, SSLProtocol minVersion); + OSStatus SSLSetProtocolVersionMax(SSLContextRef context, SSLProtocol maxVersion); + """) + +security_path = '/System/Library/Frameworks/Security.framework/Security' + +Security = ffi.dlopen(security_path) +register_ffi(Security, ffi) diff --git a/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py b/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py index 1d30aa1..131197b 100644 --- a/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py +++ b/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py @@ -250,8 +250,18 @@ def public_key(self): """ if not self._public_key and self.sec_certificate_ref: + if self.asn1.signature_algo == "rsassa_pss": + # macOS doesn't like importing RSA PSS certs, so we treat it like a + # traditional RSA cert + asn1 = self.asn1.copy() + asn1['tbs_certificate']['subject_public_key_info']['algorithm']['algorithm'] = 'rsa' + temp_cert = _load_x509(asn1) + sec_cert_ref = temp_cert.sec_certificate_ref + else: + sec_cert_ref = self.sec_certificate_ref + sec_public_key_ref_pointer = new(Security, 'SecKeyRef *') - res = Security.SecCertificateCopyPublicKey(self.sec_certificate_ref, sec_public_key_ref_pointer) + res = Security.SecCertificateCopyPublicKey(sec_cert_ref, sec_public_key_ref_pointer) handle_sec_error(res) sec_public_key_ref = unwrap(sec_public_key_ref_pointer) self._public_key = PublicKey(sec_public_key_ref, self.asn1['tbs_certificate']['subject_public_key_info']) @@ -274,6 +284,8 @@ def self_signed(self): if signature_algo == 'rsassa_pkcs1v15': verify_func = rsa_pkcs1v15_verify + elif signature_algo == 'rsassa_pss': + verify_func = rsa_pss_verify elif signature_algo == 'dsa': verify_func = dsa_verify elif signature_algo == 'ecdsa': @@ -832,7 +844,14 @@ def _load_key(key_object): )) if 
isinstance(key_object, PublicKeyInfo): - source = key_object.dump() + if key_object.algorithm == 'rsassa_pss': + # We have to masquerade an RSA PSS key as plain RSA or it won't + # import properly + temp_key_object = key_object.copy() + temp_key_object['algorithm']['algorithm'] = 'rsa' + source = temp_key_object.dump() + else: + source = key_object.dump() item_type = SecurityConst.kSecItemTypePublicKey else: @@ -1392,7 +1411,8 @@ def rsa_pss_verify(certificate_or_public_key, signature, data, hash_algorithm): type_name(data) )) - if certificate_or_public_key.algorithm != 'rsa': + cp_algo = certificate_or_public_key.algorithm + if cp_algo != 'rsa' and cp_algo != 'rsassa_pss': raise ValueError('The key specified is not an RSA public key') hash_length = { @@ -1735,7 +1755,8 @@ def rsa_pss_sign(private_key, data, hash_algorithm): type_name(data) )) - if private_key.algorithm != 'rsa': + pk_algo = private_key.algorithm + if pk_algo != 'rsa' and pk_algo != 'rsassa_pss': raise ValueError('The key specified is not an RSA private key') hash_length = { diff --git a/app/lib/package_control/deps/oscrypto/_mac/tls.py b/app/lib/package_control/deps/oscrypto/_mac/tls.py index a0ca540..f936407 100644 --- a/app/lib/package_control/deps/oscrypto/_mac/tls.py +++ b/app/lib/package_control/deps/oscrypto/_mac/tls.py @@ -50,6 +50,7 @@ raise_expired_not_yet_valid, raise_handshake, raise_hostname, + raise_lifetime_too_long, raise_no_issuer, raise_protocol_error, raise_protocol_version, @@ -103,7 +104,7 @@ def _read_callback(connection_id, data_buffer, data_length_pointer): Callback called by Secure Transport to actually read the socket :param connection_id: - An integer identifing the connection + An integer identifying the connection :param data_buffer: A char pointer FFI type to write the data to @@ -218,7 +219,7 @@ def _write_callback(connection_id, data_buffer, data_length_pointer): Callback called by Secure Transport to actually write to the socket :param connection_id: - An integer 
identifing the connection + An integer identifying the connection :param data_buffer: A char pointer FFI type containing the data to write @@ -463,7 +464,7 @@ def wrap(cls, socket, hostname, session=None): def __init__(self, address, port, timeout=10, session=None): """ :param address: - A unicode string of the domain name or IP address to conenct to + A unicode string of the domain name or IP address to connect to :param port: An integer of the port number to connect to @@ -875,6 +876,7 @@ def _handshake(self): expired = result_code == SecurityConst.CSSMERR_TP_CERT_EXPIRED not_yet_valid = result_code == SecurityConst.CSSMERR_TP_CERT_NOT_VALID_YET bad_hostname = result_code == SecurityConst.CSSMERR_APPLETP_HOSTNAME_MISMATCH + validity_too_long = result_code == SecurityConst.CSSMERR_TP_CERT_SUSPENDED # On macOS 10.12, some expired certificates return errSSLInternal if osx_version_info >= (10, 12): @@ -903,6 +905,9 @@ def _handshake(self): elif self_signed: raise_self_signed(cert) + elif validity_too_long: + raise_lifetime_too_long(cert) + if detect_client_auth_request(self._server_hello): raise_client_auth() diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py index 2881689..1c52488 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py @@ -22,6 +22,7 @@ __all__ = [ 'handle_openssl_error', 'libcrypto', + 'libcrypto_legacy_support', 'libcrypto_version', 'libcrypto_version_info', 'LibcryptoConst', @@ -38,6 +39,17 @@ libcrypto.OPENSSL_config(null()) +# This enables legacy algorithms in OpenSSL 3.0, such as RC2, etc +# which are used by various tests and some old protocols and things +# like PKCS12 +libcrypto_legacy_support = True +if libcrypto_version_info >= (3, ): + if libcrypto.OSSL_PROVIDER_available(null(), "legacy".encode("ascii")): + libcrypto.OSSL_PROVIDER_load(null(), "legacy".encode("ascii")) 
+ else: + libcrypto_legacy_support = False + + def _try_decode(value): try: @@ -57,7 +69,7 @@ def _try_decode(value): def handle_openssl_error(result, exception_class=None): """ - Checks if an error occured, and if so throws an OSError containing the + Checks if an error occurred, and if so throws an OSError containing the last OpenSSL error message :param result: @@ -95,9 +107,15 @@ def peek_openssl_error(): """ error = libcrypto.ERR_peek_error() - lib = int((error >> 24) & 0xff) - func = int((error >> 12) & 0xfff) - reason = int(error & 0xfff) + if libcrypto_version_info < (3, 0): + lib = int((error >> 24) & 0xff) + func = int((error >> 12) & 0xfff) + reason = int(error & 0xfff) + else: + lib = int((error >> 23) & 0xff) + # OpenSSL 3.0 removed ERR_GET_FUNC() + func = 0 + reason = int(error & 0x7fffff) return (lib, func, reason) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py new file mode 100644 index 0000000..8aed03e --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py @@ -0,0 +1,278 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import re + +from .. 
import _backend_config +from .._errors import pretty_message +from .._ffi import get_library, register_ffi +from ..errors import LibraryNotFoundError + +from cffi import FFI + + +__all__ = [ + 'is_libressl', + 'libcrypto', + 'libressl_version', + 'libressl_version_info', + 'version', + 'version_info', +] + +libcrypto_path = _backend_config().get('libcrypto_path') +if libcrypto_path is None: + libcrypto_path = get_library('crypto', 'libcrypto.dylib', '42') +if not libcrypto_path: + raise LibraryNotFoundError('The library libcrypto could not be found') + +try: + vffi = FFI() + vffi.cdef("const char *SSLeay_version(int type);") + version_string = vffi.string(vffi.dlopen(libcrypto_path).SSLeay_version(0)).decode('utf-8') +except (AttributeError): + vffi = FFI() + vffi.cdef("const char *OpenSSL_version(int type);") + version_string = vffi.string(vffi.dlopen(libcrypto_path).OpenSSL_version(0)).decode('utf-8') + +is_libressl = 'LibreSSL' in version_string + +version_match = re.search('\\b(\\d\\.\\d\\.\\d[a-z]*)\\b', version_string) +if not version_match: + version_match = re.search('(?<=LibreSSL )(\\d\\.\\d(\\.\\d)?)\\b', version_string) +if not version_match: + raise LibraryNotFoundError('Error detecting the version of libcrypto') +version = version_match.group(1) +version_parts = re.sub('(\\d)([a-z]+)', '\\1.\\2', version).split('.') +version_info = tuple(int(part) if part.isdigit() else part for part in version_parts) + +# LibreSSL is compatible with libcrypto from OpenSSL 1.0.1 +libressl_version = '' +libressl_version_info = tuple() +if is_libressl: + libressl_version = version + libressl_version_info = version_info + version = '1.0.1' + version_info = (1, 0, 1) + +ffi = FFI() + +libcrypto = ffi.dlopen(libcrypto_path) +register_ffi(libcrypto, ffi) + +if version_info < (0, 9, 8): + raise LibraryNotFoundError(pretty_message( + ''' + OpenSSL versions older than 0.9.8 are not supported - found version %s + ''', + version + )) + +if version_info < (1, 1): + ffi.cdef(""" + 
void ERR_load_crypto_strings(void); + void ERR_free_strings(void); + """) + + +if version_info >= (3, ): + ffi.cdef(""" + typedef ... OSSL_LIB_CTX; + typedef ... OSSL_PROVIDER; + + int OSSL_PROVIDER_available(OSSL_LIB_CTX *libctx, const char *name); + OSSL_PROVIDER *OSSL_PROVIDER_load(OSSL_LIB_CTX *libctx, const char *name); + """) + +# The typedef uintptr_t lines here allow us to check for a NULL pointer, +# without having to redefine the structs in our code. This is kind of a hack, +# but it should cause problems since we treat these as opaque. +ffi.cdef(""" + typedef ... EVP_MD; + typedef uintptr_t EVP_CIPHER_CTX; + typedef ... EVP_CIPHER; + typedef ... ENGINE; + typedef uintptr_t EVP_PKEY; + typedef uintptr_t X509; + typedef uintptr_t DH; + typedef uintptr_t RSA; + typedef uintptr_t DSA; + typedef uintptr_t EC_KEY; + typedef ... EVP_MD_CTX; + typedef ... EVP_PKEY_CTX; + typedef ... BN_GENCB; + typedef ... BIGNUM; + + unsigned long ERR_get_error(void); + char *ERR_error_string(unsigned long e, char *buf); + unsigned long ERR_peek_error(void); + + void OPENSSL_config(const char *config_name); + + EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void); + void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *ctx); + + int EVP_CIPHER_CTX_set_key_length(EVP_CIPHER_CTX *x, int keylen); + int EVP_CIPHER_CTX_set_padding(EVP_CIPHER_CTX *x, int padding); + int EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr); + + const EVP_CIPHER *EVP_aes_128_cbc(void); + const EVP_CIPHER *EVP_aes_192_cbc(void); + const EVP_CIPHER *EVP_aes_256_cbc(void); + const EVP_CIPHER *EVP_des_cbc(void); + const EVP_CIPHER *EVP_des_ede_cbc(void); + const EVP_CIPHER *EVP_des_ede3_cbc(void); + const EVP_CIPHER *EVP_rc4(void); + const EVP_CIPHER *EVP_rc2_cbc(void); + + int EVP_EncryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, + ENGINE *impl, const char *key, + const char *iv); + int EVP_EncryptUpdate(EVP_CIPHER_CTX *ctx, char *out, int *outl, + const char *in, int inl); + int 
EVP_EncryptFinal_ex(EVP_CIPHER_CTX *ctx, char *out, int *outl); + + int EVP_DecryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, + ENGINE *impl, const char *key, + const char *iv); + int EVP_DecryptUpdate(EVP_CIPHER_CTX *ctx, char *out, int *outl, + const char *in, int inl); + int EVP_DecryptFinal_ex(EVP_CIPHER_CTX *ctx, char *out, int *outl); + + EVP_PKEY *d2i_AutoPrivateKey(EVP_PKEY **a, const char **pp, + long length); + EVP_PKEY *d2i_PUBKEY(EVP_PKEY **a, const char **pp, long length); + int i2d_PUBKEY(EVP_PKEY *a, char **pp); + void EVP_PKEY_free(EVP_PKEY *key); + + X509 *d2i_X509(X509 **px, const char **in, int len); + int i2d_X509(X509 *x, char **out); + EVP_PKEY *X509_get_pubkey(X509 *x); + void X509_free(X509 *a); + + RSA *EVP_PKEY_get1_RSA(EVP_PKEY *pkey); + void RSA_free(RSA *r); + + int RSA_public_encrypt(int flen, const char *from, + char *to, RSA *rsa, int padding); + int RSA_private_encrypt(int flen, const char *from, + char *to, RSA *rsa, int padding); + int RSA_public_decrypt(int flen, const char *from, + char *to, RSA *rsa, int padding); + int RSA_private_decrypt(int flen, const char *from, + char *to, RSA *rsa, int padding); + + int EVP_DigestUpdate(EVP_MD_CTX *ctx, const void *d, unsigned int cnt); + + const EVP_MD *EVP_md5(void); + const EVP_MD *EVP_sha1(void); + const EVP_MD *EVP_sha224(void); + const EVP_MD *EVP_sha256(void); + const EVP_MD *EVP_sha384(void); + const EVP_MD *EVP_sha512(void); + + int PKCS12_key_gen_uni(char *pass, int passlen, char *salt, + int saltlen, int id, int iter, int n, + char *out, const EVP_MD *md_type); + + void BN_free(BIGNUM *a); + int BN_dec2bn(BIGNUM **a, const char *str); + + DH *DH_new(void); + int DH_generate_parameters_ex(DH *dh, int prime_len, int generator, BN_GENCB *cb); + int i2d_DHparams(const DH *a, char **pp); + void DH_free(DH *dh); + + RSA *RSA_new(void); + int RSA_generate_key_ex(RSA *rsa, int bits, BIGNUM *e, BN_GENCB *cb); + int i2d_RSAPublicKey(RSA *a, char **pp); + int 
i2d_RSAPrivateKey(RSA *a, char **pp); + + DSA *DSA_new(void); + int DSA_generate_parameters_ex(DSA *dsa, int bits, + const char *seed, int seed_len, int *counter_ret, + unsigned long *h_ret, BN_GENCB *cb); + int DSA_generate_key(DSA *a); + int i2d_DSA_PUBKEY(const DSA *a, char **pp); + int i2d_DSAPrivateKey(const DSA *a, char **pp); + void DSA_free(DSA *dsa); + + EC_KEY *EC_KEY_new_by_curve_name(int nid); + int EC_KEY_generate_key(EC_KEY *key); + void EC_KEY_set_asn1_flag(EC_KEY *, int); + int i2d_ECPrivateKey(EC_KEY *key, char **out); + int i2o_ECPublicKey(EC_KEY *key, char **out); + void EC_KEY_free(EC_KEY *key); +""") + +if version_info < (3, ): + ffi.cdef(""" + int EVP_PKEY_size(EVP_PKEY *pkey); + """) +else: + ffi.cdef(""" + int EVP_PKEY_get_size(EVP_PKEY *pkey); + """) + +if version_info < (1, 1): + ffi.cdef(""" + EVP_MD_CTX *EVP_MD_CTX_create(void); + void EVP_MD_CTX_destroy(EVP_MD_CTX *ctx); + """) +else: + ffi.cdef(""" + EVP_MD_CTX *EVP_MD_CTX_new(void); + void EVP_MD_CTX_free(EVP_MD_CTX *ctx); + """) + +if version_info < (1,): + ffi.cdef(""" + typedef ... *DSA_SIG; + typedef ... 
*ECDSA_SIG; + + DSA_SIG *DSA_do_sign(const char *dgst, int dlen, DSA *dsa); + ECDSA_SIG *ECDSA_do_sign(const char *dgst, int dgst_len, EC_KEY *eckey); + + DSA_SIG *d2i_DSA_SIG(DSA_SIG **v, const char **pp, long length); + ECDSA_SIG *d2i_ECDSA_SIG(ECDSA_SIG **v, const char **pp, long len); + + int i2d_DSA_SIG(const DSA_SIG *a, char **pp); + int i2d_ECDSA_SIG(const ECDSA_SIG *a, char **pp); + + int DSA_do_verify(const char *dgst, int dgst_len, DSA_SIG *sig, DSA *dsa); + int ECDSA_do_verify(const char *dgst, int dgst_len, const ECDSA_SIG *sig, EC_KEY *eckey); + + void DSA_SIG_free(DSA_SIG *a); + void ECDSA_SIG_free(ECDSA_SIG *a); + + DSA *EVP_PKEY_get1_DSA(EVP_PKEY *pkey); + EC_KEY *EVP_PKEY_get1_EC_KEY(EVP_PKEY *pkey); + + int RSA_verify_PKCS1_PSS(RSA *rsa, const char *mHash, + const EVP_MD *Hash, const char *EM, + int sLen); + int RSA_padding_add_PKCS1_PSS(RSA *rsa, char *EM, + const char *mHash, const EVP_MD *Hash, + int sLen); + + int EVP_DigestInit_ex(EVP_MD_CTX *ctx, const EVP_MD *type, ENGINE *impl); + int EVP_SignFinal(EVP_MD_CTX *ctx, char *sig, unsigned int *s, EVP_PKEY *pkey); + int EVP_VerifyFinal(EVP_MD_CTX *ctx, char *sigbuf, unsigned int siglen, EVP_PKEY *pkey); + + void EVP_MD_CTX_set_flags(EVP_MD_CTX *ctx, int flags); + """) +else: + ffi.cdef(""" + int PKCS5_PBKDF2_HMAC(const char *pass, int passlen, + const char *salt, int saltlen, int iter, + const EVP_MD *digest, + int keylen, char *out); + + int EVP_DigestSignInit(EVP_MD_CTX *ctx, EVP_PKEY_CTX **pctx, const EVP_MD *type, ENGINE *e, EVP_PKEY *pkey); + int EVP_DigestSignFinal(EVP_MD_CTX *ctx, char *sig, size_t *siglen); + + int EVP_DigestVerifyInit(EVP_MD_CTX *ctx, EVP_PKEY_CTX **pctx, const EVP_MD *type, ENGINE *e, EVP_PKEY *pkey); + int EVP_DigestVerifyFinal(EVP_MD_CTX *ctx, const char *sig, size_t siglen); + + int EVP_PKEY_CTX_ctrl(EVP_PKEY_CTX *ctx, int keytype, int optype, int cmd, int p1, void *p2); + """) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py 
b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py index f783663..e33ebbc 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py @@ -73,6 +73,8 @@ P_EVP_MD = c_void_p P_ENGINE = c_void_p +OSSL_PROVIDER = c_void_p +OSSL_LIB_CTX = c_void_p P_EVP_PKEY = c_void_p EVP_PKEY_CTX = c_void_p @@ -97,6 +99,13 @@ libcrypto.ERR_free_strings.argtypes = [] libcrypto.ERR_free_strings.restype = None + if version_info >= (3, ): + libcrypto.OSSL_PROVIDER_available.argtypes = [OSSL_LIB_CTX, c_char_p] + libcrypto.OSSL_PROVIDER_available.restype = c_int + + libcrypto.OSSL_PROVIDER_load.argtypes = [OSSL_LIB_CTX, c_char_p] + libcrypto.OSSL_PROVIDER_load.restype = POINTER(OSSL_PROVIDER) + libcrypto.ERR_get_error.argtypes = [] libcrypto.ERR_get_error.restype = c_ulong @@ -301,10 +310,16 @@ libcrypto.EVP_sha512.argtypes = [] libcrypto.EVP_sha512.restype = P_EVP_MD - libcrypto.EVP_PKEY_size.argtypes = [ - P_EVP_PKEY - ] - libcrypto.EVP_PKEY_size.restype = c_int + if version_info < (3, 0): + libcrypto.EVP_PKEY_size.argtypes = [ + P_EVP_PKEY + ] + libcrypto.EVP_PKEY_size.restype = c_int + else: + libcrypto.EVP_PKEY_get_size.argtypes = [ + P_EVP_PKEY + ] + libcrypto.EVP_PKEY_get_size.restype = c_int libcrypto.EVP_PKEY_get1_RSA.argtypes = [ P_EVP_PKEY diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libssl_cffi.py b/app/lib/package_control/deps/oscrypto/_openssl/_libssl_cffi.py new file mode 100644 index 0000000..611f50c --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_openssl/_libssl_cffi.py @@ -0,0 +1,99 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .. 
import _backend_config +from .._ffi import get_library, register_ffi +from ..errors import LibraryNotFoundError +from ._libcrypto import libcrypto_version_info + +from cffi import FFI + + +__all__ = [ + 'libssl', +] + + +ffi = FFI() + +libssl_path = _backend_config().get('libssl_path') +if libssl_path is None: + libssl_path = get_library('ssl', 'libssl', '44') +if not libssl_path: + raise LibraryNotFoundError('The library libssl could not be found') + +libssl = ffi.dlopen(libssl_path) +register_ffi(libssl, ffi) + +ffi.cdef(""" + typedef ... SSL_METHOD; + typedef uintptr_t SSL_CTX; + typedef ... SSL_SESSION; + typedef uintptr_t SSL; + typedef ... BIO_METHOD; + typedef uintptr_t BIO; + typedef uintptr_t X509; + typedef ... X509_STORE; + typedef ... X509_STORE_CTX; + typedef uintptr_t _STACK; + + BIO_METHOD *BIO_s_mem(void); + BIO *BIO_new(BIO_METHOD *type); + int BIO_free(BIO *a); + int BIO_read(BIO *b, void *buf, int len); + int BIO_write(BIO *b, const void *buf, int len); + size_t BIO_ctrl_pending(BIO *b); + + SSL_CTX *SSL_CTX_new(const SSL_METHOD *method); + long SSL_CTX_set_timeout(SSL_CTX *ctx, long t); + void SSL_CTX_set_verify(SSL_CTX *ctx, int mode, + int (*verify_callback)(int, X509_STORE_CTX *)); + int SSL_CTX_set_default_verify_paths(SSL_CTX *ctx); + int SSL_CTX_load_verify_locations(SSL_CTX *ctx, const char *CAfile, + const char *CApath); + long SSL_get_verify_result(const SSL *ssl); + X509_STORE *SSL_CTX_get_cert_store(const SSL_CTX *ctx); + int X509_STORE_add_cert(X509_STORE *ctx, X509 *x); + int SSL_CTX_set_cipher_list(SSL_CTX *ctx, const char *str); + long SSL_CTX_ctrl(SSL_CTX *ctx, int cmd, long larg, void *parg); + void SSL_CTX_free(SSL_CTX *a); + + SSL *SSL_new(SSL_CTX *ctx); + void SSL_free(SSL *ssl); + void SSL_set_bio(SSL *ssl, BIO *rbio, BIO *wbio); + long SSL_ctrl(SSL *ssl, int cmd, long larg, void *parg); + _STACK *SSL_get_peer_cert_chain(const SSL *s); + + SSL_SESSION *SSL_get1_session(const SSL *ssl); + int SSL_set_session(SSL *ssl, 
SSL_SESSION *session); + void SSL_SESSION_free(SSL_SESSION *session); + + void SSL_set_connect_state(SSL *ssl); + int SSL_do_handshake(SSL *ssl); + int SSL_get_error(const SSL *ssl, int ret); + const char *SSL_get_version(const SSL *ssl); + + int SSL_read(SSL *ssl, void *buf, int num); + int SSL_write(SSL *ssl, const void *buf, int num); + int SSL_pending(const SSL *ssl); + + int SSL_shutdown(SSL *ssl); +""") + +if libcrypto_version_info < (1, 1): + ffi.cdef(""" + int sk_num(const _STACK *); + X509 *sk_value(const _STACK *, int); + + int SSL_library_init(void); + void OPENSSL_add_all_algorithms_noconf(void); + + SSL_METHOD *SSLv23_method(void); + """) +else: + ffi.cdef(""" + int OPENSSL_sk_num(const _STACK *); + X509 *OPENSSL_sk_value(const _STACK *, int); + + SSL_METHOD *TLS_method(void); + """) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/asymmetric.py b/app/lib/package_control/deps/oscrypto/_openssl/asymmetric.py index 880bf4d..a823bca 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/asymmetric.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/asymmetric.py @@ -32,6 +32,7 @@ new, null, unwrap, + write_to_buffer, ) from ._libcrypto import libcrypto, LibcryptoConst, libcrypto_version_info, handle_openssl_error from ..errors import AsymmetricKeyError, IncompleteAsymmetricKeyError, SignatureError @@ -105,6 +106,16 @@ def public_key(self): pubkey_data = bytes_from_buffer(pubkey_buffer, pubkey_length) asn1 = PublicKeyInfo.load(pubkey_data) + + # OpenSSL 1.x suffers from issues trying to use RSASSA-PSS keys, so we + # masquerade it as a normal RSA key so the OID checks work + if libcrypto_version_info < (3,) and asn1.algorithm == 'rsassa_pss': + temp_asn1 = asn1.copy() + temp_asn1['algorithm']['algorithm'] = 'rsa' + temp_data = temp_asn1.dump() + write_to_buffer(pubkey_buffer, temp_data) + pubkey_length = len(temp_data) + pub_evp_pkey = libcrypto.d2i_PUBKEY(null(), buffer_pointer(pubkey_buffer), pubkey_length) if is_null(pub_evp_pkey): 
handle_openssl_error(0) @@ -212,8 +223,13 @@ def public_key(self): """ if not self._public_key and self.x509: - evp_pkey = libcrypto.X509_get_pubkey(self.x509) - self._public_key = PublicKey(evp_pkey, self.asn1['tbs_certificate']['subject_public_key_info']) + # OpenSSL 1.x suffers from issues trying to use RSASSA-PSS keys, so we + # masquerade it as a normal RSA key so the OID checks work + if libcrypto_version_info < (3,) and self.asn1.public_key.algorithm == 'rsassa_pss': + self._public_key = load_public_key(self.asn1.public_key) + else: + evp_pkey = libcrypto.X509_get_pubkey(self.x509) + self._public_key = PublicKey(evp_pkey, self.asn1.public_key) return self._public_key @@ -233,6 +249,8 @@ def self_signed(self): if signature_algo == 'rsassa_pkcs1v15': verify_func = rsa_pkcs1v15_verify + elif signature_algo == 'rsassa_pss': + verify_func = rsa_pss_verify elif signature_algo == 'dsa': verify_func = dsa_verify elif signature_algo == 'ecdsa': @@ -692,7 +710,7 @@ def load_public_key(source): source must be a byte string, unicode string or asn1crypto.keys.PublicKeyInfo object, not %s ''', - type_name(public_key) + type_name(source) )) if public_key.algorithm == 'dsa': @@ -712,7 +730,15 @@ def load_public_key(source): ''' )) - data = public_key.dump() + # OpenSSL 1.x suffers from issues trying to use RSASSA-PSS keys, so we + # masquerade it as a normal RSA key so the OID checks work + if libcrypto_version_info < (3,) and public_key.algorithm == 'rsassa_pss': + temp_key = public_key.copy() + temp_key['algorithm']['algorithm'] = 'rsa' + data = temp_key.dump() + else: + data = public_key.dump() + buffer = buffer_from_bytes(data) evp_pkey = libcrypto.d2i_PUBKEY(null(), buffer_pointer(buffer), len(data)) if is_null(evp_pkey): @@ -928,6 +954,22 @@ def rsa_oaep_decrypt(private_key, ciphertext): return _decrypt(private_key, ciphertext, LibcryptoConst.RSA_PKCS1_OAEP_PADDING) +def _evp_pkey_get_size(evp_pkey): + """ + Handles the function name change from OpenSSL 1.1 -> 3.0 + + 
:param evp_pkey: + The EVP_PKEY of the Certificate or PublicKey to get the size of + + :return: + An int of the number of bytes necessary for the key + """ + + if libcrypto_version_info < (3, ): + return libcrypto.EVP_PKEY_size(evp_pkey) + return libcrypto.EVP_PKEY_get_size(evp_pkey) + + def _encrypt(certificate_or_public_key, data, padding): """ Encrypts plaintext using an RSA public key or certificate @@ -970,7 +1012,7 @@ def _encrypt(certificate_or_public_key, data, padding): rsa = None try: - buffer_size = libcrypto.EVP_PKEY_size(certificate_or_public_key.evp_pkey) + buffer_size = _evp_pkey_get_size(certificate_or_public_key.evp_pkey) buffer = buffer_from_bytes(buffer_size) rsa = libcrypto.EVP_PKEY_get1_RSA(certificate_or_public_key.evp_pkey) @@ -1025,7 +1067,7 @@ def _decrypt(private_key, ciphertext, padding): rsa = None try: - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) buffer = buffer_from_bytes(buffer_size) rsa = libcrypto.EVP_PKEY_get1_RSA(private_key.evp_pkey) @@ -1105,7 +1147,9 @@ def rsa_pss_verify(certificate_or_public_key, signature, data, hash_algorithm): OSError - when an error is returned by the OS crypto library """ - if certificate_or_public_key.algorithm != 'rsa': + cp_alg = certificate_or_public_key.algorithm + + if cp_alg != 'rsa' and cp_alg != 'rsassa_pss': raise ValueError(pretty_message( ''' The key specified is not an RSA public key, but %s @@ -1235,13 +1279,16 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ type_name(data) )) + cp_alg = certificate_or_public_key.algorithm + cp_is_rsa = cp_alg == 'rsa' or cp_alg == 'rsassa_pss' + valid_hash_algorithms = set(['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']) - if certificate_or_public_key.algorithm == 'rsa' and not rsa_pss_padding: + if cp_is_rsa and not rsa_pss_padding: valid_hash_algorithms |= set(['raw']) if hash_algorithm not in valid_hash_algorithms:
valid_hash_algorithms_error = '"md5", "sha1", "sha224", "sha256", "sha384", "sha512"' - if certificate_or_public_key.algorithm == 'rsa' and not rsa_pss_padding: + if cp_is_rsa and not rsa_pss_padding: valid_hash_algorithms_error += ', "raw"' raise ValueError(pretty_message( ''' @@ -1251,16 +1298,16 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ repr(hash_algorithm) )) - if certificate_or_public_key.algorithm != 'rsa' and rsa_pss_padding: + if not cp_is_rsa and rsa_pss_padding: raise ValueError(pretty_message( ''' PSS padding can only be used with RSA keys - the key provided is a %s key ''', - certificate_or_public_key.algorithm.upper() + cp_alg.upper() )) - if certificate_or_public_key.algorithm == 'rsa' and hash_algorithm == 'raw': + if cp_is_rsa and hash_algorithm == 'raw': if len(data) > certificate_or_public_key.byte_size - 11: raise ValueError(pretty_message( ''' @@ -1279,7 +1326,7 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ if is_null(rsa): handle_openssl_error(0) - buffer_size = libcrypto.EVP_PKEY_size(certificate_or_public_key.evp_pkey) + buffer_size = _evp_pkey_get_size(certificate_or_public_key.evp_pkey) decrypted_buffer = buffer_from_bytes(buffer_size) decrypted_length = libcrypto.RSA_public_decrypt( len(signature), @@ -1323,14 +1370,14 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ }[hash_algorithm]() if libcrypto_version_info < (1,): - if certificate_or_public_key.algorithm == 'rsa' and rsa_pss_padding: + if cp_is_rsa and rsa_pss_padding: digest = getattr(hashlib, hash_algorithm)(data).digest() rsa = libcrypto.EVP_PKEY_get1_RSA(certificate_or_public_key.evp_pkey) if is_null(rsa): handle_openssl_error(0) - buffer_size = libcrypto.EVP_PKEY_size(certificate_or_public_key.evp_pkey) + buffer_size = _evp_pkey_get_size(certificate_or_public_key.evp_pkey) decoded_buffer = buffer_from_bytes(buffer_size) decoded_length = libcrypto.RSA_public_decrypt( 
len(signature), @@ -1349,7 +1396,7 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ LibcryptoConst.EVP_MD_CTX_FLAG_PSS_MDLEN ) - elif certificate_or_public_key.algorithm == 'rsa': + elif cp_is_rsa: res = libcrypto.EVP_DigestInit_ex(evp_md_ctx, evp_md, null()) handle_openssl_error(res) @@ -1363,7 +1410,7 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ certificate_or_public_key.evp_pkey ) - elif certificate_or_public_key.algorithm == 'dsa': + elif cp_alg == 'dsa': digest = getattr(hashlib, hash_algorithm)(data).digest() signature_buffer = buffer_from_bytes(signature) @@ -1378,7 +1425,7 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ res = libcrypto.DSA_do_verify(digest, len(digest), dsa_sig, dsa) - elif certificate_or_public_key.algorithm == 'ec': + elif cp_alg == 'ec': digest = getattr(hashlib, hash_algorithm)(data).digest() signature_buffer = buffer_from_bytes(signature) @@ -1418,15 +1465,16 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ handle_openssl_error(res) # Use the hash algorithm output length as the salt length - res = libcrypto.EVP_PKEY_CTX_ctrl( - evp_pkey_ctx_pointer, - LibcryptoConst.EVP_PKEY_RSA, - LibcryptoConst.EVP_PKEY_OP_SIGN | LibcryptoConst.EVP_PKEY_OP_VERIFY, - LibcryptoConst.EVP_PKEY_CTRL_RSA_PSS_SALTLEN, - -1, - null() - ) - handle_openssl_error(res) + if libcrypto_version_info < (3, 0): + res = libcrypto.EVP_PKEY_CTX_ctrl( + evp_pkey_ctx_pointer, + LibcryptoConst.EVP_PKEY_RSA, + LibcryptoConst.EVP_PKEY_OP_SIGN | LibcryptoConst.EVP_PKEY_OP_VERIFY, + LibcryptoConst.EVP_PKEY_CTRL_RSA_PSS_SALTLEN, + -1, + null() + ) + handle_openssl_error(res) res = libcrypto.EVP_DigestUpdate(evp_md_ctx, data, len(data)) handle_openssl_error(res) @@ -1519,12 +1567,14 @@ def rsa_pss_sign(private_key, data, hash_algorithm): A byte string of the signature """ - if private_key.algorithm != 'rsa': + pkey_alg = 
private_key.algorithm + + if pkey_alg != 'rsa' and pkey_alg != 'rsassa_pss': raise ValueError(pretty_message( ''' The key specified is not an RSA private key, but %s ''', - private_key.algorithm.upper() + pkey_alg.upper() )) return _sign(private_key, data, hash_algorithm, rsa_pss_padding=True) @@ -1637,13 +1687,16 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): type_name(data) )) + pkey_alg = private_key.algorithm + pkey_is_rsa = pkey_alg == 'rsa' or pkey_alg == 'rsassa_pss' + valid_hash_algorithms = set(['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']) - if private_key.algorithm == 'rsa' and not rsa_pss_padding: + if pkey_alg == 'rsa' and not rsa_pss_padding: valid_hash_algorithms |= set(['raw']) if hash_algorithm not in valid_hash_algorithms: valid_hash_algorithms_error = '"md5", "sha1", "sha224", "sha256", "sha384", "sha512"' - if private_key.algorithm == 'rsa' and not rsa_pss_padding: + if pkey_is_rsa and not rsa_pss_padding: valid_hash_algorithms_error += ', "raw"' raise ValueError(pretty_message( ''' @@ -1653,16 +1706,16 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): repr(hash_algorithm) )) - if private_key.algorithm != 'rsa' and rsa_pss_padding: + if not pkey_is_rsa and rsa_pss_padding: raise ValueError(pretty_message( ''' PSS padding can only be used with RSA keys - the key provided is a %s key ''', - private_key.algorithm.upper() + pkey_alg.upper() )) - if private_key.algorithm == 'rsa' and hash_algorithm == 'raw': + if pkey_is_rsa and hash_algorithm == 'raw': if len(data) > private_key.byte_size - 11: raise ValueError(pretty_message( ''' @@ -1681,7 +1734,7 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): if is_null(rsa): handle_openssl_error(0) - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) signature_buffer = buffer_from_bytes(buffer_size) signature_length = libcrypto.RSA_private_encrypt( @@ -1722,14 
+1775,14 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): }[hash_algorithm]() if libcrypto_version_info < (1,): - if private_key.algorithm == 'rsa' and rsa_pss_padding: + if pkey_is_rsa and rsa_pss_padding: digest = getattr(hashlib, hash_algorithm)(data).digest() rsa = libcrypto.EVP_PKEY_get1_RSA(private_key.evp_pkey) if is_null(rsa): handle_openssl_error(0) - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) em_buffer = buffer_from_bytes(buffer_size) res = libcrypto.RSA_padding_add_PKCS1_PSS( rsa, @@ -1750,8 +1803,8 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): ) handle_openssl_error(signature_length) - elif private_key.algorithm == 'rsa': - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + elif pkey_is_rsa: + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) signature_buffer = buffer_from_bytes(buffer_size) signature_length = new(libcrypto, 'unsigned int *') @@ -1771,7 +1824,7 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): signature_length = deref(signature_length) - elif private_key.algorithm == 'dsa': + elif pkey_alg == 'dsa': digest = getattr(hashlib, hash_algorithm)(data).digest() dsa = libcrypto.EVP_PKEY_get1_DSA(private_key.evp_pkey) @@ -1788,7 +1841,7 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): signature_length = libcrypto.i2d_DSA_SIG(dsa_sig, signature_pointer) handle_openssl_error(signature_length) - elif private_key.algorithm == 'ec': + elif pkey_alg == 'ec': digest = getattr(hashlib, hash_algorithm)(data).digest() ec_key = libcrypto.EVP_PKEY_get1_EC_KEY(private_key.evp_pkey) @@ -1806,7 +1859,7 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): handle_openssl_error(signature_length) else: - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) signature_buffer = 
buffer_from_bytes(buffer_size) signature_length = new(libcrypto, 'size_t *', buffer_size) @@ -1834,15 +1887,16 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): handle_openssl_error(res) # Use the hash algorithm output length as the salt length - res = libcrypto.EVP_PKEY_CTX_ctrl( - evp_pkey_ctx_pointer, - LibcryptoConst.EVP_PKEY_RSA, - LibcryptoConst.EVP_PKEY_OP_SIGN | LibcryptoConst.EVP_PKEY_OP_VERIFY, - LibcryptoConst.EVP_PKEY_CTRL_RSA_PSS_SALTLEN, - -1, - null() - ) - handle_openssl_error(res) + if libcrypto_version_info < (3, 0): + res = libcrypto.EVP_PKEY_CTX_ctrl( + evp_pkey_ctx_pointer, + LibcryptoConst.EVP_PKEY_RSA, + LibcryptoConst.EVP_PKEY_OP_SIGN | LibcryptoConst.EVP_PKEY_OP_VERIFY, + LibcryptoConst.EVP_PKEY_CTRL_RSA_PSS_SALTLEN, + -1, + null() + ) + handle_openssl_error(res) res = libcrypto.EVP_DigestUpdate(evp_md_ctx, data, len(data)) handle_openssl_error(res) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/symmetric.py b/app/lib/package_control/deps/oscrypto/_openssl/symmetric.py index f41bca5..d390f89 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/symmetric.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/symmetric.py @@ -5,7 +5,7 @@ from .._errors import pretty_message from .._ffi import new, null, is_null, buffer_from_bytes, bytes_from_buffer, deref -from ._libcrypto import libcrypto, LibcryptoConst, handle_openssl_error +from ._libcrypto import libcrypto, libcrypto_legacy_support, LibcryptoConst, handle_openssl_error from ..util import rand_bytes from .._types import type_name, byte_cls @@ -236,6 +236,9 @@ def rc4_encrypt(key, data): A byte string of the ciphertext """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without RC4 support') + if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' @@ -266,6 +269,9 @@ def rc4_decrypt(key, data): A byte string of the plaintext """ + if not libcrypto_legacy_support: + raise 
EnvironmentError('OpenSSL has been compiled without RC4 support') + if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' @@ -301,6 +307,9 @@ def rc2_cbc_pkcs5_encrypt(key, data, iv): A tuple of two byte strings (iv, ciphertext) """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without RC2 support') + if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' @@ -345,6 +354,9 @@ def rc2_cbc_pkcs5_decrypt(key, data, iv): A byte string of the plaintext """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without RC2 support') + if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' @@ -487,6 +499,9 @@ def des_cbc_pkcs5_encrypt(key, data, iv): A tuple of two byte strings (iv, ciphertext) """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without DES support') + if len(key) != 8: raise ValueError(pretty_message( ''' @@ -530,6 +545,9 @@ def des_cbc_pkcs5_decrypt(key, data, iv): A byte string of the plaintext """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without DES support') + if len(key) != 8: raise ValueError(pretty_message( ''' @@ -604,23 +622,9 @@ def _encrypt(cipher, key, data, iv, padding): if cipher != 'rc4' and not padding: # AES in CBC mode can be allowed with no padding if - # the data is an exact multiple of the key size - aes128_no_padding = ( - cipher == 'aes128' and - padding is False and - len(data) % 16 == 0 - ) - aes192_no_padding = ( - cipher == 'aes192' and - padding is False and - len(data) % 24 == 0 - ) - aes256_no_padding = ( - cipher == 'aes256' and - padding is False and - len(data) % 32 == 0 - ) - if aes128_no_padding is False and aes192_no_padding is False and aes256_no_padding is False: + # the data is an exact multiple of the block size + is_aes = cipher in set(['aes128', 'aes192', 'aes256']) + if not is_aes or (is_aes and (len(data) % 
16) != 0): raise ValueError('padding must be specified') evp_cipher_ctx = None @@ -730,7 +734,7 @@ def _decrypt(cipher, key, data, iv, padding): type_name(iv) )) - if cipher != 'rc4' and padding is None: + if cipher not in set(['rc4', 'aes128', 'aes192', 'aes256']) and not padding: raise ValueError('padding must be specified') evp_cipher_ctx = None diff --git a/app/lib/package_control/deps/oscrypto/_openssl/tls.py b/app/lib/package_control/deps/oscrypto/_openssl/tls.py index 8d64580..6f180f4 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/tls.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/tls.py @@ -65,6 +65,25 @@ } +def _homogenize_openssl3_error(error_tuple): + """ + Takes a 3-element tuple from peek_openssl_error() and modifies it + to handle the changes in OpenSSL 3.0. That release removed the + concept of an error function, meaning the second item in the tuple + will always be 0. + + :param error_tuple: + A 3-element tuple of integers + + :return: + A 3-element tuple of integers + """ + + if libcrypto_version_info < (3,): + return error_tuple + return (error_tuple[0], 0, error_tuple[2]) + + class TLSSession(object): """ A TLS session object that multiple TLSSocket objects can share for the @@ -372,7 +391,7 @@ def wrap(cls, socket, hostname, session=None): def __init__(self, address, port, timeout=10, session=None): """ :param address: - A unicode string of the domain name or IP address to conenct to + A unicode string of the domain name or IP address to connect to :param port: An integer of the port number to connect to @@ -516,16 +535,22 @@ def _handshake(self): LibsslConst.SSL_F_SSL3_CHECK_CERT_AND_ALGORITHM, LibsslConst.SSL_R_DH_KEY_TOO_SMALL ) + dh_key_info_1 = _homogenize_openssl3_error(dh_key_info_1) + dh_key_info_2 = ( LibsslConst.ERR_LIB_SSL, LibsslConst.SSL_F_TLS_PROCESS_SKE_DHE, LibsslConst.SSL_R_DH_KEY_TOO_SMALL ) + dh_key_info_2 = _homogenize_openssl3_error(dh_key_info_2) + dh_key_info_3 = ( LibsslConst.ERR_LIB_SSL, 
LibsslConst.SSL_F_SSL3_GET_KEY_EXCHANGE, LibsslConst.SSL_R_BAD_DH_P_LENGTH ) + dh_key_info_3 = _homogenize_openssl3_error(dh_key_info_3) + if info == dh_key_info_1 or info == dh_key_info_2 or info == dh_key_info_3: raise_dh_params() @@ -541,6 +566,8 @@ def _handshake(self): LibsslConst.SSL_F_SSL3_GET_RECORD, LibsslConst.SSL_R_WRONG_VERSION_NUMBER ) + unknown_protocol_info = _homogenize_openssl3_error(unknown_protocol_info) + if info == unknown_protocol_info: raise_protocol_error(handshake_server_bytes) @@ -549,6 +576,7 @@ def _handshake(self): LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO, LibsslConst.SSL_R_TLSV1_ALERT_PROTOCOL_VERSION ) + tls_version_info_error = _homogenize_openssl3_error(tls_version_info_error) if info == tls_version_info_error: raise_protocol_version() @@ -557,7 +585,9 @@ def _handshake(self): LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO, LibsslConst.SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE ) - if info == handshake_error_info: + # OpenSSL 3.0 no longer has func codes, so this can be confused + # with the following handler which needs to check for client auth + if libcrypto_version_info < (3, ) and info == handshake_error_info: raise_handshake() handshake_failure_info = ( @@ -565,6 +595,7 @@ def _handshake(self): LibsslConst.SSL_F_SSL3_READ_BYTES, LibsslConst.SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE ) + handshake_failure_info = _homogenize_openssl3_error(handshake_failure_info) if info == handshake_failure_info: saw_client_auth = False for record_type, _, record_data in parse_tls_records(handshake_server_bytes): @@ -590,6 +621,7 @@ def _handshake(self): LibsslConst.SSL_F_TLS_PROCESS_SERVER_CERTIFICATE, LibsslConst.SSL_R_CERTIFICATE_VERIFY_FAILED ) + cert_verify_failed_info = _homogenize_openssl3_error(cert_verify_failed_info) # It would appear that some versions of OpenSSL (such as on Fedora 30) # don't even have the MD5 digest algorithm included any longer? 
To @@ -599,6 +631,7 @@ def _handshake(self): LibsslConst.ASN1_F_ASN1_ITEM_VERIFY, LibsslConst.ASN1_R_UNKNOWN_MESSAGE_DIGEST_ALGORITHM ) + unknown_hash_algo_info = _homogenize_openssl3_error(unknown_hash_algo_info) if info == unknown_hash_algo_info: chain = extract_chain(handshake_server_bytes) diff --git a/app/lib/package_control/deps/oscrypto/_pkcs1.py b/app/lib/package_control/deps/oscrypto/_pkcs1.py index 2044b84..66f5ed3 100644 --- a/app/lib/package_control/deps/oscrypto/_pkcs1.py +++ b/app/lib/package_control/deps/oscrypto/_pkcs1.py @@ -651,7 +651,7 @@ def raw_rsa_private_crypt(private_key, data): )) algo = private_key.asn1['private_key_algorithm']['algorithm'].native - if algo != 'rsa': + if algo != 'rsa' and algo != 'rsassa_pss': raise ValueError(pretty_message( ''' private_key must be an RSA key, not %s @@ -712,7 +712,7 @@ def raw_rsa_public_crypt(certificate_or_public_key, data): )) algo = certificate_or_public_key.asn1['algorithm']['algorithm'].native - if algo != 'rsa': + if algo != 'rsa' and algo != 'rsassa_pss': raise ValueError(pretty_message( ''' certificate_or_public_key must be an RSA key, not %s diff --git a/app/lib/package_control/deps/oscrypto/_pkcs12.py b/app/lib/package_control/deps/oscrypto/_pkcs12.py index b8f584c..b788178 100644 --- a/app/lib/package_control/deps/oscrypto/_pkcs12.py +++ b/app/lib/package_control/deps/oscrypto/_pkcs12.py @@ -190,7 +190,7 @@ def pkcs12_kdf(hash_algorithm, password, salt, iterations, key_length, id_): i = i[0:start] + i_num2 + i[end:] - # Step 7 (one peice at a time) + # Step 7 (one piece at a time) begin = (num - 1) * u to_copy = min(key_length, u) a = a[0:begin] + a2[0:to_copy] + a[begin + to_copy:] diff --git a/app/lib/package_control/deps/oscrypto/_tls.py b/app/lib/package_control/deps/oscrypto/_tls.py index 181d82b..260e9cf 100644 --- a/app/lib/package_control/deps/oscrypto/_tls.py +++ b/app/lib/package_control/deps/oscrypto/_tls.py @@ -465,6 +465,22 @@ def raise_self_signed(certificate): raise 
TLSVerificationError(message, certificate) +def raise_lifetime_too_long(certificate): + """ + Raises a TLSVerificationError due to a certificate lifetime exceeding + the CAB forum certificate lifetime limit + + :param certificate: + An asn1crypto.x509.Certificate object + + :raises: + TLSVerificationError + """ + + message = 'Server certificate verification failed - certificate lifetime is too long' + raise TLSVerificationError(message, certificate) + + def raise_expired_not_yet_valid(certificate): """ Raises a TLSVerificationError due to certificate being expired, or not yet diff --git a/app/lib/package_control/deps/oscrypto/_win/_advapi32.py b/app/lib/package_control/deps/oscrypto/_win/_advapi32.py index 5066e11..f2250cb 100644 --- a/app/lib/package_control/deps/oscrypto/_win/_advapi32.py +++ b/app/lib/package_control/deps/oscrypto/_win/_advapi32.py @@ -34,7 +34,7 @@ def open_context_handle(provider, verify_only=True): else: raise ValueError('Invalid provider specified: %s' % provider) - # Ths DSS provider needs a container to allow importing and exporting + # The DSS provider needs a container to allow importing and exporting # private keys, but all of the RSA stuff works fine with CRYPT_VERIFYCONTEXT if verify_only or provider != Advapi32Const.MS_ENH_DSS_DH_PROV: container_name = null() diff --git a/app/lib/package_control/deps/oscrypto/_win/_advapi32_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_advapi32_cffi.py new file mode 100644 index 0000000..49932ff --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_advapi32_cffi.py @@ -0,0 +1,145 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +import cffi + + +__all__ = [ + 'advapi32', + 'get_error', +] + + +ffi = cffi.FFI() +if cffi.__version_info__ >= (0, 9): + ffi.set_unicode(True) +ffi.cdef(""" + typedef HANDLE HCRYPTPROV; + 
typedef HANDLE HCRYPTKEY; + typedef HANDLE HCRYPTHASH; + typedef unsigned int ALG_ID; + + typedef struct _CRYPTOAPI_BLOB { + DWORD cbData; + BYTE *pbData; + } CRYPT_INTEGER_BLOB, CRYPT_OBJID_BLOB, CRYPT_DER_BLOB, CRYPT_ATTR_BLOB; + + typedef struct _CRYPT_ALGORITHM_IDENTIFIER { + LPSTR pszObjId; + CRYPT_OBJID_BLOB Parameters; + } CRYPT_ALGORITHM_IDENTIFIER; + + typedef struct _CRYPT_BIT_BLOB { + DWORD cbData; + BYTE *pbData; + DWORD cUnusedBits; + } CRYPT_BIT_BLOB; + + typedef struct _CERT_PUBLIC_KEY_INFO { + CRYPT_ALGORITHM_IDENTIFIER Algorithm; + CRYPT_BIT_BLOB PublicKey; + } CERT_PUBLIC_KEY_INFO; + + typedef struct _CRYPT_ATTRIBUTE { + LPSTR pszObjId; + DWORD cValue; + CRYPT_ATTR_BLOB *rgValue; + } CRYPT_ATTRIBUTE; + + typedef struct _CRYPT_ATTRIBUTES { + DWORD cAttr; + CRYPT_ATTRIBUTE *rgAttr; + } CRYPT_ATTRIBUTES; + + typedef struct _CRYPT_PRIVATE_KEY_INFO { + DWORD Version; + CRYPT_ALGORITHM_IDENTIFIER Algorithm; + CRYPT_DER_BLOB PrivateKey; + CRYPT_ATTRIBUTES *pAttributes; + } CRYPT_PRIVATE_KEY_INFO; + + typedef struct _PUBLICKEYSTRUC { + BYTE bType; + BYTE bVersion; + WORD reserved; + ALG_ID aiKeyAlg; + } BLOBHEADER, PUBLICKEYSTRUC; + + typedef struct _DSSPUBKEY { + DWORD magic; + DWORD bitlen; + } DSSPUBKEY; + + typedef struct _DSSBLOBHEADER { + PUBLICKEYSTRUC publickeystruc; + DSSPUBKEY dsspubkey; + } DSSBLOBHEADER; + + typedef struct _RSAPUBKEY { + DWORD magic; + DWORD bitlen; + DWORD pubexp; + } RSAPUBKEY; + + typedef struct _RSABLOBHEADER { + PUBLICKEYSTRUC publickeystruc; + RSAPUBKEY rsapubkey; + } RSABLOBHEADER; + + typedef struct _PLAINTEXTKEYBLOB { + BLOBHEADER hdr; + DWORD dwKeySize; + // rgbKeyData omitted since it is a flexible array member + } PLAINTEXTKEYBLOB; + + typedef struct _DSSSEED { + DWORD counter; + BYTE seed[20]; + } DSSSEED; + + BOOL CryptAcquireContextW(HCRYPTPROV *phProv, LPCWSTR pszContainer, LPCWSTR pszProvider, + DWORD dwProvType, DWORD dwFlags); + BOOL CryptReleaseContext(HCRYPTPROV hProv, DWORD dwFlags); + + BOOL 
CryptImportKey(HCRYPTPROV hProv, BYTE *pbData, DWORD dwDataLen, + HCRYPTKEY hPubKey, DWORD dwFlags, HCRYPTKEY *phKey); + BOOL CryptGenKey(HCRYPTPROV hProv, ALG_ID Algid, DWORD dwFlags, HCRYPTKEY *phKey); + BOOL CryptGetKeyParam(HCRYPTKEY hKey, DWORD dwParam, BYTE *pbData, DWORD *pdwDataLen, DWORD dwFlags); + BOOL CryptSetKeyParam(HCRYPTKEY hKey, DWORD dwParam, void *pbData, DWORD dwFlags); + BOOL CryptExportKey(HCRYPTKEY hKey, HCRYPTKEY hExpKey, DWORD dwBlobType, + DWORD dwFlags, BYTE *pbData, DWORD *pdwDataLen); + BOOL CryptDestroyKey(HCRYPTKEY hKey); + + BOOL CryptCreateHash(HCRYPTPROV hProv, ALG_ID Algid, HCRYPTKEY hKey, + DWORD dwFlags, HCRYPTHASH *phHash); + BOOL CryptHashData(HCRYPTHASH hHash, BYTE *pbData, DWORD dwDataLen, DWORD dwFlags); + BOOL CryptSetHashParam(HCRYPTHASH hHash, DWORD dwParam, BYTE *pbData, DWORD dwFlags); + BOOL CryptSignHashW(HCRYPTHASH hHash, DWORD dwKeySpec, LPCWSTR sDescription, + DWORD dwFlags, BYTE *pbSignature, DWORD *pdwSigLen); + BOOL CryptVerifySignatureW(HCRYPTHASH hHash, BYTE *pbSignature, DWORD dwSigLen, + HCRYPTKEY hPubKey, LPCWSTR sDescription, DWORD dwFlags); + BOOL CryptDestroyHash(HCRYPTHASH hHash); + + BOOL CryptEncrypt(HCRYPTKEY hKey, HCRYPTHASH hHash, BOOL Final, DWORD dwFlags, + BYTE *pbData, DWORD *pdwDataLen, DWORD dwBufLen); + BOOL CryptDecrypt(HCRYPTKEY hKey, HCRYPTHASH hHash, BOOL Final, DWORD dwFlags, + BYTE *pbData, DWORD *pdwDataLen); +""") + + +try: + advapi32 = ffi.dlopen('advapi32.dll') + register_ffi(advapi32, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('advapi32.dll could not be found') + raise + + +def get_error(): + return ffi.getwinerror() diff --git a/app/lib/package_control/deps/oscrypto/_win/_cng_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_cng_cffi.py new file mode 100644 index 0000000..3e9d5ce --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_cng_cffi.py @@ -0,0 +1,120 @@ +# coding: utf-8 +from __future__ 
import unicode_literals, division, absolute_import, print_function + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +from cffi import FFI + + +__all__ = [ + 'bcrypt', +] + + +ffi = FFI() +ffi.cdef(""" + typedef HANDLE BCRYPT_ALG_HANDLE; + typedef HANDLE BCRYPT_KEY_HANDLE; + typedef ULONG NTSTATUS; + typedef unsigned char *PUCHAR; + typedef unsigned char *PBYTE; + + + typedef struct _BCRYPT_RSAKEY_BLOB { + ULONG Magic; + ULONG BitLength; + ULONG cbPublicExp; + ULONG cbModulus; + ULONG cbPrime1; + ULONG cbPrime2; + } BCRYPT_RSAKEY_BLOB; + + typedef struct _BCRYPT_DSA_KEY_BLOB { + ULONG dwMagic; + ULONG cbKey; + UCHAR Count[4]; + UCHAR Seed[20]; + UCHAR q[20]; + } BCRYPT_DSA_KEY_BLOB; + + typedef struct _BCRYPT_DSA_KEY_BLOB_V2 { + ULONG dwMagic; + ULONG cbKey; + INT hashAlgorithm; + INT standardVersion; + ULONG cbSeedLength; + ULONG cbGroupSize; + UCHAR Count[4]; + } BCRYPT_DSA_KEY_BLOB_V2; + + typedef struct _BCRYPT_ECCKEY_BLOB { + ULONG dwMagic; + ULONG cbKey; + } BCRYPT_ECCKEY_BLOB; + + typedef struct _BCRYPT_PKCS1_PADDING_INFO { + LPCWSTR pszAlgId; + } BCRYPT_PKCS1_PADDING_INFO; + + typedef struct _BCRYPT_PSS_PADDING_INFO { + LPCWSTR pszAlgId; + ULONG cbSalt; + } BCRYPT_PSS_PADDING_INFO; + + typedef struct _BCRYPT_OAEP_PADDING_INFO { + LPCWSTR pszAlgId; + PUCHAR pbLabel; + ULONG cbLabel; + } BCRYPT_OAEP_PADDING_INFO; + + typedef struct _BCRYPT_KEY_DATA_BLOB_HEADER { + ULONG dwMagic; + ULONG dwVersion; + ULONG cbKeyData; + } BCRYPT_KEY_DATA_BLOB_HEADER; + + NTSTATUS BCryptOpenAlgorithmProvider(BCRYPT_ALG_HANDLE *phAlgorithm, LPCWSTR pszAlgId, LPCWSTR pszImplementation, + DWORD dwFlags); + NTSTATUS BCryptCloseAlgorithmProvider(BCRYPT_ALG_HANDLE hAlgorithm, DWORD dwFlags); + NTSTATUS BCryptSetProperty(HANDLE hObject, LPCWSTR pszProperty, ULONG *pbInput, ULONG cbInput, ULONG dwFlags); + + NTSTATUS BCryptImportKeyPair(BCRYPT_ALG_HANDLE hAlgorithm, BCRYPT_KEY_HANDLE hImportKey, LPCWSTR pszBlobType, + 
BCRYPT_KEY_HANDLE *phKey, PUCHAR pbInput, ULONG cbInput, ULONG dwFlags); + NTSTATUS BCryptImportKey(BCRYPT_ALG_HANDLE hAlgorithm, BCRYPT_KEY_HANDLE hImportKey, LPCWSTR pszBlobType, + BCRYPT_KEY_HANDLE *phKey, PUCHAR pbKeyObject, ULONG cbKeyObject, PUCHAR pbInput, ULONG cbInput, + ULONG dwFlags); + NTSTATUS BCryptDestroyKey(BCRYPT_KEY_HANDLE hKey); + + NTSTATUS BCryptVerifySignature(BCRYPT_KEY_HANDLE hKey, void *pPaddingInfo, PUCHAR pbHash, ULONG cbHash, + PUCHAR pbSignature, ULONG cbSignature, ULONG dwFlags); + NTSTATUS BCryptSignHash(BCRYPT_KEY_HANDLE hKey, void * pPaddingInfo, PBYTE pbInput, DWORD cbInput, PBYTE pbOutput, + DWORD cbOutput, DWORD *pcbResult, ULONG dwFlags); + + NTSTATUS BCryptEncrypt(BCRYPT_KEY_HANDLE hKey, PUCHAR pbInput, ULONG cbInput, void *pPaddingInfo, PUCHAR pbIV, + ULONG cbIV, PUCHAR pbOutput, ULONG cbOutput, ULONG *pcbResult, ULONG dwFlags); + NTSTATUS BCryptDecrypt(BCRYPT_KEY_HANDLE hKey, PUCHAR pbInput, ULONG cbInput, void *pPaddingInfo, PUCHAR pbIV, + ULONG cbIV, PUCHAR pbOutput, ULONG cbOutput, ULONG *pcbResult, ULONG dwFlags); + + NTSTATUS BCryptDeriveKeyPBKDF2(BCRYPT_ALG_HANDLE hPrf, PUCHAR pbPassword, ULONG cbPassword, PUCHAR pbSalt, + ULONG cbSalt, ULONGLONG cIterations, PUCHAR pbDerivedKey, ULONG cbDerivedKey, ULONG dwFlags); + + NTSTATUS BCryptGenRandom(BCRYPT_ALG_HANDLE hAlgorithm, PUCHAR pbBuffer, ULONG cbBuffer, ULONG dwFlags); + + NTSTATUS BCryptGenerateKeyPair(BCRYPT_ALG_HANDLE hAlgorithm, BCRYPT_KEY_HANDLE *phKey, ULONG dwLength, + ULONG dwFlags); + NTSTATUS BCryptFinalizeKeyPair(BCRYPT_KEY_HANDLE hKey, ULONG dwFlags); + NTSTATUS BCryptExportKey(BCRYPT_KEY_HANDLE hKey, BCRYPT_KEY_HANDLE hExportKey, LPCWSTR pszBlobType, + PUCHAR pbOutput, ULONG cbOutput, ULONG *pcbResult, ULONG dwFlags); +""") + + +try: + bcrypt = ffi.dlopen('bcrypt.dll') + register_ffi(bcrypt, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('bcrypt.dll could not be found - Windows XP and Server 
2003 are not supported') + raise diff --git a/app/lib/package_control/deps/oscrypto/_win/_crypt32_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_crypt32_cffi.py new file mode 100644 index 0000000..3952682 --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_crypt32_cffi.py @@ -0,0 +1,188 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import sys + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +import cffi + + +__all__ = [ + 'crypt32', + 'get_error', +] + + +ffi = cffi.FFI() +if cffi.__version_info__ >= (0, 9): + ffi.set_unicode(True) +if sys.maxsize > 2 ** 32: + ffi.cdef("typedef uint64_t ULONG_PTR;") +else: + ffi.cdef("typedef unsigned long ULONG_PTR;") +ffi.cdef(""" + typedef HANDLE HCERTSTORE; + typedef unsigned char *PBYTE; + + + typedef struct _CRYPTOAPI_BLOB { + DWORD cbData; + PBYTE pbData; + } CRYPTOAPI_BLOB; + typedef CRYPTOAPI_BLOB CRYPT_INTEGER_BLOB; + typedef CRYPTOAPI_BLOB CERT_NAME_BLOB; + typedef CRYPTOAPI_BLOB CRYPT_BIT_BLOB; + typedef CRYPTOAPI_BLOB CRYPT_OBJID_BLOB; + + typedef struct _CRYPT_ALGORITHM_IDENTIFIER { + LPSTR pszObjId; + CRYPT_OBJID_BLOB Parameters; + } CRYPT_ALGORITHM_IDENTIFIER; + + typedef struct _FILETIME { + DWORD dwLowDateTime; + DWORD dwHighDateTime; + } FILETIME; + + typedef struct _CERT_PUBLIC_KEY_INFO { + CRYPT_ALGORITHM_IDENTIFIER Algorithm; + CRYPT_BIT_BLOB PublicKey; + } CERT_PUBLIC_KEY_INFO; + + typedef struct _CERT_EXTENSION { + LPSTR pszObjId; + BOOL fCritical; + CRYPT_OBJID_BLOB Value; + } CERT_EXTENSION, *PCERT_EXTENSION; + + typedef struct _CERT_INFO { + DWORD dwVersion; + CRYPT_INTEGER_BLOB SerialNumber; + CRYPT_ALGORITHM_IDENTIFIER SignatureAlgorithm; + CERT_NAME_BLOB Issuer; + FILETIME NotBefore; + FILETIME NotAfter; + CERT_NAME_BLOB Subject; + CERT_PUBLIC_KEY_INFO SubjectPublicKeyInfo; + CRYPT_BIT_BLOB IssuerUniqueId; + CRYPT_BIT_BLOB SubjectUniqueId; + DWORD cExtension; + 
PCERT_EXTENSION *rgExtension; + } CERT_INFO, *PCERT_INFO; + + typedef struct _CERT_CONTEXT { + DWORD dwCertEncodingType; + PBYTE pbCertEncoded; + DWORD cbCertEncoded; + PCERT_INFO pCertInfo; + HCERTSTORE hCertStore; + } CERT_CONTEXT, *PCERT_CONTEXT; + + typedef struct _CERT_TRUST_STATUS { + DWORD dwErrorStatus; + DWORD dwInfoStatus; + } CERT_TRUST_STATUS, *PCERT_TRUST_STATUS; + + typedef struct _CERT_ENHKEY_USAGE { + DWORD cUsageIdentifier; + LPSTR *rgpszUsageIdentifier; + } CERT_ENHKEY_USAGE, *PCERT_ENHKEY_USAGE; + + typedef struct _CERT_CHAIN_ELEMENT { + DWORD cbSize; + PCERT_CONTEXT pCertContext; + CERT_TRUST_STATUS TrustStatus; + void *pRevocationInfo; + PCERT_ENHKEY_USAGE pIssuanceUsage; + PCERT_ENHKEY_USAGE pApplicationUsage; + LPCWSTR pwszExtendedErrorInfo; + } CERT_CHAIN_ELEMENT, *PCERT_CHAIN_ELEMENT; + + typedef struct _CERT_SIMPLE_CHAIN { + DWORD cbSize; + CERT_TRUST_STATUS TrustStatus; + DWORD cElement; + PCERT_CHAIN_ELEMENT *rgpElement; + void *pTrustListInfo; + BOOL fHasRevocationFreshnessTime; + DWORD dwRevocationFreshnessTime; + } CERT_SIMPLE_CHAIN, *PCERT_SIMPLE_CHAIN; + + typedef struct _CERT_CHAIN_CONTEXT { + DWORD cbSize; + CERT_TRUST_STATUS TrustStatus; + DWORD cChain; + PCERT_SIMPLE_CHAIN *rgpChain; + DWORD cLowerQualityChainContext; + void *rgpLowerQualityChainContext; + BOOL fHasRevocationFreshnessTime; + DWORD dwRevocationFreshnessTime; + } CERT_CHAIN_CONTEXT, *PCERT_CHAIN_CONTEXT; + + typedef struct _CERT_USAGE_MATCH { + DWORD dwType; + CERT_ENHKEY_USAGE Usage; + } CERT_USAGE_MATCH; + + typedef struct _CERT_CHAIN_PARA { + DWORD cbSize; + CERT_USAGE_MATCH RequestedUsage; + } CERT_CHAIN_PARA; + + typedef struct _CERT_CHAIN_POLICY_PARA { + DWORD cbSize; + DWORD dwFlags; + void *pvExtraPolicyPara; + } CERT_CHAIN_POLICY_PARA; + + typedef struct _HTTPSPolicyCallbackData { + DWORD cbSize; + DWORD dwAuthType; + DWORD fdwChecks; + WCHAR *pwszServerName; + } SSL_EXTRA_CERT_CHAIN_POLICY_PARA; + + typedef struct _CERT_CHAIN_POLICY_STATUS { + DWORD 
cbSize; + DWORD dwError; + LONG lChainIndex; + LONG lElementIndex; + void *pvExtraPolicyStatus; + } CERT_CHAIN_POLICY_STATUS; + + typedef HANDLE HCERTCHAINENGINE; + typedef HANDLE HCRYPTPROV; + + HCERTSTORE CertOpenStore(LPCSTR lpszStoreProvider, DWORD dwMsgAndCertEncodingType, HCRYPTPROV hCryptProv, + DWORD dwFlags, void *pvPara); + BOOL CertAddEncodedCertificateToStore(HCERTSTORE hCertStore, DWORD dwCertEncodingType, BYTE *pbCertEncoded, + DWORD cbCertEncoded, DWORD dwAddDisposition, PCERT_CONTEXT *ppCertContext); + BOOL CertGetCertificateChain(HCERTCHAINENGINE hChainEngine, CERT_CONTEXT *pCertContext, FILETIME *pTime, + HCERTSTORE hAdditionalStore, CERT_CHAIN_PARA *pChainPara, DWORD dwFlags, void *pvReserved, + PCERT_CHAIN_CONTEXT *ppChainContext); + BOOL CertVerifyCertificateChainPolicy(ULONG_PTR pszPolicyOID, PCERT_CHAIN_CONTEXT pChainContext, + CERT_CHAIN_POLICY_PARA *pPolicyPara, CERT_CHAIN_POLICY_STATUS *pPolicyStatus); + void CertFreeCertificateChain(PCERT_CHAIN_CONTEXT pChainContext); + + HCERTSTORE CertOpenSystemStoreW(HANDLE hprov, LPCWSTR szSubsystemProtocol); + PCERT_CONTEXT CertEnumCertificatesInStore(HCERTSTORE hCertStore, CERT_CONTEXT *pPrevCertContext); + BOOL CertCloseStore(HCERTSTORE hCertStore, DWORD dwFlags); + BOOL CertGetEnhancedKeyUsage(CERT_CONTEXT *pCertContext, DWORD dwFlags, CERT_ENHKEY_USAGE *pUsage, DWORD *pcbUsage); +""") + + +try: + crypt32 = ffi.dlopen('crypt32.dll') + register_ffi(crypt32, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('crypt32.dll could not be found') + raise + + +def get_error(): + return ffi.getwinerror() diff --git a/app/lib/package_control/deps/oscrypto/_win/_kernel32_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_kernel32_cffi.py new file mode 100644 index 0000000..1ddbae5 --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_kernel32_cffi.py @@ -0,0 +1,44 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, 
absolute_import, print_function + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +import cffi + + +__all__ = [ + 'get_error', + 'kernel32', +] + + +ffi = cffi.FFI() +if cffi.__version_info__ >= (0, 9): + ffi.set_unicode(True) +ffi.cdef(""" + typedef long long LARGE_INTEGER; + BOOL QueryPerformanceCounter(LARGE_INTEGER *lpPerformanceCount); + + typedef struct _FILETIME { + DWORD dwLowDateTime; + DWORD dwHighDateTime; + } FILETIME; + + void GetSystemTimeAsFileTime(FILETIME *lpSystemTimeAsFileTime); +""") + + +try: + kernel32 = ffi.dlopen('kernel32.dll') + register_ffi(kernel32, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('kernel32.dll could not be found') + raise + + +def get_error(): + return ffi.getwinerror() diff --git a/app/lib/package_control/deps/oscrypto/_win/_secur32_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_secur32_cffi.py new file mode 100644 index 0000000..2d80e2b --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_secur32_cffi.py @@ -0,0 +1,129 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import sys + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +import cffi + + +__all__ = [ + 'get_error', + 'secur32', +] + + +ffi = cffi.FFI() +if cffi.__version_info__ >= (0, 9): + ffi.set_unicode(True) +if sys.maxsize > 2 ** 32: + ffi.cdef("typedef uint64_t ULONG_PTR;") +else: + ffi.cdef("typedef unsigned long ULONG_PTR;") +ffi.cdef(""" + typedef HANDLE HCERTSTORE; + typedef unsigned int ALG_ID; + typedef WCHAR SEC_WCHAR; + typedef unsigned long SECURITY_STATUS; + typedef void *LUID; + typedef void *SEC_GET_KEY_FN; + + typedef struct _SecHandle { + ULONG_PTR dwLower; + ULONG_PTR dwUpper; + } SecHandle; + typedef SecHandle CredHandle; + typedef SecHandle CtxtHandle; + + typedef struct _SCHANNEL_CRED 
{ + DWORD dwVersion; + DWORD cCreds; + void *paCred; + HCERTSTORE hRootStore; + DWORD cMappers; + void **aphMappers; + DWORD cSupportedAlgs; + ALG_ID *palgSupportedAlgs; + DWORD grbitEnabledProtocols; + DWORD dwMinimumCipherStrength; + DWORD dwMaximumCipherStrength; + DWORD dwSessionLifespan; + DWORD dwFlags; + DWORD dwCredFormat; + } SCHANNEL_CRED; + + typedef struct _TimeStamp { + DWORD dwLowDateTime; + DWORD dwHighDateTime; + } TimeStamp; + + typedef struct _SecBuffer { + ULONG cbBuffer; + ULONG BufferType; + BYTE *pvBuffer; + } SecBuffer; + + typedef struct _SecBufferDesc { + ULONG ulVersion; + ULONG cBuffers; + SecBuffer *pBuffers; + } SecBufferDesc; + + typedef struct _SecPkgContext_StreamSizes { + ULONG cbHeader; + ULONG cbTrailer; + ULONG cbMaximumMessage; + ULONG cBuffers; + ULONG cbBlockSize; + } SecPkgContext_StreamSizes; + + typedef struct _CERT_CONTEXT { + DWORD dwCertEncodingType; + BYTE *pbCertEncoded; + DWORD cbCertEncoded; + void *pCertInfo; + HCERTSTORE hCertStore; + } CERT_CONTEXT; + + typedef struct _SecPkgContext_ConnectionInfo { + DWORD dwProtocol; + ALG_ID aiCipher; + DWORD dwCipherStrength; + ALG_ID aiHash; + DWORD dwHashStrength; + ALG_ID aiExch; + DWORD dwExchStrength; + } SecPkgContext_ConnectionInfo; + + SECURITY_STATUS AcquireCredentialsHandleW(SEC_WCHAR *pszPrincipal, SEC_WCHAR *pszPackage, ULONG fCredentialUse, + LUID *pvLogonID, void *pAuthData, SEC_GET_KEY_FN pGetKeyFn, void *pvGetKeyArgument, + CredHandle *phCredential, TimeStamp *ptsExpiry); + SECURITY_STATUS FreeCredentialsHandle(CredHandle *phCredential); + SECURITY_STATUS InitializeSecurityContextW(CredHandle *phCredential, CtxtHandle *phContext, + SEC_WCHAR *pszTargetName, ULONG fContextReq, ULONG Reserved1, ULONG TargetDataRep, + SecBufferDesc *pInput, ULONG Reserved2, CtxtHandle *phNewContext, SecBufferDesc *pOutput, + ULONG *pfContextAttr, TimeStamp *ptsExpiry); + SECURITY_STATUS FreeContextBuffer(void *pvContextBuffer); + SECURITY_STATUS ApplyControlToken(CtxtHandle 
*phContext, SecBufferDesc *pInput); + SECURITY_STATUS DeleteSecurityContext(CtxtHandle *phContext); + SECURITY_STATUS QueryContextAttributesW(CtxtHandle *phContext, ULONG ulAttribute, void *pBuffer); + SECURITY_STATUS EncryptMessage(CtxtHandle *phContext, ULONG fQOP, SecBufferDesc *pMessage, ULONG MessageSeqNo); + SECURITY_STATUS DecryptMessage(CtxtHandle *phContext, SecBufferDesc *pMessage, ULONG MessageSeqNo, ULONG *pfQOP); +""") + + +try: + secur32 = ffi.dlopen('secur32.dll') + register_ffi(secur32, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('secur32.dll could not be found') + raise + + +def get_error(): + return ffi.getwinerror() diff --git a/app/lib/package_control/deps/oscrypto/_win/asymmetric.py b/app/lib/package_control/deps/oscrypto/_win/asymmetric.py index 9e6eb46..dc985b0 100644 --- a/app/lib/package_control/deps/oscrypto/_win/asymmetric.py +++ b/app/lib/package_control/deps/oscrypto/_win/asymmetric.py @@ -567,6 +567,8 @@ def self_signed(self): if signature_algo == 'rsassa_pkcs1v15': verify_func = rsa_pkcs1v15_verify + elif signature_algo == 'rsassa_pss': + verify_func = rsa_pss_verify elif signature_algo == 'dsa': verify_func = dsa_verify elif signature_algo == 'ecdsa': @@ -1650,8 +1652,10 @@ def _advapi32_load_key(key_object, key_info, container): key_type = 'public' if isinstance(key_info, PublicKeyInfo) else 'private' algo = key_info.algorithm + if algo == 'rsassa_pss': + algo = 'rsa' - if algo == 'rsa': + if algo == 'rsa' or algo == 'rsassa_pss': provider = Advapi32Const.MS_ENH_RSA_AES_PROV else: provider = Advapi32Const.MS_ENH_DSS_DH_PROV @@ -1844,6 +1848,8 @@ def _bcrypt_load_key(key_object, key_info, container, curve_name): key_type = 'public' if isinstance(key_info, PublicKeyInfo) else 'private' algo = key_info.algorithm + if algo == 'rsassa_pss': + algo = 'rsa' try: alg_selector = key_info.curve[1] if algo == 'ec' else algo @@ -2282,7 +2288,9 @@ def 
rsa_pss_verify(certificate_or_public_key, signature, data, hash_algorithm): OSError - when an error is returned by the OS crypto library """ - if certificate_or_public_key.algorithm != 'rsa': + cp_alg = certificate_or_public_key.algorithm + + if cp_alg != 'rsa' and cp_alg != 'rsassa_pss': raise ValueError('The key specified is not an RSA public key') return _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_padding=True) @@ -2397,13 +2405,16 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ type_name(data) )) + cp_alg = certificate_or_public_key.algorithm + cp_is_rsa = cp_alg == 'rsa' or cp_alg == 'rsassa_pss' + valid_hash_algorithms = set(['md5', 'sha1', 'sha256', 'sha384', 'sha512']) - if certificate_or_public_key.algorithm == 'rsa' and not rsa_pss_padding: + if cp_is_rsa and not rsa_pss_padding: valid_hash_algorithms |= set(['raw']) if hash_algorithm not in valid_hash_algorithms: valid_hash_algorithms_error = '"md5", "sha1", "sha256", "sha384", "sha512"' - if certificate_or_public_key.algorithm == 'rsa' and not rsa_pss_padding: + if cp_is_rsa and not rsa_pss_padding: valid_hash_algorithms_error += ', "raw"' raise ValueError(pretty_message( ''' @@ -2413,13 +2424,13 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ repr(hash_algorithm) )) - if certificate_or_public_key.algorithm != 'rsa' and rsa_pss_padding is not False: + if not cp_is_rsa and rsa_pss_padding is not False: raise ValueError(pretty_message( ''' PSS padding may only be used with RSA keys - signing via a %s key was requested ''', - certificate_or_public_key.algorithm.upper() + cp_alg.upper() )) if hash_algorithm == 'raw': @@ -2468,8 +2479,9 @@ def _advapi32_verify(certificate_or_public_key, signature, data, hash_algorithm, """ algo = certificate_or_public_key.algorithm + algo_is_rsa = algo == 'rsa' or algo == 'rsassa_pss' - if algo == 'rsa' and rsa_pss_padding: + if algo_is_rsa and rsa_pss_padding: hash_length 
= { 'sha1': 20, 'sha224': 28, @@ -2483,7 +2495,7 @@ def _advapi32_verify(certificate_or_public_key, signature, data, hash_algorithm, raise SignatureError('Signature is invalid') return - if algo == 'rsa' and hash_algorithm == 'raw': + if algo_is_rsa and hash_algorithm == 'raw': padded_plaintext = raw_rsa_public_crypt(certificate_or_public_key, signature) try: plaintext = remove_pkcs1v15_signature_padding(certificate_or_public_key.byte_size, padded_plaintext) @@ -2591,7 +2603,10 @@ def _bcrypt_verify(certificate_or_public_key, signature, data, hash_algorithm, r padding_info = null() flags = 0 - if certificate_or_public_key.algorithm == 'rsa': + cp_alg = certificate_or_public_key.algorithm + cp_is_rsa = cp_alg == 'rsa' or cp_alg == 'rsassa_pss' + + if cp_is_rsa: if rsa_pss_padding: flags = BcryptConst.BCRYPT_PAD_PSS padding_info_struct_pointer = struct(bcrypt, 'BCRYPT_PSS_PADDING_INFO') @@ -2694,7 +2709,9 @@ def rsa_pss_sign(private_key, data, hash_algorithm): A byte string of the signature """ - if private_key.algorithm != 'rsa': + pkey_alg = private_key.algorithm + + if pkey_alg != 'rsa' and pkey_alg != 'rsassa_pss': raise ValueError('The key specified is not an RSA private key') return _sign(private_key, data, hash_algorithm, rsa_pss_padding=True) @@ -2797,13 +2814,16 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): type_name(data) )) + pkey_alg = private_key.algorithm + pkey_is_rsa = pkey_alg == 'rsa' or pkey_alg == 'rsassa_pss' + valid_hash_algorithms = set(['md5', 'sha1', 'sha256', 'sha384', 'sha512']) if private_key.algorithm == 'rsa' and not rsa_pss_padding: valid_hash_algorithms |= set(['raw']) if hash_algorithm not in valid_hash_algorithms: valid_hash_algorithms_error = '"md5", "sha1", "sha256", "sha384", "sha512"' - if private_key.algorithm == 'rsa' and not rsa_pss_padding: + if pkey_is_rsa and not rsa_pss_padding: valid_hash_algorithms_error += ', "raw"' raise ValueError(pretty_message( ''' @@ -2813,13 +2833,13 @@ def 
_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): repr(hash_algorithm) )) - if private_key.algorithm != 'rsa' and rsa_pss_padding is not False: + if not pkey_is_rsa and rsa_pss_padding is not False: raise ValueError(pretty_message( ''' PSS padding may only be used with RSA keys - signing via a %s key was requested ''', - private_key.algorithm.upper() + pkey_alg.upper() )) if hash_algorithm == 'raw': @@ -2867,12 +2887,13 @@ def _advapi32_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): """ algo = private_key.algorithm + algo_is_rsa = algo == 'rsa' or algo == 'rsassa_pss' - if algo == 'rsa' and hash_algorithm == 'raw': + if algo_is_rsa and hash_algorithm == 'raw': padded_data = add_pkcs1v15_signature_padding(private_key.byte_size, data) return raw_rsa_private_crypt(private_key, padded_data) - if algo == 'rsa' and rsa_pss_padding: + if algo_is_rsa and rsa_pss_padding: hash_length = { 'sha1': 20, 'sha224': 28, @@ -3003,7 +3024,10 @@ def _bcrypt_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): padding_info = null() flags = 0 - if private_key.algorithm == 'rsa': + pkey_alg = private_key.algorithm + pkey_is_rsa = pkey_alg == 'rsa' or pkey_alg == 'rsassa_pss' + + if pkey_is_rsa: if rsa_pss_padding: hash_length = { 'md5': 16, @@ -3032,7 +3056,7 @@ def _bcrypt_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): padding_info_struct.pszAlgId = cast(bcrypt, 'wchar_t *', hash_buffer) padding_info = cast(bcrypt, 'void *', padding_info_struct_pointer) - if private_key.algorithm == 'dsa' and private_key.bit_size > 1024 and hash_algorithm in set(['md5', 'sha1']): + if pkey_alg == 'dsa' and private_key.bit_size > 1024 and hash_algorithm in set(['md5', 'sha1']): raise ValueError(pretty_message( ''' Windows does not support sha1 signatures with DSA keys based on @@ -3056,7 +3080,7 @@ def _bcrypt_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): buffer_len = deref(out_len) buffer = buffer_from_bytes(buffer_len) - if 
private_key.algorithm == 'rsa': + if pkey_is_rsa: padding_info = cast(bcrypt, 'void *', padding_info_struct_pointer) res = bcrypt.BCryptSignHash( @@ -3072,7 +3096,7 @@ def _bcrypt_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): handle_error(res) signature = bytes_from_buffer(buffer, deref(out_len)) - if private_key.algorithm != 'rsa': + if not pkey_is_rsa: # Windows doesn't use the ASN.1 Sequence for DSA/ECDSA signatures, # so we have to convert it here for the verification to work signature = DSASignature.from_p1363(signature).dump() diff --git a/app/lib/package_control/deps/oscrypto/_win/symmetric.py b/app/lib/package_control/deps/oscrypto/_win/symmetric.py index e93cd13..ff23109 100644 --- a/app/lib/package_control/deps/oscrypto/_win/symmetric.py +++ b/app/lib/package_control/deps/oscrypto/_win/symmetric.py @@ -790,8 +790,8 @@ def _encrypt(cipher, key, data, iv, padding): if cipher != 'rc4' and not padding: # AES in CBC mode can be allowed with no padding if - # the data is an exact multiple of the key size - if not (cipher == 'aes' and padding is False and len(data) % len(key) == 0): + # the data is an exact multiple of the block size + if not (cipher == 'aes' and len(data) % 16 == 0): raise ValueError('padding must be specified') if _backend == 'winlegacy': @@ -1014,7 +1014,7 @@ def _decrypt(cipher, key, data, iv, padding): type_name(iv) )) - if cipher != 'rc4' and padding is None: + if cipher not in set(['rc4', 'aes']) and not padding: raise ValueError('padding must be specified') if _backend == 'winlegacy': diff --git a/app/lib/package_control/deps/oscrypto/_win/tls.py b/app/lib/package_control/deps/oscrypto/_win/tls.py index fcb3166..23e2027 100644 --- a/app/lib/package_control/deps/oscrypto/_win/tls.py +++ b/app/lib/package_control/deps/oscrypto/_win/tls.py @@ -388,7 +388,7 @@ def wrap(cls, socket, hostname, session=None): def __init__(self, address, port, timeout=10, session=None): """ :param address: - A unicode string of the domain name 
or IP address to conenct to + A unicode string of the domain name or IP address to connect to :param port: An integer of the port number to connect to diff --git a/app/lib/package_control/deps/oscrypto/version.py b/app/lib/package_control/deps/oscrypto/version.py index 85da143..b7c352c 100644 --- a/app/lib/package_control/deps/oscrypto/version.py +++ b/app/lib/package_control/deps/oscrypto/version.py @@ -2,5 +2,5 @@ from __future__ import unicode_literals, division, absolute_import, print_function -__version__ = '1.2.1' -__version_info__ = (1, 2, 1) +__version__ = '1.3.0' +__version_info__ = (1, 3, 0) diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py index a4ab569..0b04a36 100644 --- a/app/lib/package_control/download_manager.py +++ b/app/lib/package_control/download_manager.py @@ -1,52 +1,87 @@ import re import socket -from threading import Lock, Timer -from contextlib import contextmanager import sys +from threading import Lock, Timer from urllib.parse import urlparse from . import __version__ - -from .show_error import show_error -from .console_write import console_write -from .cache import set_cache, get_cache from . 
import text +from .cache import set_cache, get_cache +from .console_write import console_write +from .show_error import show_error from .downloaders import DOWNLOADERS -from .downloaders.urllib_downloader import UrlLibDownloader from .downloaders.binary_not_found_error import BinaryNotFoundError +from .downloaders.downloader_exception import DownloaderException +from .downloaders.oscrypto_downloader_exception import OscryptoDownloaderException from .downloaders.rate_limit_exception import RateLimitException from .downloaders.rate_limit_exception import RateLimitSkipException -from .downloaders.downloader_exception import DownloaderException +from .downloaders.urllib_downloader import UrlLibDownloader from .downloaders.win_downloader_exception import WinDownloaderException -from .downloaders.oscrypto_downloader_exception import OscryptoDownloaderException from .http_cache import HttpCache -# A dict of domains - each points to a list of downloaders _managers = {} +"""A dict of domains - each points to a list of downloaders""" -# How many managers are currently checked out _in_use = 0 +"""How many managers are currently checked out""" -# Make sure connection management doesn't run into threading issues _lock = Lock() +"""Make sure connection management doesn't run into threading issues""" -# A timer used to disconnect all managers after a period of no usage _timer = None +"""A timer used to disconnect all managers after a period of no usage""" + + +def http_get(url, settings, error_message='', prefer_cached=False): + """ + Performs a HTTP GET request using best matching downloader. + + :param url: + The string URL to download + + :param settings: + The dictionary with downloader settings. 
+ + - ``debug`` + - ``downloader_precedence`` + - ``http_basic_auth`` + - ``http_cache`` + - ``http_cache_length`` + - ``http_proxy`` + - ``https_proxy`` + - ``proxy_username`` + - ``proxy_password`` + - ``user_agent`` + - ``timeout`` + :param error_message: + The error message to include if the download fails + + :param prefer_cached: + If cached version of the URL content is preferred over a new request + + :raises: + DownloaderException: if there was an error downloading the URL + + :return: + The string contents of the URL + """ + + manager = None + result = None -@contextmanager -def downloader(url, settings): try: - manager = None manager = _grab(url, settings) - yield manager + result = manager.fetch(url, error_message, prefer_cached) finally: if manager: _release(url, manager) + return result + def _grab(url, settings): global _managers, _lock, _in_use, _timer @@ -75,7 +110,10 @@ def _release(url, manager): global _managers, _lock, _in_use, _timer with _lock: - hostname = urlparse(url).hostname.lower() + parsed = urlparse(url) + if not parsed or not parsed.hostname: + raise DownloaderException('The URL "%s" is malformed' % url) + hostname = parsed.hostname.lower() # This means the package was reloaded between _grab and _release, # so the downloader is using old code and we want to discard it @@ -103,7 +141,7 @@ def close_all_connections(): _timer.cancel() _timer = None - for domain, managers in _managers.items(): + for managers in _managers.values(): for manager in managers: manager.close() _managers = {} @@ -154,11 +192,29 @@ def __init__(self, settings): # Cache the downloader for re-use self.downloader = None - user_agent = settings.get('user_agent') - if user_agent and user_agent.find('%s') != -1: - settings['user_agent'] = user_agent % __version__ - - self.settings = settings + keys_to_copy = { + 'debug', + 'downloader_precedence', + 'http_basic_auth', + 'http_proxy', + 'https_proxy', + 'proxy_username', + 'proxy_password', + 'user_agent', + 'timeout', 
+ } + + # Copy required settings to avoid manipulating caller's environment. + # It's needed as e.g. `cache_length` is defined with different meaning in PackageManager's + # settings. Also `cache` object shouldn't be propagated to caller. + self.settings = {key: value for key, value in settings.items() if key in keys_to_copy} + + # add package control version to user agent + user_agent = self.settings.get('user_agent') + if user_agent and '%s' in user_agent: + self.settings['user_agent'] = user_agent % __version__ + + # setup private http cache storage driver if settings.get('http_cache'): cache_length = settings.get('http_cache_length', 604800) self.settings['cache'] = HttpCache(cache_length) @@ -205,7 +261,7 @@ def fetch(self, url, error_message, prefer_cached=False): downloader_precedence = self.settings.get( 'downloader_precedence', { - "windows": ["wininet", "oscrypto"], + "windows": ["wininet", "oscrypto", "urllib"], "osx": ["urllib", "oscrypto", "curl"], "linux": ["urllib", "oscrypto", "curl", "wget"] } @@ -227,15 +283,15 @@ def fetch(self, url, error_message, prefer_cached=False): if not self.downloader or ( (is_ssl and not self.downloader.supports_ssl()) or (not is_ssl and not self.downloader.supports_plaintext())): + for downloader_name in downloader_list: - if downloader_name not in DOWNLOADERS: - # We ignore oscrypto not being present on Linux since it - # can't be used with on Linux with Sublime Text 3 - if sys.version_info[:2] == (3, 3) and \ - sys.platform == 'linux' and \ - downloader_name == 'oscrypto': + try: + downloader_class = DOWNLOADERS[downloader_name] + if downloader_class is None: continue + + except KeyError: error_string = text.format( ''' The downloader "%s" from the "downloader_precedence" @@ -247,14 +303,15 @@ def fetch(self, url, error_message, prefer_cached=False): raise DownloaderException(error_string) try: - downloader = DOWNLOADERS[downloader_name](self.settings) + downloader = downloader_class(self.settings) if is_ssl and not 
downloader.supports_ssl(): continue if not is_ssl and not downloader.supports_plaintext(): continue self.downloader = downloader break - except (BinaryNotFoundError): + + except BinaryNotFoundError: pass if not self.downloader: @@ -275,9 +332,11 @@ def fetch(self, url, error_message, prefer_cached=False): raise DownloaderException(error_string.replace('\n\n', ' ')) url = url.replace(' ', '%20') - hostname = urlparse(url).hostname - if hostname: - hostname = hostname.lower() + parsed = urlparse(url) + if not parsed or not parsed.hostname: + raise DownloaderException('The URL "%s" is malformed' % url) + hostname = parsed.hostname.lower() + timeout = self.settings.get('timeout', 3) rate_limited_domains = get_cache('rate_limited_domains', []) @@ -328,16 +387,18 @@ def fetch(self, url, error_message, prefer_cached=False): return self.downloader.download(url, error_message, timeout, 3, prefer_cached) except (RateLimitException) as e: - rate_limited_domains.append(hostname) - set_cache('rate_limited_domains', rate_limited_domains, self.settings.get('cache_length')) + set_cache( + 'rate_limited_domains', + rate_limited_domains, + self.settings.get('cache_length', 604800) + ) console_write( ''' - Hit rate limit of %s for %s. Skipping all futher download - requests for this domain. + %s Skipping all further download requests for this domain. 
''', - (e.limit, e.domain) + str(e) ) raise @@ -354,12 +415,11 @@ def fetch(self, url, error_message, prefer_cached=False): return self.fetch(url, error_message, prefer_cached) except (WinDownloaderException) as e: - console_write( ''' Attempting to use Urllib downloader due to WinINet error: %s ''', - e + str(e) ) # Here we grab the proxy info extracted from WinInet to fill in diff --git a/app/lib/package_control/downloaders/__init__.py b/app/lib/package_control/downloaders/__init__.py index 91d763c..5101fa0 100644 --- a/app/lib/package_control/downloaders/__init__.py +++ b/app/lib/package_control/downloaders/__init__.py @@ -1,10 +1,13 @@ import sys +from ..console_write import console_write + from .urllib_downloader import UrlLibDownloader from .curl_downloader import CurlDownloader from .wget_downloader import WgetDownloader DOWNLOADERS = { + 'oscrypto': None, 'urllib': UrlLibDownloader, 'curl': CurlDownloader, 'wget': WgetDownloader @@ -15,11 +18,27 @@ # loaded from the operating system. On Python 3.8 we dynamically link OpenSSL, # so it just needs to be configured properly, which is handled in # oscrypto_downloader.py. -if sys.platform != 'linux' or sys.version_info[:2] != (3, 3) or \ - sys.executable != 'python3': - from .oscrypto_downloader import OscryptoDownloader - DOWNLOADERS['oscrypto'] = OscryptoDownloader +if sys.platform != 'linux' or sys.version_info[:2] != (3, 3) or sys.executable != 'python3': + try: + from .oscrypto_downloader import OscryptoDownloader + DOWNLOADERS['oscrypto'] = OscryptoDownloader + except Exception as e: + console_write( + ''' + OscryptoDownloader not available! %s + ''', + str(e) + ) if sys.platform == 'win32': - from .wininet_downloader import WinINetDownloader - DOWNLOADERS['wininet'] = WinINetDownloader + try: + from .wininet_downloader import WinINetDownloader + DOWNLOADERS['wininet'] = WinINetDownloader + except Exception as e: + DOWNLOADERS['wininet'] = None + console_write( + ''' + WinINetDownloader not available! 
%s + ''', + str(e) + ) diff --git a/app/lib/package_control/downloaders/rate_limit_exception.py b/app/lib/package_control/downloaders/rate_limit_exception.py index 0f506ee..6d9ce3d 100644 --- a/app/lib/package_control/downloaders/rate_limit_exception.py +++ b/app/lib/package_control/downloaders/rate_limit_exception.py @@ -12,7 +12,7 @@ def __init__(self, domain, limit): self.limit = limit def __str__(self): - return 'Rate limit of %s exceeded for %s' % (self.limit, self.domain) + return 'Hit rate limit of %s for %s.' % (self.limit, self.domain) class RateLimitSkipException(DownloaderException): @@ -25,4 +25,4 @@ def __init__(self, domain): self.domain = domain def __str__(self): - return 'Skipping due to hitting rate limit for %s' % self.domain + return 'Skipping %s due to rate limit.' % self.domain diff --git a/app/lib/package_control/providers/channel_provider.py b/app/lib/package_control/providers/channel_provider.py index a775d21..53dbe77 100644 --- a/app/lib/package_control/providers/channel_provider.py +++ b/app/lib/package_control/providers/channel_provider.py @@ -4,7 +4,7 @@ from urllib.parse import urljoin from ..console_write import console_write -from ..download_manager import downloader, update_url +from ..download_manager import http_get, update_url from ..versions import version_sort from .provider_exception import ProviderException from .schema_compat import platforms_to_releases @@ -92,8 +92,7 @@ def fetch(self): return if re.match(r'https?://', self.channel_url, re.I): - with downloader(self.channel_url, self.settings) as manager: - json_string = manager.fetch(self.channel_url, 'Error downloading channel.') + json_string = http_get(self.channel_url, self.settings, 'Error downloading channel.') # All other channels are expected to be filesystem paths else: diff --git a/app/lib/package_control/providers/release_selector.py b/app/lib/package_control/providers/release_selector.py deleted file mode 100644 index e55c191..0000000 --- 
a/app/lib/package_control/providers/release_selector.py +++ /dev/null @@ -1,82 +0,0 @@ -import re -import sublime - -from ..versions import version_exclude_prerelease - - -def filter_releases(package, settings, releases): - """ - Returns all releases in the list of releases that are compatible with - the current platform and version of Sublime Text - - :param package: - The name of the package - - :param settings: - A dict optionally containing the `install_prereleases` key - - :param releases: - A list of release dicts - - :return: - A list of release dicts - """ - - platform_selectors = [ - sublime.platform() + '-' + sublime.arch(), - sublime.platform(), - '*' - ] - st_version = int(sublime.version()) - - install_prereleases = settings.get('install_prereleases') - allow_prereleases = install_prereleases is True - if not allow_prereleases and isinstance(install_prereleases, list) and package in install_prereleases: - allow_prereleases = True - - if not allow_prereleases: - releases = version_exclude_prerelease(releases) - - output = [] - for release in releases: - platforms = release.get('platforms') or ['*'] - if not isinstance(platforms, list): - platforms = [platforms] - - if not any(selector in platforms for selector in platform_selectors): - continue - - # Default to '*' (for legacy reasons), see #604 - if not is_compatible_version(release.get('sublime_text') or '*', st_version): - continue - - output.append(release) - - return output - - -def is_compatible_version(version_range, st_version): - if version_range == '*': - return True - - gt_match = re.match(r'>(\d{4})$', version_range) - if gt_match: - return st_version > int(gt_match.group(1)) - - ge_match = re.match(r'>=(\d{4})$', version_range) - if ge_match: - return st_version >= int(ge_match.group(1)) - - lt_match = re.match(r'<(\d{4})$', version_range) - if lt_match: - return st_version < int(lt_match.group(1)) - - le_match = re.match(r'<=(\d{4})$', version_range) - if le_match: - return st_version <= 
int(le_match.group(1)) - - range_match = re.match(r'(\d{4}) - (\d{4})$', version_range) - if range_match: - return st_version >= int(range_match.group(1)) and st_version <= int(range_match.group(2)) - - return None diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index 0fa194e..ee5593a 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ b/app/lib/package_control/providers/repository_provider.py @@ -10,7 +10,7 @@ from ..clients.github_client import GitHubClient from ..clients.gitlab_client import GitLabClient from ..console_write import console_write -from ..download_manager import downloader, update_url +from ..download_manager import http_get, update_url from ..downloaders.downloader_exception import DownloaderException from ..versions import version_sort from .base_repository_provider import BaseRepositoryProvider @@ -157,8 +157,7 @@ def fetch_json(self, location): """ if re.match(r'https?://', location, re.I): - with downloader(location, self.settings) as manager: - json_string = manager.fetch(location, 'Error downloading repository.') + json_string = http_get(location, self.settings, 'Error downloading repository.') # Anything that is not a URL is expected to be a filesystem path else: From 05d8fbf6234d227d5833db3f5062392fe4c93147 Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Tue, 30 Aug 2022 19:13:01 +0200 Subject: [PATCH 10/39] Adjustments for new http_get() --- app/lib/readme_images.py | 57 +++++++++++++++++++-------------------- app/lib/run_repo_tests.py | 19 +++++++------ 2 files changed, 37 insertions(+), 39 deletions(-) diff --git a/app/lib/readme_images.py b/app/lib/readme_images.py index a2ea0bc..a054a4c 100644 --- a/app/lib/readme_images.py +++ b/app/lib/readme_images.py @@ -3,7 +3,7 @@ import hashlib from urllib.parse import urlparse -from .package_control.download_manager import downloader +from .package_control.download_manager import http_get from 
.package_control.downloaders.downloader_exception import DownloaderException @@ -14,39 +14,38 @@ def cache(settings, rendered_html): urls = re.findall(' 10 and data[6:10] in [b'JFIF', b'Exif']) or (length > 24 and data[0:4] == b'\xFF\xD8\xFF\xED' and data[20:24] == b'8BIM'): - ext = '.jpg' - elif data[0:128].find(b']+>' - rendered_html = re.sub(regex, '', rendered_html) + data = http_get(url, settings, 'fetching readme image') + + # Detect file extension by file contents + if ext == '': + length = len(data) + + if data[0:8] == b'\x89PNG\r\n\x1A\n': + ext = '.png' + elif data[0:6] == b'GIF87a' or data[0:6] == b'GIF89a': + ext = '.gif' + elif (length > 10 and data[6:10] in [b'JFIF', b'Exif']) or (length > 24 and data[0:4] == b'\xFF\xD8\xFF\xED' and data[20:24] == b'8BIM'): + ext = '.jpg' + elif data[0:128].find(b']+>' + rendered_html = re.sub(regex, '', rendered_html) return rendered_html diff --git a/app/lib/run_repo_tests.py b/app/lib/run_repo_tests.py index b74f6a5..388e29e 100644 --- a/app/lib/run_repo_tests.py +++ b/app/lib/run_repo_tests.py @@ -12,7 +12,7 @@ import imp from .package_control.providers import RepositoryProvider -from .package_control.download_manager import downloader, close_all_connections +from .package_control.download_manager import close_all_connections, http_get from .package_control.downloaders.downloader_exception import DownloaderException from .. 
import config from .st_package_reviewer.check import file as file_checkers @@ -137,9 +137,9 @@ def run_tests(spec): tmp_package_path = os.path.join(tmpdir, '%s.sublime-package' % name) tmp_package_dir = os.path.join(tmpdir, name) os.mkdir(tmp_package_dir) - with open(tmp_package_path, 'wb') as package_file, downloader(url, settings) as manager: + with open(tmp_package_path, 'wb') as package_file: try: - package_file.write(manager.fetch(url, 'fetching package')) + package_file.write(http_get(url, settings, 'fetching package')) except DownloaderException as e: errors.append(format_report(str(e))) return build_result(errors, warnings) @@ -509,13 +509,12 @@ def test_pull_request(pr): output.append(' - ERROR: External repositories added to the default channel must be served over HTTPS') # Continue with testing regardless - with downloader(repo, settings) as manager: - try: - raw_data = manager.fetch(repo, 'fetching repository') - except DownloaderException as e: - errors = True - output.append(' - ERROR: %s' % str(e)) - continue + try: + raw_data = http_get(repo, settings, 'fetching repository') + except DownloaderException as e: + errors = True + output.append(' - ERROR: %s' % str(e)) + continue try: raw_data = raw_data.decode('utf-8') From 5ad7962fc1faec7723077ed90eaff1777fa99e03 Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Tue, 30 Aug 2022 21:10:38 +0200 Subject: [PATCH 11/39] Cleanup generate_channel imports --- app/tasks/generate_channel_v3_json.py | 8 +++----- app/tasks/generate_channel_v4_json.py | 8 +++----- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/app/tasks/generate_channel_v3_json.py b/app/tasks/generate_channel_v3_json.py index 0110cb3..039874f 100644 --- a/app/tasks/generate_channel_v3_json.py +++ b/app/tasks/generate_channel_v3_json.py @@ -1,12 +1,10 @@ -import re +import bz2 +import gzip import json import os -import gzip -import bz2 -from urllib.parse import urlparse -from ..models import package, library from 
..lib.json_datetime_encoder import JsonDatetimeEncoder +from ..models import package if 'PACKAGE_CONTROL_ASSETS' not in os.environ: diff --git a/app/tasks/generate_channel_v4_json.py b/app/tasks/generate_channel_v4_json.py index 58194ef..1d7fbae 100644 --- a/app/tasks/generate_channel_v4_json.py +++ b/app/tasks/generate_channel_v4_json.py @@ -1,12 +1,10 @@ -import re +import bz2 +import gzip import json import os -import gzip -import bz2 -from urllib.parse import urlparse -from ..models import package, library from ..lib.json_datetime_encoder import JsonDatetimeEncoder +from ..models import package, library if 'PACKAGE_CONTROL_ASSETS' not in os.environ: From ef7653fd400b77182137743565ae88eb76aab7de Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Tue, 30 Aug 2022 21:23:57 +0200 Subject: [PATCH 12/39] Update tasks update_package_control_lib --- app/lib/package_control/__init__.py | 4 +- app/lib/package_control/unicode.py | 148 ------------------------ app/tasks/update_package_control_lib.py | 13 +-- 3 files changed, 7 insertions(+), 158 deletions(-) delete mode 100644 app/lib/package_control/unicode.py diff --git a/app/lib/package_control/__init__.py b/app/lib/package_control/__init__.py index 4cda0f1..53b3b77 100644 --- a/app/lib/package_control/__init__.py +++ b/app/lib/package_control/__init__.py @@ -1,2 +1,2 @@ -__version__ = "3.4.1" -__version_info__ = (3, 4, 1) +__version__ = "4.0.0-dev" +__version_info__ = (4, 0, 0, 'dev') diff --git a/app/lib/package_control/unicode.py b/app/lib/package_control/unicode.py deleted file mode 100644 index 4e98c9c..0000000 --- a/app/lib/package_control/unicode.py +++ /dev/null @@ -1,148 +0,0 @@ -import locale -import sys -import tempfile -import os - -if sys.platform == 'win32': - import ctypes - -try: - str_cls = unicode -except (NameError): - str_cls = str - -# Sublime Text on OS X does not seem to report the correct encoding -# so we hard-code that to UTF-8 -_encoding = 'utf-8' if sys.platform == 'darwin' else 
locale.getpreferredencoding() - -_fallback_encodings = ['utf-8', 'cp1252'] - - -def unicode_from_os(e): - """ - This is needed as some exceptions coming from the OS are - already encoded and so just calling unicode(e) will result - in an UnicodeDecodeError as the string isn't in ascii form. - - :param e: - The exception to get the value of - - :return: - The unicode version of the exception message - """ - - if sys.version_info >= (3,): - return str(e) - - try: - if isinstance(e, Exception): - e = e.args[0] - - if isinstance(e, str_cls): - return e - - if isinstance(e, int): - e = str(e) - - return str_cls(e, _encoding) - - # If the "correct" encoding did not work, try some defaults, and then just - # obliterate characters that we can't seen to decode properly - except UnicodeDecodeError: - for encoding in _fallback_encodings: - try: - return str_cls(e, encoding, errors='strict') - except (Exception): - pass - return str_cls(e, errors='replace') - - -def tempfile_unicode_patch(): - """ - This function monkey-patches the tempfile module in ST2 on Windows to - properly handle non-ASCII paths from environmental variables being - used as the basis for a temp directory. 
- """ - - if sys.version_info >= (3,): - return - - if sys.platform != 'win32': - return - - if hasattr(tempfile._candidate_tempdir_list, 'patched'): - return - - unicode_error = False - for var in ['TMPDIR', 'TEMP', 'TMP']: - dir_ = os.getenv(var) - if not dir_: - continue - # If the path contains a non-unicode chars that is also - # non-ASCII, then this will fail - try: - dir_ + u'' - except (UnicodeDecodeError): - unicode_error = True - break - # Windows paths can not contain a ?, so this is evidence - # that a unicode deocding issue happened - if dir_.find('?') != -1: - unicode_error = True - break - - if not unicode_error: - return - - kernel32 = ctypes.windll.kernel32 - - kernel32.GetEnvironmentStringsW.argtypes = [] - kernel32.GetEnvironmentStringsW.restype = ctypes.c_void_p - - str_pointer = kernel32.GetEnvironmentStringsW() - string = ctypes.wstring_at(str_pointer) - - env_vars = {} - while string != '': - if string[0].isalpha(): - name, value = string.split(u'=', 1) - env_vars[name.encode('ascii')] = value - # Include the trailing null byte, and measure each - # char as 2 bytes since Windows uses UTF-16 for - # wide chars - str_pointer += (len(string) + 1) * 2 - - string = ctypes.wstring_at(str_pointer) - - # This is pulled from tempfile.py in Python 2.6 and patched to grab the - # temp path environmental variables as unicode from the call to - # GetEnvironmentStringsW() - def _candidate_tempdir_list(): - dirlist = [] - - # First, try the environment. - for envname in 'TMPDIR', 'TEMP', 'TMP': - dirname = env_vars.get(envname) - if dirname: - dirlist.append(dirname) - - # Failing that, try OS-specific locations. - if os.name == 'riscos': - dirname = os.getenv('Wimp$ScrapDir') - if dirname: - dirlist.append(dirname) - elif os.name == 'nt': - dirlist.extend([r'c:\temp', r'c:\tmp', r'\temp', r'\tmp']) - else: - dirlist.extend(['/tmp', '/var/tmp', '/usr/tmp']) - - # As a last resort, the current directory. 
- try: - dirlist.append(os.getcwd()) - except (AttributeError, os.error): - dirlist.append(os.curdir) - - return dirlist - - tempfile._candidate_tempdir_list = _candidate_tempdir_list - setattr(tempfile._candidate_tempdir_list, 'patched', True) diff --git a/app/tasks/update_package_control_lib.py b/app/tasks/update_package_control_lib.py index 59602a1..d14d452 100644 --- a/app/tasks/update_package_control_lib.py +++ b/app/tasks/update_package_control_lib.py @@ -18,21 +18,18 @@ REQUIRED_PATHS = [ + 'clients', + 'deps', + 'downloaders', + 'http', + 'providers', '__init__.py', 'ca_certs.py', 'cache.py', - 'clients', 'cmd.py', 'console_write.py', - 'deps', 'download_manager.py', - 'downloaders', - 'file_not_found_error.py', - 'http', - 'providers', - 'semver.py', 'text.py', - 'unicode.py', 'versions.py', ] From 8484fef5907e1f7ad5c60023cf988d72f15c9dcf Mon Sep 17 00:00:00 2001 From: DeathAxe Date: Sat, 3 Sep 2022 11:54:45 +0200 Subject: [PATCH 13/39] Update ChannelProvider --- app/lib/package_control/download_manager.py | 40 +- .../providers/channel_provider.py | 356 ++++++++------- .../providers/repository_provider.py | 418 ++++++++---------- .../providers/schema_compat.py | 49 +- app/models/package/find.py | 3 +- 5 files changed, 418 insertions(+), 448 deletions(-) diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py index 0b04a36..05c93aa 100644 --- a/app/lib/package_control/download_manager.py +++ b/app/lib/package_control/download_manager.py @@ -1,8 +1,9 @@ +import os import re import socket import sys from threading import Lock, Timer -from urllib.parse import urlparse +from urllib.parse import urljoin, urlparse from . import __version__ from . import text @@ -147,6 +148,43 @@ def close_all_connections(): _managers = {} +def resolve_urls(root_url, uris): + """ + Convert a list of relative uri's to absolute urls/paths. 
+ + :param root_url: + The root url string + + :param uris: + An iteratable of relative uri's to resolve. + + :returns: + A generator of resolved URLs + """ + + scheme_match = re.match(r'(https?:)//', root_url, re.I) + if scheme_match is None: + root_dir = os.path.dirname(root_url) + else: + root_dir = '' + + for url in uris: + if url.startswith('//'): + if scheme_match is not None: + url = scheme_match.group(1) + url + else: + url = 'https:' + url + elif url.startswith('/'): + # We don't allow absolute repositories + continue + elif url.startswith('./') or url.startswith('../'): + if root_dir: + url = os.path.normpath(os.path.join(root_dir, url)) + else: + url = urljoin(root_url, url) + yield url + + def update_url(url, debug): """ Takes an old, out-dated URL and updates it. Mostly used with GitHub URLs diff --git a/app/lib/package_control/providers/channel_provider.py b/app/lib/package_control/providers/channel_provider.py index 53dbe77..8a56893 100644 --- a/app/lib/package_control/providers/channel_provider.py +++ b/app/lib/package_control/providers/channel_provider.py @@ -1,13 +1,12 @@ import json import os import re -from urllib.parse import urljoin +from itertools import chain from ..console_write import console_write -from ..download_manager import http_get, update_url +from ..download_manager import http_get, resolve_urls, update_url from ..versions import version_sort from .provider_exception import ProviderException -from .schema_compat import platforms_to_releases from .schema_compat import SchemaVersion @@ -16,7 +15,7 @@ class InvalidChannelFileException(ProviderException): def __init__(self, channel, reason_message): super().__init__( 'Channel %s does not appear to be a valid channel file because' - ' %s' % (channel.url, reason_message)) + ' %s' % (channel.channel_url, reason_message)) class ChannelProvider: @@ -113,7 +112,7 @@ def fetch(self): try: channel_info = json.loads(json_string.decode('utf-8')) - except (ValueError): + except ValueError: raise 
InvalidChannelFileException(self, 'parsing JSON failed.') try: @@ -123,35 +122,11 @@ def fetch(self): except ValueError as e: raise InvalidChannelFileException(self, e) - # Fix any out-dated repository URLs in the package cache - debug = self.settings.get('debug') - packages_key = 'packages_cache' if schema_version.major >= 2 else 'packages' - if packages_key in channel_info: - original_cache = channel_info[packages_key] - new_cache = {} - for repo in original_cache: - new_cache[update_url(repo, debug)] = original_cache[repo] - channel_info[packages_key] = new_cache - - self.channel_info = channel_info - self.schema_version = schema_version - - def get_name_map(self): - """ - :raises: - ProviderException: when an error occurs with the channel contents - DownloaderException: when an error occurs trying to open a URL + if 'repositories' not in channel_info: + raise InvalidChannelFileException(self, 'the "repositories" JSON key is missing.') - :return: - A dict of the mapping for URL slug -> package name - """ - - self.fetch() - - if self.schema_version.major >= 2: - return {} - - return self.channel_info.get('package_name_map', {}) + self.channel_info = self._migrate_channel_info(channel_info, schema_version) + self.schema_version = schema_version def get_renamed_packages(self): """ @@ -165,19 +140,15 @@ def get_renamed_packages(self): self.fetch() - if self.schema_version.major >= 2: - output = {} - if 'packages_cache' in self.channel_info: - for repo in self.channel_info['packages_cache']: - for package in self.channel_info['packages_cache'][repo]: - previous_names = package.get('previous_names', []) - if not isinstance(previous_names, list): - previous_names = [previous_names] - for previous_name in previous_names: - output[previous_name] = package['name'] - return output + output = {} + for package in chain(*self.channel_info.get('packages_cache', {}).values()): + previous_names = package.get('previous_names', []) + if not isinstance(previous_names, list): + 
previous_names = [previous_names] + for previous_name in previous_names: + output[previous_name] = package['name'] - return self.channel_info.get('renamed_packages', {}) + return output def get_repositories(self): """ @@ -191,39 +162,7 @@ def get_repositories(self): self.fetch() - if 'repositories' not in self.channel_info: - raise InvalidChannelFileException( - self, 'the "repositories" JSON key is missing.') - - # Determine a relative root so repositories can be defined - # relative to the location of the channel file. - scheme_match = re.match(r'(https?:)//', self.channel_url, re.I) - if scheme_match is None: - relative_base = os.path.dirname(self.channel_url) - is_http = False - else: - is_http = True - - debug = self.settings.get('debug') - output = [] - for repository in self.channel_info['repositories']: - if repository.startswith('//'): - if scheme_match is not None: - repository = scheme_match.group(1) + repository - else: - repository = 'https:' + repository - elif repository.startswith('/'): - # We don't allow absolute repositories - continue - elif repository.startswith('./') or repository.startswith('../'): - if is_http: - repository = urljoin(self.channel_url, repository) - else: - repository = os.path.join(relative_base, repository) - repository = os.path.normpath(repository) - output.append(update_url(repository, debug)) - - return output + return self.channel_info['repositories'] def get_sources(self): """ @@ -248,85 +187,41 @@ def get_packages(self, repo_url): DownloaderException: when an error occurs trying to open a URL :return: - A dict in the format: - { - 'Package Name': { + A generator of + ( + 'Package Name', + { 'name': name, 'description': description, 'author': author, 'homepage': homepage, + 'previous_names': [old_name, ...], + 'labels': [label, ...], + 'readme': url, + 'issues': url, + 'donate': url, + 'buy': url, 'last_modified': last modified date, 'releases': [ { - 'sublime_text': '*', - 'platforms': ['*'], + 'sublime_text': 
compatible version, + 'platforms': [platform name, ...], 'url': url, 'date': date, - 'version': version + 'version': version, + 'libraries': [library name, ...] }, ... - ], - 'previous_names': [old_name, ...], - 'labels': [label, ...], - 'readme': url, - 'issues': url, - 'donate': url, - 'buy': url - }, - ... - } + ] + } + ) + tuples """ self.fetch() - repo_url = update_url(repo_url, self.settings.get('debug')) - - # The 2.0 channel schema renamed the key cached package info was - # stored under in order to be more clear to new users. - packages_key = 'packages_cache' if self.schema_version.major >= 2 else 'packages' - - output = {} - for package in self.channel_info.get(packages_key, {}).get(repo_url, []): - copy = package.copy() - - # In schema version 2.0, we store a list of dicts containing info - # about all available releases. These include "version" and - # "platforms" keys that are used to pick the download for the - # current machine. - if self.schema_version.major < 2: - copy['releases'] = platforms_to_releases(copy, self.settings.get('debug')) - del copy['platforms'] - else: - last_modified = None - - for release in copy.get('releases', []): - date = release.get('date') - if not last_modified or (date and date > last_modified): - last_modified = date - - if self.schema_version.major < 4: - if 'dependencies' in release: - release['libraries'] = release['dependencies'] - del release['dependencies'] - - copy['last_modified'] = last_modified - - defaults = { - 'buy': None, - 'issues': None, - 'labels': [], - 'previous_names': [], - 'readme': None, - 'donate': None - } - for field in defaults: - if field not in copy: - copy[field] = defaults[field] - - copy['releases'] = version_sort(copy['releases'], 'platforms', reverse=True) - - output[copy['name']] = copy - - return output + for package in self.channel_info.get('packages_cache', {}).get(repo_url, []): + if package['releases']: + yield (package['name'], package) def get_libraries(self, repo_url): """ @@ 
-340,40 +235,177 @@ def get_libraries(self, repo_url): DownloaderException: when an error occurs trying to open a URL :return: - A dict in the format: - { - 'Library Name': { + A generator of + ( + 'Library Name', + { 'name': name, - 'load_order': two digit string, 'description': description, 'author': author, 'issues': URL, 'releases': [ { - 'sublime_text': '*', - 'platforms': ['*'], + 'sublime_text': compatible version, + 'platforms': [platform name, ...], + 'python_versions': ['3.3', '3.8'], 'url': url, - 'date': date, 'version': version, - 'sha256': hex_hash + 'sha256': hex hash }, ... ] - }, - ... - } + } + ) + tuples """ self.fetch() - repo_url = update_url(repo_url, self.settings.get('debug')) + for library in self.channel_info.get('libraries_cache', {}).get(repo_url, []): + if library['releases']: + yield (library['name'], library) - # The 4.0.0 channel schema renamed the key cached package info was - # stored under in order to be more clear to new users. - libraries_key = 'libraries_cache' if self.schema_version.major >= 4 else 'dependencies_cache' + def get_broken_packages(self): + """ + Provide package names without releases. - output = {} - for library in self.channel_info.get(libraries_key, {}).get(repo_url, []): + :raises: + ProviderException: when an error occurs with the channel contents + DownloaderException: when an error occurs trying to open a URL + + :return: + A generator of 'package names' + """ + + self.fetch() + + for package in chain(*self.channel_info.get('packages_cache', {}).values()): + if not package['releases']: + yield package['name'] + + def get_broken_libraries(self): + """ + Provide library names without releases. 
+ + :raises: + ProviderException: when an error occurs with the channel contents + DownloaderException: when an error occurs trying to open a URL + + :return: + A generator of 'library names' + """ + + self.fetch() + + for library in chain(*self.channel_info.get('libraries_cache', {}).values()): + if not library['releases']: + yield library['name'] + + def _migrate_channel_info(self, channel_info, schema_version): + """ + Transform input channel_info to scheme version 4.0.0 + + :param channel_info: + The input channel information of any scheme version + + :param schema_version: + The schema version of the input channel information + + :returns: + channel_info object of scheme version 4.0.0 + """ + + channel_info['repositories'] = self._migrate_repositories(channel_info, schema_version) + channel_info['packages_cache'] = self._migrate_packages_cache(channel_info, schema_version) + channel_info['libraries_cache'] = self._migrate_libraries_cache(channel_info, schema_version) + return channel_info + + def _migrate_repositories(self, channel_info, schema_version): + + debug = self.settings.get('debug') + + return [ + update_url(url, debug) + for url in resolve_urls(self.channel_url, channel_info['repositories']) + ] + + def _migrate_packages_cache(self, channel_info, schema_version): + """ + Transform input packages cache to scheme version 4.0.0 + + :param channel_info: + The input channel information of any scheme version + + :param schema_version: + The schema version of the input channel information + + :returns: + packages_cache object of scheme version 4.0.0 + """ + + debug = self.settings.get('debug') + + package_cache = channel_info.get('packages_cache', {}) + + defaults = { + 'buy': None, + 'issues': None, + 'labels': [], + 'previous_names': [], + 'readme': None, + 'donate': None + } + + for package in chain(*package_cache.values()): + + for field in defaults: + if field not in package: + package[field] = defaults[field] + + releases = 
version_sort(package.get('releases', []), 'platforms', reverse=True) + package['releases'] = releases + package['last_modified'] = releases[0]['date'] if releases else None + + # The 4.0.0 channel schema renamed the `dependencies` key to `libraries`. + if schema_version.major < 4: + for release in package['releases']: + if 'dependencies' in release: + release['libraries'] = release.pop('dependencies') + + # Fix any out-dated repository URLs in packages cache + return {update_url(name, debug): info for name, info in package_cache.items()} + + def _migrate_libraries_cache(self, channel_info, schema_version): + """ + Transform input libraries cache to scheme version 4.0.0 + + :param channel_info: + The input channel information of any scheme version + + :param schema_version: + The schema version of the input channel information + + :returns: + libraries_cache object of scheme version 4.0.0 + """ + + debug = self.settings.get('debug') + + if schema_version.major < 4: + # The 4.0.0 channel schema renamed the key cached package info was + # stored under in order to be more clear to new users. + libraries_cache = channel_info.pop('dependencies_cache', {}) + + # The 4.0.0 channel scheme drops 'load_order' from each library + # and adds a required 'python_versions' list to each release. 
+ for library in chain(*libraries_cache.values()): + del library['load_order'] + for release in library['releases']: + release['python_versions'] = ['3.3'] + else: + libraries_cache = channel_info.get('libraries_cache', {}) + + for library in chain(*libraries_cache.values()): library['releases'] = version_sort(library['releases'], 'platforms', reverse=True) - output[library['name']] = library - return output + # Fix any out-dated repository URLs in libraries cache + return {update_url(name, debug): info for name, info in libraries_cache.items()} diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index ee5593a..dc3a621 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ b/app/lib/package_control/providers/repository_provider.py @@ -2,7 +2,7 @@ import re import os from itertools import chain -from urllib.parse import urljoin, urlparse +from urllib.parse import urlparse from .. import text from ..clients.bitbucket_client import BitBucketClient @@ -10,12 +10,11 @@ from ..clients.github_client import GitHubClient from ..clients.gitlab_client import GitLabClient from ..console_write import console_write -from ..download_manager import http_get, update_url +from ..download_manager import http_get, resolve_urls, update_url from ..downloaders.downloader_exception import DownloaderException from ..versions import version_sort from .base_repository_provider import BaseRepositoryProvider from .provider_exception import ProviderException -from .schema_compat import platforms_to_releases from .schema_compat import SchemaVersion @@ -104,29 +103,7 @@ def fetch_repo(self): return # Allow repositories to include other repositories - scheme_match = re.match(r'(https?:)//', self.repo_url, re.I) - if scheme_match is None: - relative_base = os.path.dirname(self.repo_url) - is_http = False - else: - is_http = True - - for include in self.repo_info.pop('includes', []): - if 
include.startswith('//'): - if scheme_match is not None: - include = scheme_match.group(1) + include - else: - include = 'https:' + include - elif include.startswith('/'): - # We don't allow absolute includes - continue - elif include.startswith('./') or include.startswith('../'): - if is_http: - include = urljoin(self.repo_url, include) - else: - include = os.path.join(relative_base, include) - include = os.path.normpath(include) - + for include in resolve_urls(self.repo_url, self.repo_info.pop('includes', [])): include_info = self.fetch_json(include) include_version = include_info['schema_version'] if include_version != self.schema_version: @@ -210,7 +187,6 @@ def get_libraries(self, invalid_sources=None): 'Library Name', { 'name': name, - 'load_order': two digit string, 'description': description, 'author': author, 'issues': URL, @@ -462,6 +438,13 @@ def get_packages(self, invalid_sources=None): 'description': description, 'author': author, 'homepage': homepage, + 'previous_names': [old_name, ...], + 'labels': [label, ...], + 'sources': [url, ...], + 'readme': url, + 'issues': url, + 'donate': url, + 'buy': url, 'last_modified': last modified date, 'releases': [ { @@ -473,13 +456,6 @@ def get_packages(self, invalid_sources=None): 'libraries': [library name, ...] }, ... 
] - 'previous_names': [old_name, ...], - 'labels': [label, ...], - 'sources': [url, ...], - 'readme': url, - 'issues': url, - 'donate': url, - 'buy': url } ) tuples @@ -502,16 +478,6 @@ def get_packages(self, invalid_sources=None): Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient) ] - # Backfill the "previous_names" keys for old schemas - previous_names = {} - if self.schema_version.major < 2: - renamed = self.get_renamed_packages() - for old_name in renamed: - new_name = renamed[old_name] - if new_name not in previous_names: - previous_names[new_name] = [] - previous_names[new_name].append(old_name) - output = {} for package in self.repo_info['packages']: info = { @@ -535,49 +501,42 @@ def get_packages(self, invalid_sources=None): if package.get(field): info[field] = package.get(field) - details = None - releases = None - - # Schema version 2.0 allows for grabbing details about a package, or its - # download from "details" urls. See the GitHubClient and BitBucketClient - # classes for valid URLs. - if self.schema_version.major >= 2: - details = package.get('details') - releases = package.get('releases') + details = package.get('details') + releases = package.get('releases') - # Try to grab package-level details from GitHub or BitBucket - if details: - if invalid_sources is not None and details in invalid_sources: - continue + # Try to grab package-level details from GitHub or BitBucket + if details: + if invalid_sources is not None and details in invalid_sources: + continue - info['sources'].append(details) + info['sources'].append(details) - try: - repo_info = None + try: + repo_info = None - for client in clients: - repo_info = client.repo_info(details) - if repo_info: - break - else: - raise ProviderException(text.format( - ''' - Invalid "details" value "%s" for one of the packages in the repository %s. 
- ''', - (details, self.repo_url) - )) + for client in clients: + repo_info = client.repo_info(details) + if repo_info: + break + else: + raise ProviderException(text.format( + ''' + Invalid "details" value "%s" for one of the packages in the repository %s. + ''', + (details, self.repo_url) + )) - del repo_info['default_branch'] + del repo_info['default_branch'] - # When grabbing details, prefer explicit field values over the values - # from the GitHub or BitBucket API - info = dict(chain(repo_info.items(), info.items())) + # When grabbing details, prefer explicit field values over the values + # from the GitHub or BitBucket API + info = dict(chain(repo_info.items(), info.items())) - except (DownloaderException, ClientException, ProviderException) as e: - if 'name' in info: - self.broken_packages[info['name']] = e - self.failed_sources[details] = e - continue + except (DownloaderException, ClientException, ProviderException) as e: + if 'name' in info: + self.broken_packages[info['name']] = e + self.failed_sources[details] = e + continue if 'name' not in info: self.failed_sources[self.repo_url] = ProviderException(text.format( @@ -594,164 +553,158 @@ def get_packages(self, invalid_sources=None): if not releases and details: releases = [{'details': details}] - if self.schema_version.major >= 2: - if not releases: - e = ProviderException(text.format( - ''' - No "releases" value for the package "%s" in the repository %s. - ''', - (info['name'], self.repo_url) - )) - self.broken_packages[info['name']] = e - continue + if not releases: + e = ProviderException(text.format( + ''' + No "releases" value for the package "%s" in the repository %s. + ''', + (info['name'], self.repo_url) + )) + self.broken_packages[info['name']] = e + continue - if not isinstance(releases, list): - e = ProviderException(text.format( - ''' - The "releases" value is not an array or the package "%s" in the repository %s. 
- ''', - (info['name'], self.repo_url) - )) - self.broken_packages[info['name']] = e - continue + if not isinstance(releases, list): + e = ProviderException(text.format( + ''' + The "releases" value is not an array or the package "%s" in the repository %s. + ''', + (info['name'], self.repo_url) + )) + self.broken_packages[info['name']] = e + continue - # This allows developers to specify a GH or BB location to get releases from, - # especially tags URLs (https://github.com/user/repo/tags or - # https://bitbucket.org/user/repo#tags) - for release in releases: - download_details = None - download_info = {} + # This allows developers to specify a GH or BB location to get releases from, + # especially tags URLs (https://github.com/user/repo/tags or + # https://bitbucket.org/user/repo#tags) + for release in releases: + download_details = None + download_info = {} - # Make sure that explicit fields are copied over - for field in ['platforms', 'sublime_text', 'version', 'url', 'date', 'libraries']: - if field in release: - value = release[field] - if field == 'url': - value = update_url(value, debug) - if field == 'platforms' and not isinstance(release['platforms'], list): - value = [value] - download_info[field] = value + # Make sure that explicit fields are copied over + for field in ['platforms', 'sublime_text', 'version', 'url', 'date', 'libraries']: + if field in release: + value = release[field] + if field == 'url': + value = update_url(value, debug) + if field == 'platforms' and not isinstance(release['platforms'], list): + value = [value] + download_info[field] = value - if self.schema_version.major < 4 and 'dependencies' in release: - download_info['libraries'] = release['dependencies'] + if self.schema_version.major < 4 and 'dependencies' in release: + download_info['libraries'] = release['dependencies'] - if 'platforms' not in download_info: - download_info['platforms'] = ['*'] + if 'platforms' not in download_info: + download_info['platforms'] = ['*'] - if 
self.schema_version.major == 2: - if 'sublime_text' not in download_info: - download_info['sublime_text'] = '<3000' + if self.schema_version.major == 2: + if 'sublime_text' not in download_info: + download_info['sublime_text'] = '<3000' - if 'details' in release: - download_details = release['details'] + if 'details' in release: + download_details = release['details'] - try: - downloads = None + try: + downloads = None + for client in clients: + downloads = client.download_info(download_details) + if downloads is not None: + break + + if downloads is None: + raise ProviderException(text.format( + ''' + Invalid "details" value "%s" for one of the releases of the + package "%s" in the repository %s. + ''', + (download_details, info['name'], self.repo_url) + )) + + if downloads is False: + raise ProviderException(text.format( + ''' + No valid semver tags found at %s for the + package "%s" in the repository %s. + ''', + (download_details, info['name'], self.repo_url) + )) + + for download in downloads: + new_download = download_info.copy() + new_download.update(download) + info['releases'].append(new_download) + + except (DownloaderException, ClientException, ProviderException) as e: + self.broken_packages[info['name']] = e + + elif download_info: + info['releases'].append(download_info) + + elif self.schema_version.major >= 3: + tags = release.get('tags') + branch = release.get('branch') + + if tags or branch: + try: + base = None + if 'base' in release: + base = release['base'] + elif details: + base = details + + if not base: + raise ProviderException(text.format( + ''' + Missing root-level "details" key, or release-level "base" key + for one of the releases of the package "%s" in the repository %s. 
+ ''', + (info['name'], self.repo_url) + )) + + downloads = None + + if tags: + extra = None + if tags is not True: + extra = tags for client in clients: - downloads = client.download_info(download_details) + downloads = client.download_info_from_tags(base, extra) + if downloads is not None: + break + else: + for client in clients: + downloads = client.download_info_from_branch(base, branch) if downloads is not None: break - if downloads is None: - raise ProviderException(text.format( - ''' - Invalid "details" value "%s" for one of the releases of the - package "%s" in the repository %s. - ''', - (download_details, info['name'], self.repo_url) - )) - - if downloads is False: - raise ProviderException(text.format( - ''' - No valid semver tags found at %s for the - package "%s" in the repository %s. - ''', - (download_details, info['name'], self.repo_url) - )) - - for download in downloads: - new_download = download_info.copy() - new_download.update(download) - info['releases'].append(new_download) - - except (DownloaderException, ClientException, ProviderException) as e: - self.broken_packages[info['name']] = e - - elif download_info: - info['releases'].append(download_info) - - elif self.schema_version.major >= 3: - tags = release.get('tags') - branch = release.get('branch') - - if tags or branch: - try: - base = None - if 'base' in release: - base = release['base'] - elif details: - base = details - - if not base: - raise ProviderException(text.format( - ''' - Missing root-level "details" key, or release-level "base" key - for one of the releases of the package "%s" in the repository %s. 
- ''', - (info['name'], self.repo_url) - )) - - downloads = None - - if tags: - extra = None - if tags is not True: - extra = tags - for client in clients: - downloads = client.download_info_from_tags(base, extra) - if downloads is not None: - break - else: - for client in clients: - downloads = client.download_info_from_branch(base, branch) - if downloads is not None: - break - - if downloads is None: - raise ProviderException(text.format( - ''' - Invalid "base" value "%s" for one of the releases of the - package "%s" in the repository %s. - ''', - (base, info['name'], self.repo_url) - )) - - if downloads is False: - raise ProviderException(text.format( - ''' - No valid semver tags found at %s for the - package "%s" in the repository %s. - ''', - (base, info['name'], self.repo_url) - )) - - for download in downloads: - new_download = download_info.copy() - new_download.update(download) - info['releases'].append(new_download) - - except (DownloaderException, ClientException, ProviderException) as e: - self.broken_packages[info['name']] = e - continue - elif download_info: - info['releases'].append(download_info) - - # Schema version 1.0, 1.1 and 1.2 just require that all values be - # explicitly specified in the package JSON - else: - info['releases'] = platforms_to_releases(package, debug) + if downloads is None: + raise ProviderException(text.format( + ''' + Invalid "base" value "%s" for one of the releases of the + package "%s" in the repository %s. + ''', + (base, info['name'], self.repo_url) + )) + + if downloads is False: + raise ProviderException(text.format( + ''' + No valid semver tags found at %s for the + package "%s" in the repository %s. 
+ ''', + (base, info['name'], self.repo_url) + )) + + for download in downloads: + new_download = download_info.copy() + new_download.update(download) + info['releases'].append(new_download) + + except (DownloaderException, ClientException, ProviderException) as e: + self.broken_packages[info['name']] = e + continue + elif download_info: + info['releases'].append(download_info) info['releases'] = version_sort(info['releases'], 'platforms', reverse=True) @@ -816,9 +769,6 @@ def has_broken_release(): date = release['date'] info['last_modified'] = date - if info['name'] in previous_names: - info['previous_names'].extend(previous_names[info['name']]) - output[info['name']] = info yield (info['name'], info) @@ -836,11 +786,10 @@ def get_sources(self): return [] output = [self.repo_url] - if self.schema_version.major >= 2: - for package in self.repo_info['packages']: - details = package.get('details') - if details: - output.append(details) + for package in self.repo_info['packages']: + details = package.get('details') + if details: + output.append(details) return output def get_renamed_packages(self): @@ -849,9 +798,6 @@ def get_renamed_packages(self): if not self.fetch(): return {} - if self.schema_version.major < 2: - return self.repo_info.get('renamed_packages', {}) - output = {} for package in self.repo_info['packages']: if 'previous_names' not in package: diff --git a/app/lib/package_control/providers/schema_compat.py b/app/lib/package_control/providers/schema_compat.py index 064d980..d25ebb1 100644 --- a/app/lib/package_control/providers/schema_compat.py +++ b/app/lib/package_control/providers/schema_compat.py @@ -1,55 +1,8 @@ -from ..download_manager import update_url from ..deps.semver import SemVer -def platforms_to_releases(info, debug): - """ - Accepts a dict from a schema version 1.0, 1.1 or 1.2 package containing - a "platforms" key and converts it to a list of releases compatible with' - schema version 2.0. 
- - :param info: - The dict of package info - - :param debug: - If debug information should be shown - - :return: - A list of release dicts - """ - - output = [] - - temp_releases = {} - platforms = info.get('platforms') - - for platform in platforms: - for release in platforms[platform]: - key = '%s-%s' % (release['version'], release['url']) - if key not in temp_releases: - temp_releases[key] = { - 'sublime_text': '<3000', - 'version': release['version'], - 'date': info.get('last_modified', '2011-08-01 00:00:00'), - 'url': update_url(release['url'], debug), - 'platforms': [] - } - if platform == '*': - temp_releases[key]['platforms'] = ['*'] - elif temp_releases[key]['platforms'] != ['*']: - temp_releases[key]['platforms'].append(platform) - - for key in temp_releases: - release = temp_releases[key] - if release['platforms'] == ['windows', 'linux', 'osx']: - release['platforms'] = ['*'] - output.append(release) - - return output - - class SchemaVersion(SemVer): - supported_versions = ('1.0', '1.1', '1.2', '2.0', '3.0.0', '4.0.0') + supported_versions = ('2.0', '3.0.0', '4.0.0') @classmethod def _parse(cls, ver): diff --git a/app/models/package/find.py b/app/models/package/find.py index 63ea5d0..1c80b42 100644 --- a/app/models/package/find.py +++ b/app/models/package/find.py @@ -215,7 +215,8 @@ def all(limit_one_per_package=False, only_package_control=False): package_minor_versions[minor_key] = 0 package_minor_versions[minor_key] += 1 - return output + # return repos with at least one release + return {repo: info for repo, info in output if info['releases']} def old(): From 52b11d54b23202b90751423b7e630b1a5c6f6482 Mon Sep 17 00:00:00 2001 From: deathaxe Date: Tue, 7 Feb 2023 20:08:07 +0100 Subject: [PATCH 14/39] Sync with Package Control --- app/lib/package_control/ca_certs.py | 272 +++++++++++------- .../clients/bitbucket_client.py | 7 +- .../package_control/clients/github_client.py | 7 +- .../package_control/clients/gitlab_client.py | 7 +- 
app/lib/package_control/cmd.py | 4 +- .../downloaders/curl_downloader.py | 4 +- .../downloaders/oscrypto_downloader.py | 23 +- .../downloaders/urllib_downloader.py | 10 +- .../downloaders/wget_downloader.py | 10 +- .../downloaders/wininet_downloader.py | 12 +- app/lib/package_control/http_cache.py | 6 +- .../providers/channel_provider.py | 22 +- .../providers/repository_provider.py | 2 +- app/lib/package_control/versions.py | 160 ----------- 14 files changed, 239 insertions(+), 307 deletions(-) delete mode 100644 app/lib/package_control/versions.py diff --git a/app/lib/package_control/ca_certs.py b/app/lib/package_control/ca_certs.py index afccd19..981ad0b 100644 --- a/app/lib/package_control/ca_certs.py +++ b/app/lib/package_control/ca_certs.py @@ -1,17 +1,40 @@ import os -import time import sys +from . import sys_path from .console_write import console_write -from .sys_path import pc_cache_dir, user_config_dir +from .downloaders.downloader_exception import DownloaderException -from .deps.oscrypto import use_ctypes -use_ctypes() -from .deps.oscrypto import trust_list # noqa +try: + import certifi +except ImportError: + certifi = None +try: + from .deps.oscrypto import use_ctypes + use_ctypes() + from .deps.oscrypto import trust_list # noqa + from .deps.oscrypto.errors import CACertsError +except Exception as e: + trust_list = None + console_write('oscrypto trust lists unavailable - %s', e) -ca_bundle_dir = None -user_ca_bundle_dir = None + +MIN_BUNDLE_SIZE = 100 +""" +The least required file size a CA bundle must have to be valid. + +The size is calculated from public key boundaries +and least amount of public key size. 
+ +``MIN_BUNDLE_SIZE = begin (27) + end (25) + newlines (2) + key (?)`` + +``` +-----BEGIN CERTIFICATE----- + +-----END CERTIFICATE----- +``` +""" def get_ca_bundle_path(settings): @@ -21,73 +44,84 @@ def get_ca_bundle_path(settings): :param settings: A dict to look in for the `debug` key + :raises: + OSError or IOError if CA bundle creation fails + :return: The filesystem path to the merged ca bundle path """ - ensure_ca_bundle_dir() + ca_bundle_dir = sys_path.pc_cache_dir() + if not ca_bundle_dir: + raise ValueError("Unknown Package Control cache directory") + + os.makedirs(ca_bundle_dir, exist_ok=True) - system_ca_bundle_path = get_system_ca_bundle_path(settings) + system_ca_bundle_path = get_system_ca_bundle_path(settings, ca_bundle_dir) user_ca_bundle_path = get_user_ca_bundle_path(settings) merged_ca_bundle_path = os.path.join(ca_bundle_dir, 'merged-ca-bundle.crt') - - merged_missing = not os.path.exists(merged_ca_bundle_path) - merged_empty = (not merged_missing) and os.stat(merged_ca_bundle_path).st_size == 0 - - regenerate = merged_missing or merged_empty - if system_ca_bundle_path and not merged_missing: - regenerate = regenerate or os.path.getmtime(system_ca_bundle_path) > os.path.getmtime(merged_ca_bundle_path) - if os.path.exists(user_ca_bundle_path) and not merged_missing: - regenerate = regenerate or os.path.getmtime(user_ca_bundle_path) > os.path.getmtime(merged_ca_bundle_path) + merged_ca_bundle_size = 0 + + try: + # file exists and is not empty + system_ca_bundle_exists = system_ca_bundle_path \ + and os.path.getsize(system_ca_bundle_path) > MIN_BUNDLE_SIZE + except FileNotFoundError: + system_ca_bundle_exists = False + + try: + # file exists and is not empty + user_ca_bundle_exists = user_ca_bundle_path \ + and os.path.getsize(user_ca_bundle_path) > MIN_BUNDLE_SIZE + except FileNotFoundError: + user_ca_bundle_exists = False + + regenerate = system_ca_bundle_exists or user_ca_bundle_exists + if regenerate: + try: + stats = 
os.stat(merged_ca_bundle_path) + except FileNotFoundError: + pass + else: + merged_ca_bundle_size = stats.st_size + # regenerate if merged file is empty + regenerate = merged_ca_bundle_size < MIN_BUNDLE_SIZE + # regenerate if system CA file is newer + if system_ca_bundle_exists and not regenerate: + regenerate = os.path.getmtime(system_ca_bundle_path) > stats.st_mtime + # regenerate if user CA file is newer + if user_ca_bundle_exists and not regenerate: + regenerate = os.path.getmtime(user_ca_bundle_path) > stats.st_mtime if regenerate: with open(merged_ca_bundle_path, 'w', encoding='utf-8') as merged: - if system_ca_bundle_path: + if system_ca_bundle_exists: with open(system_ca_bundle_path, 'r', encoding='utf-8') as system: system_certs = system.read().strip() merged.write(system_certs) if len(system_certs) > 0: merged.write('\n') - if os.path.exists(user_ca_bundle_path): + if user_ca_bundle_exists: with open(user_ca_bundle_path, 'r', encoding='utf-8') as user: user_certs = user.read().strip() merged.write(user_certs) if len(user_certs) > 0: merged.write('\n') - if settings.get('debug'): - console_write( - ''' - Regenerated the merged CA bundle from the system and user CA bundles - ''' - ) - return merged_ca_bundle_path - - -def get_user_ca_bundle_path(settings): - """ - Return the path to the user CA bundle, ensuring the file exists + merged_ca_bundle_size = merged.tell() - :param settings: - A dict to look in for `debug` - - :return: - The filesystem path to the user ca bundle - """ - - ensure_ca_bundle_dir() - - user_ca_bundle_path = os.path.join(user_ca_bundle_dir, 'Package Control.user-ca-bundle') - if not os.path.exists(user_ca_bundle_path): - if settings.get('debug'): + if merged_ca_bundle_size >= MIN_BUNDLE_SIZE and settings.get('debug'): console_write( ''' - Created blank user CA bundle - ''' + Regenerated the merged CA bundle from the system and user CA bundles (%d kB) + ''', + merged_ca_bundle_size / 1024 ) - open(user_ca_bundle_path, 'a').close() - 
return user_ca_bundle_path + if merged_ca_bundle_size < MIN_BUNDLE_SIZE: + raise DownloaderException("No CA bundle available for HTTPS!") + + return merged_ca_bundle_path def print_cert_subject(cert, reason): @@ -116,7 +150,7 @@ def print_cert_subject(cert, reason): ) -def get_system_ca_bundle_path(settings): +def get_system_ca_bundle_path(settings, ca_bundle_dir): """ Get the filesystem path to the system CA bundle. On Linux it looks in a number of predefined places, however on OS X it has to be programatically @@ -127,44 +161,57 @@ def get_system_ca_bundle_path(settings): :param settings: A dict to look in for the `debug` key + :param ca_bundle_dir: + The filesystem path to the directory to store exported CA bundle in + :return: The full filesystem path to the .ca-bundle file, or False on error """ hours_to_cache = 7 * 24 - platform = sys.platform debug = settings.get('debug') ca_path = False - if platform == 'win32' or platform == 'darwin': - ensure_ca_bundle_dir() - ca_path, _ = trust_list._ca_path(ca_bundle_dir) - - exists = os.path.exists(ca_path) - is_empty = False - is_old = False - if exists: - stats = os.stat(ca_path) - is_empty = stats.st_size == 0 - # The bundle is old if it is a week or more out of date - is_old = stats.st_mtime < time.time() - (hours_to_cache * 60 * 60) - - if not exists or is_empty or is_old: - cert_callback = None - if debug: - console_write( - ''' - Generating new CA bundle from system keychain - ''' - ) - cert_callback = print_cert_subject - trust_list.get_path(ca_bundle_dir, hours_to_cache, cert_callback=cert_callback) - if debug: + if sys.platform == 'win32' or sys.platform == 'darwin': + if trust_list is not None: + ca_path, _ = trust_list._ca_path(ca_bundle_dir) + + if trust_list._cached_path_needs_update(ca_path, hours_to_cache): + cert_callback = None + if debug: + console_write( + ''' + Generating new CA bundle from system keychain + ''' + ) + cert_callback = print_cert_subject + + try: + trust_list.get_path(ca_bundle_dir, 
hours_to_cache, cert_callback) + if debug: + console_write( + ''' + Finished generating new CA bundle at %s (%d bytes) + ''', + (ca_path, os.stat(ca_path).st_size) + ) + + except (CACertsError, OSError) as e: + ca_path = False + if debug: + console_write( + ''' + Failed to generate new CA bundle. %s + ''', + e + ) + + elif debug: console_write( ''' - Finished generating new CA bundle at %s (%d bytes) + Found previously exported CA bundle at %s (%d bytes) ''', (ca_path, os.stat(ca_path).st_size) ) @@ -172,9 +219,8 @@ def get_system_ca_bundle_path(settings): elif debug: console_write( ''' - Found previously exported CA bundle at %s (%d bytes) + Unable to generate system CA bundle - oscrypto not available! ''', - (ca_path, os.stat(ca_path).st_size) ) # Linux @@ -189,43 +235,67 @@ def get_system_ca_bundle_path(settings): '/usr/local/share/certs/ca-root-nss.crt', '/etc/ssl/cert.pem' ] - # First try SSL_CERT_FILE - if 'SSL_CERT_FILE' in os.environ: - paths.insert(0, os.environ['SSL_CERT_FILE']) + + # Prepend SSL_CERT_FILE only, if it doesn't match ST4's certifi CA bundle. + # Otherwise we'd never pick up any OS level CA bundle. + ssl_cert_file = os.environ.get('SSL_CERT_FILE') + if ssl_cert_file and not (certifi and os.path.samefile(ssl_cert_file, certifi.where())): + paths.insert(0, ssl_cert_file) + for path in paths: - if os.path.exists(path) and os.path.getsize(path) > 0: + if os.path.isfile(path) and os.path.getsize(path) > MIN_BUNDLE_SIZE: ca_path = path break - if debug and ca_path: + if debug: + if ca_path: + console_write( + ''' + Found system CA bundle at %s (%d bytes) + ''', + (ca_path, os.stat(ca_path).st_size) + ) + else: + console_write( + ''' + Failed to find system CA bundle. + ''' + ) + + if ca_path is False and certifi is not None: + ca_path = certifi.where() + if debug: console_write( ''' - Found system CA bundle at %s (%d bytes) + Using CA bundle from "certifi %s" instead. 
''', - (ca_path, os.stat(ca_path).st_size) + certifi.__version__ ) return ca_path -def ensure_ca_bundle_dir(): +def get_user_ca_bundle_path(settings): """ - Make sure we have a placed to save the merged-ca-bundle and system-ca-bundle + Return the path to the user CA bundle, ensuring the file exists + + :param settings: + A dict to look in for `debug` + + :return: + The full filesystem path to the .user-ca-bundle file, or False on error """ - # If the sublime module is available, we bind this value at run time - # since the sublime.packages_path() is not available at import time - global ca_bundle_dir - global user_ca_bundle_dir + user_ca_bundle = os.path.join(sys_path.user_config_dir(), 'Package Control.user-ca-bundle') + try: + open(user_ca_bundle, 'xb').close() + if settings.get('debug'): + console_write('Created blank user CA bundle') + except FileExistsError: + pass + except OSError as e: + user_ca_bundle = False + if settings.get('debug'): + console_write('Unable to create blank user CA bundle - %s', e) - if not ca_bundle_dir: - ca_bundle_dir = pc_cache_dir() - if not user_ca_bundle_dir: - user_ca_bundle_dir = user_config_dir() - if not os.path.exists(ca_bundle_dir): - try: - os.mkdir(ca_bundle_dir) - except EnvironmentError: - ca_bundle_dir = '/var/tmp/package_control' - if not os.path.exists(ca_bundle_dir): - os.mkdir(ca_bundle_dir) + return user_ca_bundle diff --git a/app/lib/package_control/clients/bitbucket_client.py b/app/lib/package_control/clients/bitbucket_client.py index fb5fa5d..da2fbbd 100644 --- a/app/lib/package_control/clients/bitbucket_client.py +++ b/app/lib/package_control/clients/bitbucket_client.py @@ -2,7 +2,7 @@ from urllib.parse import urlencode, quote from ..downloaders.downloader_exception import DownloaderException -from ..versions import version_match_prefix +from ..package_version import version_match_prefix from .json_api_client import JSONApiClient @@ -42,7 +42,10 @@ def user_repo_branch(url): (None, None, None) if no match. 
""" - match = re.match(r'^https?://bitbucket\.org/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/src/([^/#?]+)/?|/?)|/?)$', url) + match = re.match( + r'^https?://bitbucket\.org/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/src/([^#?]*[^/#?])/?|/?)|/?)$', + url + ) if match: return match.groups() diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py index 9952db2..6b5c0c4 100644 --- a/app/lib/package_control/clients/github_client.py +++ b/app/lib/package_control/clients/github_client.py @@ -2,7 +2,7 @@ from urllib.parse import urlencode, quote from ..downloaders.downloader_exception import DownloaderException -from ..versions import version_match_prefix +from ..package_version import version_match_prefix from .json_api_client import JSONApiClient @@ -27,7 +27,10 @@ def user_repo_branch(url): (user name, None, None) or (None, None, None) if no match. """ - match = re.match(r'^https?://github\.com/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/tree/([^/#?]+)/?|/?)|/?)$', url) + match = re.match( + r'^https?://github\.com/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/tree/([^#?]*[^/#?])/?|/?)|/?)$', + url + ) if match: return match.groups() diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py index dc1a816..2ead806 100644 --- a/app/lib/package_control/clients/gitlab_client.py +++ b/app/lib/package_control/clients/gitlab_client.py @@ -2,7 +2,7 @@ from urllib.parse import urlencode, quote from ..downloaders.downloader_exception import DownloaderException -from ..versions import version_match_prefix +from ..package_version import version_match_prefix from .json_api_client import JSONApiClient @@ -30,7 +30,10 @@ def user_repo_branch(url): The branch name may be a branch name or a commit """ - match = re.match(r'^https?://gitlab\.com/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/-/tree/([^/#?]+)/?|/?)|/?)$', url) + match = re.match( + 
r'^https?://gitlab\.com/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/-/tree/([^#?]*[^/#?])/?|/?)|/?)$', + url + ) if match: return match.groups() diff --git a/app/lib/package_control/cmd.py b/app/lib/package_control/cmd.py index 62a969c..c95d0c6 100644 --- a/app/lib/package_control/cmd.py +++ b/app/lib/package_control/cmd.py @@ -46,7 +46,7 @@ def create_cmd(args, basename_binary=False): return ' '.join(escaped_args) -class Cli(object): +class Cli: """ Base class for running command line apps @@ -137,6 +137,8 @@ def execute(self, args, cwd, input=None, encoding='utf-8', meaningful_output=Fal is_vcs = True elif re.search('hg', binary_name): is_vcs = True + else: + is_vcs = False if sublime: def kill_proc(): diff --git a/app/lib/package_control/downloaders/curl_downloader.py b/app/lib/package_control/downloaders/curl_downloader.py index 5fda6e2..d0e8c1b 100644 --- a/app/lib/package_control/downloaders/curl_downloader.py +++ b/app/lib/package_control/downloaders/curl_downloader.py @@ -2,15 +2,15 @@ import re import os +from ..ca_certs import get_ca_bundle_path from ..console_write import console_write from .cli_downloader import CliDownloader from .non_clean_exit_error import NonCleanExitError from .downloader_exception import DownloaderException -from ..ca_certs import get_ca_bundle_path -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader class CurlDownloader(CliDownloader, DecodingDownloader, LimitingDownloader, CachingDownloader, BasicAuthDownloader): diff --git a/app/lib/package_control/downloaders/oscrypto_downloader.py b/app/lib/package_control/downloaders/oscrypto_downloader.py index 97e7082..ddae04c 100644 --- a/app/lib/package_control/downloaders/oscrypto_downloader.py +++ b/app/lib/package_control/downloaders/oscrypto_downloader.py @@ -3,27 +3,26 @@ from 
__future__ import unicode_literals, division, absolute_import, print_function import base64 -import re -import sys -import os import hashlib +import os +import re import socket +import sys from urllib.parse import urlparse from urllib.request import parse_keqv_list, parse_http_list +from .. import text +from ..ca_certs import get_user_ca_bundle_path from ..console_write import console_write +from ..deps.asn1crypto.util import OrderedDict +from ..deps.asn1crypto import pem, x509 +from ..deps.oscrypto import use_ctypes, use_openssl from .downloader_exception import DownloaderException from .oscrypto_downloader_exception import OscryptoDownloaderException -from ..ca_certs import get_user_ca_bundle_path -from .decoding_downloader import DecodingDownloader -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader -from .. import text - -from ..deps.asn1crypto.util import OrderedDict -from ..deps.asn1crypto import pem, x509 -from ..deps.oscrypto import use_ctypes, use_openssl +from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader use_ctypes() @@ -33,7 +32,7 @@ # it has OpenSSL statically linked, and we can't dlopen() that. # ST 4081 broke sys.executable to return "sublime_text", but other 4xxx builds # will contain "plugin_host". 
-if sys.version_info == (3, 8) and sys.platform == 'linux' and ( +if sys.version_info[:2] == (3, 8) and sys.platform == 'linux' and ( 'sublime_text' in sys.executable or 'plugin_host' in sys.executable): install_dir = os.path.dirname(sys.executable) diff --git a/app/lib/package_control/downloaders/urllib_downloader.py b/app/lib/package_control/downloaders/urllib_downloader.py index ecfb81b..65d1f90 100644 --- a/app/lib/package_control/downloaders/urllib_downloader.py +++ b/app/lib/package_control/downloaders/urllib_downloader.py @@ -1,5 +1,6 @@ import re import sys +import urllib.request as urllib_compat from http.client import HTTPException, BadStatusLine from urllib.request import ( build_opener, @@ -10,19 +11,18 @@ Request, ) from urllib.error import HTTPError, URLError -import urllib.request as urllib_compat from socket import error as ConnectionError +from .. import text +from ..ca_certs import get_ca_bundle_path from ..console_write import console_write from ..http.validating_https_handler import ValidatingHTTPSHandler from ..http.debuggable_http_handler import DebuggableHTTPHandler from .downloader_exception import DownloaderException -from ..ca_certs import get_ca_bundle_path -from .decoding_downloader import DecodingDownloader -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader -from .. 
import text +from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader class UrlLibDownloader(DecodingDownloader, LimitingDownloader, CachingDownloader, BasicAuthDownloader): diff --git a/app/lib/package_control/downloaders/wget_downloader.py b/app/lib/package_control/downloaders/wget_downloader.py index d4e89b7..d010990 100644 --- a/app/lib/package_control/downloaders/wget_downloader.py +++ b/app/lib/package_control/downloaders/wget_downloader.py @@ -1,18 +1,18 @@ -import tempfile -import re import os +import re import sys +import tempfile +from ..ca_certs import get_ca_bundle_path from ..console_write import console_write from .cli_downloader import CliDownloader from .non_http_error import NonHttpError from .non_clean_exit_error import NonCleanExitError from .downloader_exception import DownloaderException -from ..ca_certs import get_ca_bundle_path -from .decoding_downloader import DecodingDownloader -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader +from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader class WgetDownloader(CliDownloader, DecodingDownloader, LimitingDownloader, CachingDownloader, BasicAuthDownloader): diff --git a/app/lib/package_control/downloaders/wininet_downloader.py b/app/lib/package_control/downloaders/wininet_downloader.py index a490cd6..da7da77 100644 --- a/app/lib/package_control/downloaders/wininet_downloader.py +++ b/app/lib/package_control/downloaders/wininet_downloader.py @@ -1,22 +1,22 @@ from ctypes import windll, wintypes import ctypes -import re import datetime -import struct # To prevent import errors in thread with datetime import locale # noqa +import re +import struct from urllib.parse import urlparse -from ..console_write import console_write from .. 
import text -from .non_http_error import NonHttpError +from ..console_write import console_write from .http_error import HttpError +from .non_http_error import NonHttpError from .downloader_exception import DownloaderException from .win_downloader_exception import WinDownloaderException -from .decoding_downloader import DecodingDownloader -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader +from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader wininet = windll.wininet diff --git a/app/lib/package_control/http_cache.py b/app/lib/package_control/http_cache.py index 9a76b61..e9e4977 100644 --- a/app/lib/package_control/http_cache.py +++ b/app/lib/package_control/http_cache.py @@ -1,17 +1,17 @@ import os import time -from .sys_path import pc_cache_dir +from . import sys_path -class HttpCache(object): +class HttpCache: """ A data store for caching HTTP response data. 
""" def __init__(self, ttl): - self.base_path = os.path.join(pc_cache_dir(), 'http_cache') + self.base_path = os.path.join(sys_path.pc_cache_dir(), 'http_cache') os.makedirs(self.base_path, exist_ok=True) self.clear(int(ttl)) diff --git a/app/lib/package_control/providers/channel_provider.py b/app/lib/package_control/providers/channel_provider.py index 8a56893..e1e844e 100644 --- a/app/lib/package_control/providers/channel_provider.py +++ b/app/lib/package_control/providers/channel_provider.py @@ -5,7 +5,7 @@ from ..console_write import console_write from ..download_manager import http_get, resolve_urls, update_url -from ..versions import version_sort +from ..package_version import version_sort from .provider_exception import ProviderException from .schema_compat import SchemaVersion @@ -18,6 +18,10 @@ def __init__(self, channel, reason_message): ' %s' % (channel.channel_url, reason_message)) +class UncachedChannelRepositoryError(ProviderException): + pass + + class ChannelProvider: """ Retrieves a channel and provides an API into the information @@ -183,8 +187,8 @@ def get_packages(self, repo_url): The URL of the repository to get the cached info of :raises: - ProviderException: when an error occurs with the channel contents DownloaderException: when an error occurs trying to open a URL + UncachedChannelRepositoryError when no cache entry exists for repo_url :return: A generator of @@ -219,7 +223,11 @@ def get_packages(self, repo_url): self.fetch() - for package in self.channel_info.get('packages_cache', {}).get(repo_url, []): + packages_cache = self.channel_info.get('packages_cache', {}) + if repo_url not in packages_cache: + raise UncachedChannelRepositoryError(repo_url) + + for package in packages_cache[repo_url]: if package['releases']: yield (package['name'], package) @@ -231,8 +239,8 @@ def get_libraries(self, repo_url): The URL of the repository to get the cached info of :raises: - ProviderException: when an error occurs with the channel contents 
DownloaderException: when an error occurs trying to open a URL + UncachedChannelRepositoryError when no cache entry exists for repo_url :return: A generator of @@ -260,7 +268,11 @@ def get_libraries(self, repo_url): self.fetch() - for library in self.channel_info.get('libraries_cache', {}).get(repo_url, []): + libraries_cache = self.channel_info.get('libraries_cache', {}) + if repo_url not in libraries_cache: + raise UncachedChannelRepositoryError(repo_url) + + for library in libraries_cache[repo_url]: if library['releases']: yield (library['name'], library) diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index dc3a621..fd03e53 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ b/app/lib/package_control/providers/repository_provider.py @@ -12,7 +12,7 @@ from ..console_write import console_write from ..download_manager import http_get, resolve_urls, update_url from ..downloaders.downloader_exception import DownloaderException -from ..versions import version_sort +from ..package_version import version_sort from .base_repository_provider import BaseRepositoryProvider from .provider_exception import ProviderException from .schema_compat import SchemaVersion diff --git a/app/lib/package_control/versions.py b/app/lib/package_control/versions.py deleted file mode 100644 index e965c0b..0000000 --- a/app/lib/package_control/versions.py +++ /dev/null @@ -1,160 +0,0 @@ -import re - -from .deps.semver import SemVer -from .console_write import console_write - - -class PackageVersion(SemVer): - - _date_pattern = re.compile(r'^(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$') - _pre_semver_pattern = re.compile(r'^(\d+)(?:\.(\d+)(?:\.(\d+)(?:[T\.](\d+(\.\d+)*))?)?)?$') - - @classmethod - def _parse(cls, ver): - """ - Converts a string version number into SemVer. If the version is based on - a date, converts to 0.0.1+yyyy.mm.dd.hh.mm.ss. 
- - :param ver: - A string, dict with 'version' key, or a SemVer object - - :raises: - TypeError, if ver is not one of: str, dict with version, SemVer - ValueError, if ver is no valid version string - - :return: - A list of 5 items representing a valid semantic version number - """ - - # Allowing passing in a dict containing info about a package - if isinstance(ver, dict): - if 'version' not in ver: - raise TypeError("%s is not a package or library release" % ver) - ver = ver['version'] - - if isinstance(ver, SemVer): - return ver - - if not isinstance(ver, str): - raise TypeError("%r is not a string" % ver) - - # Trim v off of the front - if ver.startswith('v'): - ver = ver[1:] - - # Match semver compatible strings - match = cls._match_regex.match(ver) - if match: - g = list(match.groups()) - for i in range(3): - g[i] = int(g[i]) - - return g - - # We prepend 0 to all date-based version numbers so that developers - # may switch to explicit versioning from GitHub/GitLab/BitBucket - # versioning based on commit dates. - # - # The resulting semver is alwass 0.0.1 with timestamp being used - # as build number, so any explicitly choosen version (via tags) will - # be greater, once a package moves from branch to tag based releases. 
- # - # The result looks like: - # 0.0.1+2020.07.15.10.50.38 - match = cls._date_pattern.match(ver) - if match: - return [0, 0, 1, None, '.'.join(match.groups())] - - # This handles versions that were valid pre-semver with 1 to 4+ dotted - # groups, such as 1, 1.6, or 1.6.9.0 - match = cls._pre_semver_pattern.match(ver) - if match: - return [ - int(match.group(1) or 0), - int(match.group(2) or 0), - int(match.group(3) or 0), - None, - match.group(4) - ] - - raise ValueError("'%s' is not a valid SemVer string" % ver) - - -def version_exclude_prerelease(versions): - """ - Remove prerelease versions for a list of SemVer versions - - :param versions: - The list of versions to filter - - :return: - The list of versions with pre-releases removed - """ - - return [v for v in versions if not PackageVersion(v).prerelease] - - -def version_match_prefix(version, filter_prefix): - """ - Create a SemVer for a given version, if it matches filter_prefix. - - :param version: - The version string to match - - :param filter_prefix: - The prefix to match versions against - - :returns: - SemVer, if version is valid and matches given filter_prefix - None, if version is invalid or doesn't match filter_prefix - """ - - try: - if filter_prefix: - if version.startswith(filter_prefix): - return PackageVersion(version[len(filter_prefix):]) - else: - return PackageVersion(version) - except ValueError: - pass - return None - - -def version_sort(sortable, *fields, **kwargs): - """ - Sorts a list that is a list of versions, or dicts with a 'version' key. - Can also secondly sort by another field. 
- - :param sortable: - The list to sort - - :param *fields: - If sortable is a list of dicts, perform secondary sort via these fields, - in order - - :param **kwargs: - Keyword args to pass on to sorted() - - :return: - A copy of sortable that is sorted according to SemVer rules - """ - - def _version_sort_key(item): - result = PackageVersion(item) - if fields: - values = [result] - for field in fields: - values.append(item[field]) - result = tuple(values) - return result - - try: - return sorted(sortable, key=_version_sort_key, **kwargs) - except (ValueError) as e: - console_write( - ''' - Error sorting versions - %s - ''', - e - ) - return [] From c5757d8d48c459a09c0b8bcdef7bbd21d6eeda4f Mon Sep 17 00:00:00 2001 From: deathaxe Date: Tue, 21 Mar 2023 19:08:59 +0100 Subject: [PATCH 15/39] Sync with Package Control source: https://github.com/wbond/package_control/commit/32c0e0b9ae1b68a4a59f2bb0ef66d4eb1187b5c8 --- app/lib/package_control/cmd.py | 19 +++++----- app/lib/package_control/download_manager.py | 35 +++++++++++++++++++ .../providers/repository_provider.py | 33 ++++++++++++----- 3 files changed, 67 insertions(+), 20 deletions(-) diff --git a/app/lib/package_control/cmd.py b/app/lib/package_control/cmd.py index c95d0c6..ae576fe 100644 --- a/app/lib/package_control/cmd.py +++ b/app/lib/package_control/cmd.py @@ -165,16 +165,13 @@ def kill_proc(): prompt. Please ensure %s works without a prompt, or change the "ignore_vcs_packages" Package Control setting to true. - - Sublime Text will need to be restarted once these - changes are made. 
''', binary_name ) show_error(message) sublime.set_timeout(kill_proc, 60000) - output, _ = proc.communicate(input) + output, error = proc.communicate(input) stuck = False @@ -182,7 +179,10 @@ def kill_proc(): output = output.replace('\r\n', '\n').rstrip(' \n\r') if proc.returncode not in self.ok_returncodes: - if not ignore_errors or re.search(ignore_errors, output) is None: + if error: + error = error.decode(encoding) + error = error.replace('\r\n', '\n').rstrip(' \n\r') + if not ignore_errors or re.search(ignore_errors, error or output) is None: message = text.format( ''' Error executing: %s @@ -191,20 +191,17 @@ def kill_proc(): %s ''', - (create_cmd(args), orig_cwd, output) - ) + (create_cmd(args), orig_cwd, error or output) + ).rstrip() if is_vcs: message += text.format( ''' VCS-based packages can be ignored by changing the "ignore_vcs_packages" setting to true. - - Sublime Text will need to be restarted once the - setting is changed. ''' ) - show_error(message) + console_write(message) return False if meaningful_output and self.debug and len(output) > 0: diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py index 05c93aa..5a65514 100644 --- a/app/lib/package_control/download_manager.py +++ b/app/lib/package_control/download_manager.py @@ -185,6 +185,41 @@ def resolve_urls(root_url, uris): yield url +def resolve_url(root_url, url): + """ + Convert a list of relative uri's to absolute urls/paths. + + :param root_url: + The root url string + + :param uris: + An iteratable of relative uri's to resolve. 
+ + :returns: + A generator of resolved URLs + """ + + scheme_match = re.match(r'(https?:)//', root_url, re.I) + if scheme_match is None: + root_dir = os.path.dirname(root_url) + else: + root_dir = '' + + if url.startswith('//'): + if scheme_match is not None: + return scheme_match.group(1) + url + else: + return 'https:' + url + + elif url.startswith('./') or url.startswith('../'): + if root_dir: + return os.path.normpath(os.path.join(root_dir, url)) + else: + return urljoin(root_url, url) + + return url + + def update_url(url, debug): """ Takes an old, out-dated URL and updates it. Mostly used with GitHub URLs diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index fd03e53..7b8e649 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ b/app/lib/package_control/providers/repository_provider.py @@ -10,7 +10,7 @@ from ..clients.github_client import GitHubClient from ..clients.gitlab_client import GitLabClient from ..console_write import console_write -from ..download_manager import http_get, resolve_urls, update_url +from ..download_manager import http_get, resolve_url, resolve_urls, update_url from ..downloaders.downloader_exception import DownloaderException from ..package_version import version_sort from .base_repository_provider import BaseRepositoryProvider @@ -310,7 +310,7 @@ def assert_release_keys(download_info): # Validate url value = release.get('url') if value: - download_info['url'] = update_url(value, debug) + download_info['url'] = update_url(resolve_url(self.repo_url, value), debug) # Validate supported platforms value = release.get('platforms', ['*']) @@ -347,6 +347,7 @@ def assert_release_keys(download_info): (info['name'], self.repo_url) )) + base_url = resolve_url(self.repo_url, base) downloads = None if tags: @@ -354,12 +355,12 @@ def assert_release_keys(download_info): if tags is not True: extra = tags for client in clients: - downloads = 
client.download_info_from_tags(base, extra) + downloads = client.download_info_from_tags(base_url, extra) if downloads is not None: break else: for client in clients: - downloads = client.download_info_from_branch(base, branch) + downloads = client.download_info_from_branch(base_url, branch) if downloads is not None: break @@ -506,6 +507,8 @@ def get_packages(self, invalid_sources=None): # Try to grab package-level details from GitHub or BitBucket if details: + details = resolve_url(self.repo_url, details) + if invalid_sources is not None and details in invalid_sources: continue @@ -585,7 +588,7 @@ def get_packages(self, invalid_sources=None): if field in release: value = release[field] if field == 'url': - value = update_url(value, debug) + value = update_url(resolve_url(self.repo_url, value), debug) if field == 'platforms' and not isinstance(release['platforms'], list): value = [value] download_info[field] = value @@ -593,6 +596,17 @@ def get_packages(self, invalid_sources=None): if self.schema_version.major < 4 and 'dependencies' in release: download_info['libraries'] = release['dependencies'] + if self.schema_version.major >= 4: + # Package releases may optionally contain `python_versions` list to tell + # which python version they are compatibilible with. + # The main purpose is to be able to opt-in unmaintained packages to python 3.8 + # if they are known not to cause trouble. 
+ value = release.get('python_versions') + if value: + if not isinstance(value, list): + value = [value] + download_info['python_versions'] = value + if 'platforms' not in download_info: download_info['platforms'] = ['*'] @@ -601,7 +615,7 @@ def get_packages(self, invalid_sources=None): download_info['sublime_text'] = '<3000' if 'details' in release: - download_details = release['details'] + download_details = resolve_url(self.repo_url, release['details']) try: downloads = None @@ -661,6 +675,7 @@ def get_packages(self, invalid_sources=None): (info['name'], self.repo_url) )) + base_url = resolve_url(self.repo_url, base) downloads = None if tags: @@ -668,12 +683,12 @@ def get_packages(self, invalid_sources=None): if tags is not True: extra = tags for client in clients: - downloads = client.download_info_from_tags(base, extra) + downloads = client.download_info_from_tags(base_url, extra) if downloads is not None: break else: for client in clients: - downloads = client.download_info_from_branch(base, branch) + downloads = client.download_info_from_branch(base_url, branch) if downloads is not None: break @@ -752,7 +767,7 @@ def has_broken_release(): info[field] = [] if 'readme' in info: - info['readme'] = update_url(info['readme'], debug) + info['readme'] = update_url(resolve_url(self.repo_url, info['readme']), debug) for field in ['description', 'readme', 'issues', 'donate', 'buy']: if field not in info: From ec99b1648782d5ea6e00d33529c10539bc0d2291 Mon Sep 17 00:00:00 2001 From: deathaxe Date: Tue, 21 Mar 2023 21:23:08 +0100 Subject: [PATCH 16/39] Fix package model --- app/models/package/find.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/models/package/find.py b/app/models/package/find.py index 1c80b42..fc5855a 100644 --- a/app/models/package/find.py +++ b/app/models/package/find.py @@ -216,7 +216,7 @@ def all(limit_one_per_package=False, only_package_control=False): package_minor_versions[minor_key] += 1 # return repos with at least one 
release - return {repo: info for repo, info in output if info['releases']} + return {repo: info for repo, info in output.items() if info['releases']} def old(): From 28250f7e36784a0bb91f64c6bc56662afa481802 Mon Sep 17 00:00:00 2001 From: deathaxe Date: Sat, 1 Apr 2023 18:20:22 +0200 Subject: [PATCH 17/39] Sync with Package Control source: https://github.com/wbond/package_control/commit/4edd2a749dac62480dd8999c98640020cfeb8b40 --- .../clients/bitbucket_client.py | 5 +- .../package_control/clients/github_client.py | 8 +- .../providers/base_repository_provider.py | 6 +- .../bitbucket_repository_provider.py | 8 +- .../providers/channel_provider.py | 68 +++++------ .../providers/github_repository_provider.py | 8 +- .../providers/github_user_provider.py | 11 +- .../providers/gitlab_repository_provider.py | 8 +- .../providers/gitlab_user_provider.py | 11 +- .../providers/repository_provider.py | 113 ++++++++++-------- 10 files changed, 130 insertions(+), 116 deletions(-) diff --git a/app/lib/package_control/clients/bitbucket_client.py b/app/lib/package_control/clients/bitbucket_client.py index da2fbbd..64c9df1 100644 --- a/app/lib/package_control/clients/bitbucket_client.py +++ b/app/lib/package_control/clients/bitbucket_client.py @@ -253,13 +253,16 @@ def repo_info(self, url): if author is None: author = repo_info['owner'].get('username') + is_client = self.settings.get('min_api_calls', False) + readme_url = None if is_client else self._readme_url(user_repo, branch) + return { 'name': repo_info['name'], 'description': repo_info['description'] or 'No description provided', 'homepage': repo_info['website'] or url, 'author': author, 'donate': None, - 'readme': self._readme_url(user_repo, branch), + 'readme': readme_url, 'issues': issues_url if repo_info['has_issues'] else None, 'default_branch': branch } diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py index 6b5c0c4..dabe06a 100644 --- 
a/app/lib/package_control/clients/github_client.py +++ b/app/lib/package_control/clients/github_client.py @@ -178,6 +178,7 @@ def _get_releases(user_repo, tag_prefix=None, page_size=100): return user_repo = tags_match.group(1) + is_client = self.settings.get('min_api_calls', False) max_releases = self.settings.get('max_releases', 0) num_releases = 0 @@ -185,8 +186,11 @@ def _get_releases(user_repo, tag_prefix=None, page_size=100): for release in sorted(_get_releases(user_repo, tag_prefix), reverse=True): version, tag, tag_url = release - tag_info = self.fetch_json(tag_url) - timestamp = tag_info['commit']['committer']['date'][0:19].replace('T', ' ') + if is_client: + timestamp = 0 + else: + tag_info = self.fetch_json(tag_url) + timestamp = tag_info['commit']['committer']['date'][0:19].replace('T', ' ') output.append(self._make_download_info(user_repo, tag, str(version), timestamp)) diff --git a/app/lib/package_control/providers/base_repository_provider.py b/app/lib/package_control/providers/base_repository_provider.py index adf5b00..6517534 100644 --- a/app/lib/package_control/providers/base_repository_provider.py +++ b/app/lib/package_control/providers/base_repository_provider.py @@ -28,8 +28,9 @@ class BaseRepositoryProvider: __slots__ = [ 'broken_libriaries' 'broken_packages', - 'cache', 'failed_sources', + 'libraries', + 'packages', 'repo_url', 'settings', ] @@ -38,7 +39,8 @@ def __init__(self, repo_url, settings): self.broken_libriaries = {} self.broken_packages = {} self.failed_sources = {} - self.cache = {} + self.libraries = None + self.packages = None self.repo_url = repo_url self.settings = settings diff --git a/app/lib/package_control/providers/bitbucket_repository_provider.py b/app/lib/package_control/providers/bitbucket_repository_provider.py index 24b7b46..be1e496 100644 --- a/app/lib/package_control/providers/bitbucket_repository_provider.py +++ b/app/lib/package_control/providers/bitbucket_repository_provider.py @@ -91,8 +91,8 @@ def 
get_packages(self, invalid_sources=None): tuples """ - if 'get_packages' in self.cache: - for key, value in self.cache['get_packages'].items(): + if self.packages is not None: + for key, value in self.packages.items(): yield (key, value) return @@ -130,9 +130,9 @@ def get_packages(self, invalid_sources=None): 'donate': repo_info['donate'], 'buy': None } - self.cache['get_packages'] = {name: details} + self.packages = {name: details} yield (name, details) except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e - self.cache['get_packages'] = {} + self.packages = {} diff --git a/app/lib/package_control/providers/channel_provider.py b/app/lib/package_control/providers/channel_provider.py index e1e844e..fb26030 100644 --- a/app/lib/package_control/providers/channel_provider.py +++ b/app/lib/package_control/providers/channel_provider.py @@ -7,7 +7,7 @@ from ..download_manager import http_get, resolve_urls, update_url from ..package_version import version_sort from .provider_exception import ProviderException -from .schema_compat import SchemaVersion +from .schema_version import SchemaVersion class InvalidChannelFileException(ProviderException): @@ -50,16 +50,20 @@ class ChannelProvider: """ __slots__ = [ - 'channel_info', 'channel_url', 'schema_version', + 'repositories', + 'libraries_cache', + 'packages_cache', 'settings', ] def __init__(self, channel_url, settings): - self.channel_info = None self.channel_url = channel_url - self.schema_version = None + self.schema_version = SchemaVersion('4.0.0') + self.repositories = None + self.libraries_cache = {} + self.packages_cache = {} self.settings = settings @classmethod @@ -91,7 +95,7 @@ def fetch(self): DownloaderException: when an error occurs trying to open a URL """ - if self.channel_info is not None: + if self.repositories is not None: return if re.match(r'https?://', self.channel_url, re.I): @@ -129,8 +133,9 @@ def fetch(self): if 'repositories' not in 
channel_info: raise InvalidChannelFileException(self, 'the "repositories" JSON key is missing.') - self.channel_info = self._migrate_channel_info(channel_info, schema_version) - self.schema_version = schema_version + self.repositories = self._migrate_repositories(channel_info, schema_version) + self.packages_cache = self._migrate_packages_cache(channel_info, schema_version) + self.libraries_cache = self._migrate_libraries_cache(channel_info, schema_version) def get_renamed_packages(self): """ @@ -145,7 +150,7 @@ def get_renamed_packages(self): self.fetch() output = {} - for package in chain(*self.channel_info.get('packages_cache', {}).values()): + for package in chain(*self.packages_cache.values()): previous_names = package.get('previous_names', []) if not isinstance(previous_names, list): previous_names = [previous_names] @@ -166,7 +171,7 @@ def get_repositories(self): self.fetch() - return self.channel_info['repositories'] + return self.repositories def get_sources(self): """ @@ -210,6 +215,7 @@ def get_packages(self, repo_url): { 'sublime_text': compatible version, 'platforms': [platform name, ...], + 'python_versions': ['3.3', '3.8'], 'url': url, 'date': date, 'version': version, @@ -223,11 +229,10 @@ def get_packages(self, repo_url): self.fetch() - packages_cache = self.channel_info.get('packages_cache', {}) - if repo_url not in packages_cache: + if repo_url not in self.packages_cache: raise UncachedChannelRepositoryError(repo_url) - for package in packages_cache[repo_url]: + for package in self.packages_cache[repo_url]: if package['releases']: yield (package['name'], package) @@ -268,11 +273,10 @@ def get_libraries(self, repo_url): self.fetch() - libraries_cache = self.channel_info.get('libraries_cache', {}) - if repo_url not in libraries_cache: + if repo_url not in self.libraries_cache: raise UncachedChannelRepositoryError(repo_url) - for library in libraries_cache[repo_url]: + for library in self.libraries_cache[repo_url]: if library['releases']: yield 
(library['name'], library) @@ -290,7 +294,7 @@ def get_broken_packages(self): self.fetch() - for package in chain(*self.channel_info.get('packages_cache', {}).values()): + for package in chain(*self.packages_cache.values()): if not package['releases']: yield package['name'] @@ -308,29 +312,10 @@ def get_broken_libraries(self): self.fetch() - for library in chain(*self.channel_info.get('libraries_cache', {}).values()): + for library in chain(*self.libraries_cache.values()): if not library['releases']: yield library['name'] - def _migrate_channel_info(self, channel_info, schema_version): - """ - Transform input channel_info to scheme version 4.0.0 - - :param channel_info: - The input channel information of any scheme version - - :param schema_version: - The schema version of the input channel information - - :returns: - channel_info object of scheme version 4.0.0 - """ - - channel_info['repositories'] = self._migrate_repositories(channel_info, schema_version) - channel_info['packages_cache'] = self._migrate_packages_cache(channel_info, schema_version) - channel_info['libraries_cache'] = self._migrate_libraries_cache(channel_info, schema_version) - return channel_info - def _migrate_repositories(self, channel_info, schema_version): debug = self.settings.get('debug') @@ -373,6 +358,11 @@ def _migrate_packages_cache(self, channel_info, schema_version): if field not in package: package[field] = defaults[field] + # Workaround for packagecontrol.io, which adds `authors` instead of `author` + # to cached packages and libraries. 
+ if 'authors' in package: + package['author'] = package.pop('authors') + releases = version_sort(package.get('releases', []), 'platforms', reverse=True) package['releases'] = releases package['last_modified'] = releases[0]['date'] if releases else None @@ -413,11 +403,13 @@ def _migrate_libraries_cache(self, channel_info, schema_version): del library['load_order'] for release in library['releases']: release['python_versions'] = ['3.3'] + library['releases'] = version_sort(library['releases'], 'platforms', reverse=True) + else: libraries_cache = channel_info.get('libraries_cache', {}) - for library in chain(*libraries_cache.values()): - library['releases'] = version_sort(library['releases'], 'platforms', reverse=True) + for library in chain(*libraries_cache.values()): + library['releases'] = version_sort(library['releases'], 'platforms', reverse=True) # Fix any out-dated repository URLs in libraries cache return {update_url(name, debug): info for name, info in libraries_cache.items()} diff --git a/app/lib/package_control/providers/github_repository_provider.py b/app/lib/package_control/providers/github_repository_provider.py index ae51fb2..c69b6f6 100644 --- a/app/lib/package_control/providers/github_repository_provider.py +++ b/app/lib/package_control/providers/github_repository_provider.py @@ -99,8 +99,8 @@ def get_packages(self, invalid_sources=None): tuples """ - if 'get_packages' in self.cache: - for key, value in self.cache['get_packages'].items(): + if self.packages is not None: + for key, value in self.packages.items(): yield (key, value) return @@ -138,9 +138,9 @@ def get_packages(self, invalid_sources=None): 'donate': repo_info['donate'], 'buy': None } - self.cache['get_packages'] = {name: details} + self.packages = {name: details} yield (name, details) except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e - self.cache['get_packages'] = {} + self.packages = {} diff --git 
a/app/lib/package_control/providers/github_user_provider.py b/app/lib/package_control/providers/github_user_provider.py index f04689f..f35e976 100644 --- a/app/lib/package_control/providers/github_user_provider.py +++ b/app/lib/package_control/providers/github_user_provider.py @@ -87,8 +87,8 @@ def get_packages(self, invalid_sources=None): tuples """ - if 'get_packages' in self.cache: - for key, value in self.cache['get_packages'].items(): + if self.packages is not None: + for key, value in self.packages.items(): yield (key, value) return @@ -103,7 +103,7 @@ def get_packages(self, invalid_sources=None): raise GitProviderUserInfoException(self) except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e - self.cache['get_packages'] = {} + self.packages = {} return output = {} @@ -112,6 +112,9 @@ def get_packages(self, invalid_sources=None): name = repo_info['name'] repo_url = client.repo_url(author, name) + if invalid_sources is not None and repo_url in invalid_sources: + continue + try: downloads = client.download_info_from_branch(repo_url, repo_info['default_branch']) if not downloads: @@ -142,4 +145,4 @@ def get_packages(self, invalid_sources=None): except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[repo_url] = e - self.cache['get_packages'] = output + self.packages = output diff --git a/app/lib/package_control/providers/gitlab_repository_provider.py b/app/lib/package_control/providers/gitlab_repository_provider.py index 7521268..5872795 100644 --- a/app/lib/package_control/providers/gitlab_repository_provider.py +++ b/app/lib/package_control/providers/gitlab_repository_provider.py @@ -99,8 +99,8 @@ def get_packages(self, invalid_sources=None): tuples """ - if 'get_packages' in self.cache: - for key, value in self.cache['get_packages'].items(): + if self.packages is not None: + for key, value in self.packages.items(): yield (key, value) return @@ -138,9 +138,9 @@ def 
get_packages(self, invalid_sources=None): 'donate': repo_info['donate'], 'buy': None } - self.cache['get_packages'] = {name: details} + self.packages = {name: details} yield (name, details) except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e - self.cache['get_packages'] = {} + self.packages = {} diff --git a/app/lib/package_control/providers/gitlab_user_provider.py b/app/lib/package_control/providers/gitlab_user_provider.py index 26a659d..61f63f5 100644 --- a/app/lib/package_control/providers/gitlab_user_provider.py +++ b/app/lib/package_control/providers/gitlab_user_provider.py @@ -87,8 +87,8 @@ def get_packages(self, invalid_sources=None): tuples """ - if 'get_packages' in self.cache: - for key, value in self.cache['get_packages'].items(): + if self.packages is not None: + for key, value in self.packages.items(): yield (key, value) return @@ -103,7 +103,7 @@ def get_packages(self, invalid_sources=None): raise GitProviderUserInfoException(self) except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e - self.cache['get_packages'] = {} + self.packages = {} return output = {} @@ -112,6 +112,9 @@ def get_packages(self, invalid_sources=None): name = repo_info['name'] repo_url = client.repo_url(author, name) + if invalid_sources is not None and repo_url in invalid_sources: + continue + try: downloads = client.download_info_from_branch(repo_url, repo_info['default_branch']) if not downloads: @@ -142,4 +145,4 @@ def get_packages(self, invalid_sources=None): except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[repo_url] = e - self.cache['get_packages'] = output + self.packages = output diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index 7b8e649..c6fabf0 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ 
b/app/lib/package_control/providers/repository_provider.py @@ -15,7 +15,7 @@ from ..package_version import version_sort from .base_repository_provider import BaseRepositoryProvider from .provider_exception import ProviderException -from .schema_compat import SchemaVersion +from .schema_version import SchemaVersion class InvalidRepoFileException(ProviderException): @@ -57,6 +57,7 @@ class RepositoryProvider(BaseRepositoryProvider): def __init__(self, repo_url, settings): super().__init__(repo_url, settings) + self.included_urls = set() self.repo_info = None self.schema_version = None @@ -70,55 +71,24 @@ def fetch(self): DownloaderException: when an error occurs trying to open a URL """ - if self.repo_url in self.failed_sources: - return False - if self.repo_info is not None: return True + if self.repo_url in self.failed_sources: + return False + try: - self.fetch_repo() - except (DownloaderException, ProviderException) as e: + self.repo_info = self.fetch_repo(self.repo_url) + self.schema_version = self.repo_info['schema_version'] + except (DownloaderException, ClientException, ProviderException) as e: self.failed_sources[self.repo_url] = e - self.cache['get_libraries'] = {} - self.cache['get_packages'] = {} + self.libraries = {} + self.packages = {} return False return True - def fetch_repo(self): - self.cache = {} - self.repo_info = self.fetch_json(self.repo_url) - self.schema_version = self.repo_info['schema_version'] - - # The 4.0.0 repository schema renamed dependencies key to libraries. 
- if self.schema_version.major < 4: - self.repo_info['libraries'] = self.repo_info.pop('dependencies', []) - - for key in ('packages', 'libraries'): - if key not in self.repo_info: - self.repo_info[key] = [] - - if 'includes' not in self.repo_info: - return - - # Allow repositories to include other repositories - for include in resolve_urls(self.repo_url, self.repo_info.pop('includes', [])): - include_info = self.fetch_json(include) - include_version = include_info['schema_version'] - if include_version != self.schema_version: - raise ProviderException( - 'Scheme version of included repository %s doesn\'t match its parent.' % include) - - included_packages = include_info.get('packages', []) - self.repo_info['packages'].extend(included_packages) - - # The 4.0.0 repository schema renamed dependencies key to libraries. - libraries_key = 'libraries' if include_version.major >= 4 else 'dependencies' - included_libraries = include_info.get(libraries_key, []) - self.repo_info['libraries'].extend(included_libraries) - - def fetch_json(self, location): + def fetch_repo(self, location): """ Fetches the contents of a URL of file path @@ -133,6 +103,12 @@ def fetch_json(self, location): A dict of the parsed JSON """ + # Prevent circular includes + if location in self.included_urls: + raise ProviderException('Error, repository "%s" already included.' 
% location) + + self.included_urls.add(location) + if re.match(r'https?://', location, re.I): json_string = http_get(location, self.settings, 'Error downloading repository.') @@ -159,18 +135,49 @@ def fetch_json(self, location): raise InvalidRepoFileException(self, 'parsing JSON failed.') try: - repo_info['schema_version'] = SchemaVersion(repo_info['schema_version']) + schema_version = repo_info['schema_version'] = SchemaVersion(repo_info['schema_version']) except KeyError: raise InvalidRepoFileException( self, 'the "schema_version" JSON key is missing.') except ValueError as e: raise InvalidRepoFileException(self, e) - if isinstance(repo_info['packages'], dict): - raise InvalidRepoFileException( - self, - 'the "packages" key is an object, not an array. ' - 'This indicates it is a channel not a repository.') + # Main keys depending on scheme version + if schema_version.major < 4: + repo_keys = {'packages', 'dependencies', 'includes'} + else: + repo_keys = {'packages', 'libraries', 'includes'} + + # Check existence of at least one required main key + if not set(repo_info.keys()) & repo_keys: + raise InvalidRepoFileException(self, 'it doesn\'t look like a repository.') + + # Check type of existing main keys + for key in repo_keys: + if key in repo_info and not isinstance(repo_info[key], list): + raise InvalidRepoFileException(self, 'the "%s" key is not an array.' % key) + + # Migrate dependencies to libraries + # The 4.0.0 repository schema renamed dependencies key to libraries. 
+ if schema_version.major < 4: + repo_info['libraries'] = repo_info.pop('dependencies', []) + + # Allow repositories to include other repositories, recursively + includes = repo_info.pop('includes', None) + if includes: + for include in resolve_urls(self.repo_url, includes): + try: + include_info = self.fetch_repo(include) + except (DownloaderException, ClientException, ProviderException) as e: + self.failed_sources[include] = e + else: + include_version = include_info['schema_version'] + if include_version != schema_version: + raise ProviderException( + 'Scheme version of included repository %s doesn\'t match its parent.' % include) + + repo_info['packages'].extend(include_info.get('packages', [])) + repo_info['libraries'].extend(include_info.get('libraries', [])) return repo_info @@ -206,8 +213,8 @@ def get_libraries(self, invalid_sources=None): tuples """ - if 'get_libraries' in self.cache: - for key, value in self.cache['get_libraries'].items(): + if self.libraries is not None: + for key, value in self.libraries.items(): yield (key, value) return @@ -421,7 +428,7 @@ def assert_release_keys(download_info): except (DownloaderException, ClientException, ProviderException) as e: self.broken_libriaries[info['name']] = e - self.cache['get_libraries'] = output + self.libraries = output def get_packages(self, invalid_sources=None): """ @@ -462,8 +469,8 @@ def get_packages(self, invalid_sources=None): tuples """ - if 'get_packages' in self.cache: - for key, value in self.cache['get_packages'].items(): + if self.packages is not None: + for key, value in self.packages.items(): yield (key, value) return @@ -787,7 +794,7 @@ def has_broken_release(): output[info['name']] = info yield (info['name'], info) - self.cache['get_packages'] = output + self.packages = output def get_sources(self): """ From fa27a8eef14aa225f969e61978ecdc6f36840dc8 Mon Sep 17 00:00:00 2001 From: deathaxe Date: Fri, 7 Apr 2023 09:58:01 +0200 Subject: [PATCH 18/39] Add upstream package python_versions 
declaration support This commit enables packagecontrol.io to optionally store packages' python_versions, which are declared within a repository.json This way old packages, which are proofed compatible with python 3.8, can be pushed to new plugin_host without touching their github repo. A local .python-version file always overrides upstream settings. --- app/models/package/find.py | 5 +++++ app/models/package/modify.py | 5 ++++- setup/sql/migrations/libraries.sql | 1 + 3 files changed, 10 insertions(+), 1 deletion(-) diff --git a/app/models/package/find.py b/app/models/package/find.py index fc5855a..bc21fb4 100644 --- a/app/models/package/find.py +++ b/app/models/package/find.py @@ -41,6 +41,7 @@ def all(limit_one_per_package=False, only_package_control=False): 'releases': [ { 'platforms': ['*'], + 'python_versions': ['3.3', '3.8'], 'sublime_text': '*', 'version': '1.0.0', 'url': 'http://example.com/package.zip', @@ -107,6 +108,7 @@ def all(limit_one_per_package=False, only_package_control=False): SELECT r.package, r.platforms, + r.python_versions, r.sublime_text, r.version, r.url, @@ -199,6 +201,9 @@ def all(limit_one_per_package=False, only_package_control=False): 'date': row['date'] } + if row['python_versions']: + release['python_versions'] = row['python_versions'] + if row['libraries']: release['libraries'] = row['libraries'] diff --git a/app/models/package/modify.py b/app/models/package/modify.py index 73508ab..6315004 100644 --- a/app/models/package/modify.py +++ b/app/models/package/modify.py @@ -533,7 +533,7 @@ def store(values): else: st_versions.extend([2, 3, 4]) - st_versions = sorted(list(set(st_versions))) + st_versions = sorted(set(st_versions)) if not isinstance(values['author'], list): authors = re.split(r'\s*,\s*', values['author']) @@ -564,6 +564,7 @@ def store(values): INSERT INTO releases ( package, platforms, + python_versions, sublime_text, version, url, @@ -576,6 +577,7 @@ def store(values): %s, %s, %s, + %s, %s ) """ @@ -583,6 +585,7 @@ def 
store(values): cursor.execute(sql, [ name, release['platforms'], + release['python_versions'], _normalize_st_version(release['sublime_text']), release['version'], release['url'], diff --git a/setup/sql/migrations/libraries.sql b/setup/sql/migrations/libraries.sql index 3e89052..c9c49a8 100644 --- a/setup/sql/migrations/libraries.sql +++ b/setup/sql/migrations/libraries.sql @@ -1,3 +1,4 @@ +ALTER TABLE releases ADD COLUMN python_versions varchar[]; ALTER TABLE releases RENAME dependencies TO libraries; ALTER TABLE dependencies RENAME TO libraries; From 593c151d43802f92d0bcdfc3070e74fb06789827 Mon Sep 17 00:00:00 2001 From: deathaxe Date: Wed, 10 May 2023 14:19:07 +0200 Subject: [PATCH 19/39] Add missing modules --- app/lib/package_control/package_version.py | 146 ++++++++++++++++++ .../providers/schema_version.py | 37 +++++ 2 files changed, 183 insertions(+) create mode 100644 app/lib/package_control/package_version.py create mode 100644 app/lib/package_control/providers/schema_version.py diff --git a/app/lib/package_control/package_version.py b/app/lib/package_control/package_version.py new file mode 100644 index 0000000..1a96f5f --- /dev/null +++ b/app/lib/package_control/package_version.py @@ -0,0 +1,146 @@ +import re + +from .deps.semver import SemVer +from .console_write import console_write + + +class PackageVersion(SemVer): + + _date_pattern = re.compile(r'^(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$') + _pre_semver_pattern = re.compile(r'^(\d+)(?:\.(\d+)(?:\.(\d+)(?:[T\.](\d+(\.\d+)*))?)?)?$') + + @classmethod + def _parse(cls, ver): + """ + Converts a string version number into SemVer. If the version is based on + a date, converts to 0.0.1+yyyy.mm.dd.hh.mm.ss. 
+ + :param ver: + A string, dict with 'version' key, or a SemVer object + + :raises: + TypeError, if ver is not one of: str, dict with version, SemVer + ValueError, if ver is no valid version string + + :return: + A list of 5 items representing a valid semantic version number + """ + + # Allowing passing in a dict containing info about a package + if isinstance(ver, dict): + if 'version' not in ver: + raise TypeError("%s is not a package or library release" % ver) + ver = ver['version'] + + if isinstance(ver, SemVer): + return ver + + if not isinstance(ver, str): + raise TypeError("%r is not a string" % ver) + + # Trim v off of the front + if ver.startswith('v'): + ver = ver[1:] + + # Match semver compatible strings + match = cls._match_regex.match(ver) + if match: + g = list(match.groups()) + for i in range(3): + g[i] = int(g[i]) + + return g + + # We prepend 0 to all date-based version numbers so that developers + # may switch to explicit versioning from GitHub/GitLab/BitBucket + # versioning based on commit dates. + # + # The resulting semver is alwass 0.0.1 with timestamp being used + # as build number, so any explicitly choosen version (via tags) will + # be greater, once a package moves from branch to tag based releases. + # + # The result looks like: + # 0.0.1+2020.07.15.10.50.38 + match = cls._date_pattern.match(ver) + if match: + return [0, 0, 1, None, '.'.join(match.groups())] + + # This handles versions that were valid pre-semver with 1 to 4+ dotted + # groups, such as 1, 1.6, or 1.6.9.0 + match = cls._pre_semver_pattern.match(ver) + if match: + return [ + int(match.group(1) or 0), + int(match.group(2) or 0), + int(match.group(3) or 0), + None, + match.group(4) + ] + + raise ValueError("'%s' is not a valid SemVer string" % ver) + + +def version_match_prefix(version, filter_prefix): + """ + Create a SemVer for a given version, if it matches filter_prefix. 
+ + :param version: + The version string to match + + :param filter_prefix: + The prefix to match versions against + + :returns: + SemVer, if version is valid and matches given filter_prefix + None, if version is invalid or doesn't match filter_prefix + """ + + try: + if filter_prefix: + if version.startswith(filter_prefix): + return PackageVersion(version[len(filter_prefix):]) + else: + return PackageVersion(version) + except ValueError: + pass + return None + + +def version_sort(sortable, *fields, **kwargs): + """ + Sorts a list that is a list of versions, or dicts with a 'version' key. + Can also secondly sort by another field. + + :param sortable: + The list to sort + + :param *fields: + If sortable is a list of dicts, perform secondary sort via these fields, + in order + + :param **kwargs: + Keyword args to pass on to sorted() + + :return: + A copy of sortable that is sorted according to SemVer rules + """ + + def _version_sort_key(item): + result = PackageVersion(item) + if fields: + values = [result] + for field in fields: + values.append(item[field]) + result = tuple(values) + return result + + try: + return sorted(sortable, key=_version_sort_key, **kwargs) + except (ValueError) as e: + console_write( + ''' + Error sorting versions - %s + ''', + e + ) + return [] diff --git a/app/lib/package_control/providers/schema_version.py b/app/lib/package_control/providers/schema_version.py new file mode 100644 index 0000000..d25ebb1 --- /dev/null +++ b/app/lib/package_control/providers/schema_version.py @@ -0,0 +1,37 @@ +from ..deps.semver import SemVer + + +class SchemaVersion(SemVer): + supported_versions = ('2.0', '3.0.0', '4.0.0') + + @classmethod + def _parse(cls, ver): + """ + Custom version string parsing to maintain backward compatibility. + + SemVer needs all of major, minor and patch parts being present in `ver`. + + :param ver: + An integer, float or string containing a version string. 
+ + :returns: + List of (major, minor, patch) + """ + try: + if isinstance(ver, int): + ver = float(ver) + if isinstance(ver, float): + ver = str(ver) + except ValueError: + raise ValueError('the "schema_version" is not a valid number.') + + if ver not in cls.supported_versions: + raise ValueError( + 'the "schema_version" is not recognized. Must be one of: %s or %s.' + % (', '.join(cls.supported_versions[:-1]), cls.supported_versions[-1]) + ) + + if ver.count('.') == 1: + ver += '.0' + + return SemVer._parse(ver) From f9168ba85c6a914030a56c65b3b1b66aefef29ee Mon Sep 17 00:00:00 2001 From: deathaxe Date: Fri, 7 Jul 2023 18:40:04 +0200 Subject: [PATCH 20/39] Tweak update_package_control_lib task --- app/tasks/update_package_control_lib.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/tasks/update_package_control_lib.py b/app/tasks/update_package_control_lib.py index d14d452..421e006 100644 --- a/app/tasks/update_package_control_lib.py +++ b/app/tasks/update_package_control_lib.py @@ -1,5 +1,4 @@ import os -import re import shutil import sys @@ -29,8 +28,9 @@ 'cmd.py', 'console_write.py', 'download_manager.py', + 'http_cache.py', + 'package_versions.py', 'text.py', - 'versions.py', ] PRESERVE_PATHS = [ From b28792319be7c858654cc5909ccdd5e73a52d8aa Mon Sep 17 00:00:00 2001 From: deathaxe Date: Sun, 23 Jul 2023 14:46:53 +0200 Subject: [PATCH 21/39] Sync with Package Control --- app/lib/package_control/__init__.py | 4 ++-- app/lib/package_control/ca_certs.py | 2 +- app/lib/package_control/clients/bitbucket_client.py | 2 +- app/lib/package_control/clients/github_client.py | 8 ++++---- app/lib/package_control/clients/gitlab_client.py | 6 +++--- app/lib/package_control/deps/semver.py | 6 +++--- app/lib/package_control/providers/provider_exception.py | 6 +++--- app/lib/package_control/providers/repository_provider.py | 5 +++-- 8 files changed, 20 insertions(+), 19 deletions(-) diff --git a/app/lib/package_control/__init__.py 
b/app/lib/package_control/__init__.py index 53b3b77..5a6c20d 100644 --- a/app/lib/package_control/__init__.py +++ b/app/lib/package_control/__init__.py @@ -1,2 +1,2 @@ -__version__ = "4.0.0-dev" -__version_info__ = (4, 0, 0, 'dev') +__version__ = "4.0.0-beta4" +__version_info__ = (4, 0, 0, 'beta', 4) diff --git a/app/lib/package_control/ca_certs.py b/app/lib/package_control/ca_certs.py index 981ad0b..fe1f9ae 100644 --- a/app/lib/package_control/ca_certs.py +++ b/app/lib/package_control/ca_certs.py @@ -153,7 +153,7 @@ def print_cert_subject(cert, reason): def get_system_ca_bundle_path(settings, ca_bundle_dir): """ Get the filesystem path to the system CA bundle. On Linux it looks in a - number of predefined places, however on OS X it has to be programatically + number of predefined places, however on OS X it has to be programmatically exported from the SystemRootCertificates.keychain. Windows does not ship with a CA bundle, but also we use WinINet on Windows, so we don't need to worry about CA certs. 
diff --git a/app/lib/package_control/clients/bitbucket_client.py b/app/lib/package_control/clients/bitbucket_client.py index 64c9df1..901fe55 100644 --- a/app/lib/package_control/clients/bitbucket_client.py +++ b/app/lib/package_control/clients/bitbucket_client.py @@ -64,7 +64,7 @@ def repo_url(user_name, repo_name): The repository name :return: - The repositoy URL of given owner and repo name + The repository URL of given owner and repo name """ return 'https://bitbucket.com/%s/%s' % (quote(user_name), quote(repo_name)) diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py index dabe06a..a6f0f30 100644 --- a/app/lib/package_control/clients/github_client.py +++ b/app/lib/package_control/clients/github_client.py @@ -49,7 +49,7 @@ def repo_url(user_name, repo_name): The repository name :return: - The repositoy URL of given owner and repo name + The repository URL of given owner and repo name """ return 'https://github.com/%s/%s' % (quote(user_name), quote(repo_name)) @@ -161,9 +161,9 @@ def download_info_from_tags(self, url, tag_prefix=None): if not tags_match: return None - def _get_releases(user_repo, tag_prefix=None, page_size=100): + def _get_releases(user_repo, tag_prefix=None, page_size=1000): used_versions = set() - for page in range(100): + for page in range(10): query_string = urlencode({'page': page * page_size, 'per_page': page_size}) tags_url = self._api_url(user_repo, '/tags?%s' % query_string) tags_json = self.fetch_json(tags_url) @@ -187,7 +187,7 @@ def _get_releases(user_repo, tag_prefix=None, page_size=100): version, tag, tag_url = release if is_client: - timestamp = 0 + timestamp = '1970-01-01 00:00:00' else: tag_info = self.fetch_json(tag_url) timestamp = tag_info['commit']['committer']['date'][0:19].replace('T', ' ') diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py index 2ead806..c83344d 100644 --- 
a/app/lib/package_control/clients/gitlab_client.py +++ b/app/lib/package_control/clients/gitlab_client.py @@ -52,7 +52,7 @@ def repo_url(user_name, repo_name): The repository name :return: - The repositoy URL of given owner and repo name + The repository URL of given owner and repo name """ return 'https://gitlab.com/%s/%s' % (quote(user_name), quote(repo_name)) @@ -164,9 +164,9 @@ def download_info_from_tags(self, url, tag_prefix=None): if not tags_match: return None - def _get_releases(user_repo, tag_prefix=None, page_size=100): + def _get_releases(user_repo, tag_prefix=None, page_size=1000): used_versions = set() - for page in range(100): + for page in range(10): query_string = urlencode({'page': page * page_size, 'per_page': page_size}) tags_url = self._api_url(user_repo, '/repository/tags?%s' % query_string) tags_json = self.fetch_json(tags_url) diff --git a/app/lib/package_control/deps/semver.py b/app/lib/package_control/deps/semver.py index 5d59d49..1ac82e2 100644 --- a/app/lib/package_control/deps/semver.py +++ b/app/lib/package_control/deps/semver.py @@ -123,14 +123,14 @@ class SemVer(namedtuple("_SemVer", 'major, minor, patch, prerelease, build')): """ # Static class variables - _base_regex = r'''(?x) + _base_regex = r''' (?P[0-9]+) \.(?P[0-9]+) \.(?P[0-9]+) (?:\-(?P(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?))? 
(?:\+(?P(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?))?''' - _search_regex = re.compile(_base_regex) - _match_regex = re.compile('^%s$' % _base_regex) # required because of $ anchor + _search_regex = re.compile(r'(?x)%s' % _base_regex) + _match_regex = re.compile(r'(?x)^%s$' % _base_regex) # required because of $ anchor # "Constructor" def __new__(cls, *args, **kwargs): diff --git a/app/lib/package_control/providers/provider_exception.py b/app/lib/package_control/providers/provider_exception.py index 17db1ab..e327f59 100644 --- a/app/lib/package_control/providers/provider_exception.py +++ b/app/lib/package_control/providers/provider_exception.py @@ -8,7 +8,7 @@ def __bytes__(self): class GitProviderUserInfoException(ProviderException): """ - Exception for signaling user information download error. + Exception for signalling user information download error. The exception is used to indicate a given URL not being in expected form to be used by given provider to download user info from. @@ -24,7 +24,7 @@ def __str__(self): class GitProviderRepoInfoException(ProviderException): """ - Exception for signaling repository information download error. + Exception for signalling repository information download error. The exception is used to indicate a given URL not being in expected form to be used by given provider to download repo info from. @@ -40,7 +40,7 @@ def __str__(self): class GitProviderDownloadInfoException(ProviderException): """ - Exception for signaling download information download error. + Exception for signalling download information download error. The exception is used to indicate a given URL not being in expected form to be used by given provider to download release information from. 
diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py index c6fabf0..565a453 100644 --- a/app/lib/package_control/providers/repository_provider.py +++ b/app/lib/package_control/providers/repository_provider.py @@ -787,8 +787,9 @@ def has_broken_release(): # Extract a date from the newest release date = '1970-01-01 00:00:00' for release in info['releases']: - if 'date' in release and release['date'] > date: - date = release['date'] + release_date = release.get('date') + if release_date and isinstance(release_date, str) and release_date > date: + date = release_date info['last_modified'] = date output[info['name']] = info From 4f4e86171580610bc5845abca8fe7121b29f1c1e Mon Sep 17 00:00:00 2001 From: deathaxe Date: Wed, 13 Sep 2023 17:40:47 +0200 Subject: [PATCH 22/39] Sync with Package Control --- .../clients/bitbucket_client.py | 2 +- .../package_control/clients/github_client.py | 2 +- .../package_control/clients/gitlab_client.py | 2 +- app/lib/package_control/deps/semver.py | 851 ------------------ .../downloaders/background_downloader.py | 63 -- .../downloaders/oscrypto_downloader.py | 11 +- .../http/validating_https_connection.py | 8 + app/lib/package_control/package_version.py | 106 +-- app/lib/package_control/pep440.py | 645 +++++++++++++ .../providers/schema_compat.py | 37 - .../providers/schema_version.py | 16 +- .../check/file/check_messages.py | 36 +- 12 files changed, 749 insertions(+), 1030 deletions(-) delete mode 100644 app/lib/package_control/deps/semver.py delete mode 100644 app/lib/package_control/downloaders/background_downloader.py create mode 100644 app/lib/package_control/pep440.py delete mode 100644 app/lib/package_control/providers/schema_compat.py diff --git a/app/lib/package_control/clients/bitbucket_client.py b/app/lib/package_control/clients/bitbucket_client.py index 901fe55..fdc5732 100644 --- a/app/lib/package_control/clients/bitbucket_client.py +++ 
b/app/lib/package_control/clients/bitbucket_client.py @@ -205,7 +205,7 @@ def _get_releases(user_repo, tag_prefix, page_size=100): output.append(self._make_download_info(user_repo, tag, str(version), timestamp)) - num_releases += not version.prerelease + num_releases += version.is_final if max_releases > 0 and num_releases >= max_releases: break diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py index a6f0f30..2bb46e2 100644 --- a/app/lib/package_control/clients/github_client.py +++ b/app/lib/package_control/clients/github_client.py @@ -194,7 +194,7 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000): output.append(self._make_download_info(user_repo, tag, str(version), timestamp)) - num_releases += not version.prerelease + num_releases += version.is_final if max_releases > 0 and num_releases >= max_releases: break diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py index c83344d..80f0272 100644 --- a/app/lib/package_control/clients/gitlab_client.py +++ b/app/lib/package_control/clients/gitlab_client.py @@ -196,7 +196,7 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000): output.append(self._make_download_info(user_name, repo_name, tag, str(version), timestamp)) - num_releases += not version.prerelease + num_releases += version.is_final if max_releases > 0 and num_releases >= max_releases: break diff --git a/app/lib/package_control/deps/semver.py b/app/lib/package_control/deps/semver.py deleted file mode 100644 index 1ac82e2..0000000 --- a/app/lib/package_control/deps/semver.py +++ /dev/null @@ -1,851 +0,0 @@ -"""pysemver: Semantic Version comparing for Python. - -Provides comparing of semantic versions by using SemVer objects using rich comperations plus the -possibility to match a selector string against versions. Interesting for version dependencies. 
-Versions look like: "1.7.12+b.133" -Selectors look like: ">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113" - -Example usages: - >>> SemVer(1, 2, 3, build=13) - SemVer("1.2.3+13") - >>> SemVer.valid("1.2.3.4") - False - >>> SemVer.clean("this is unimportant text 1.2.3-2 and will be stripped") - "1.2.3-2" - >>> SemVer("1.7.12+b.133").satisfies(">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113") - True - >>> SemSel(">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113").matches(SemVer("1.7.12+b.133"), - ... SemVer("1.6.9+b.112"), SemVer("1.6.10")) - [SemVer("1.7.12+b.133"), SemVer("1.6.9+b.112")] - >>> min(_) - SemVer("1.6.9+b.112") - >>> _.patch - 9 - -Exported classes: - * SemVer(collections.namedtuple()) - Parses semantic versions and defines methods for them. Supports rich comparisons. - * SemSel(tuple) - Parses semantic version selector strings and defines methods for them. - * SelParseError(Exception) - An error among others raised when parsing a semantic version selector failed. - -Other classes: - * SemComparator(object) - * SemSelAndChunk(list) - * SemSelOrChunk(list) - -Functions/Variables/Constants: - none - - -Copyright (c) 2013 Zachary King, FichteFoll - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and -associated documentation files (the "Software"), to deal in the Software without restriction, -including without limitation the rights to use, copy, modify, merge, publish, distribute, -sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: The above copyright notice and this -permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT -NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES -OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -""" - -import re -import sys -from collections import namedtuple # Python >=2.6 - - -__all__ = ('SemVer', 'SemSel', 'SelParseError') - - -if sys.version_info[0] == 3: - basestring = str - - def cmp(a, b): - return (a > b) - (a < b) - - -# @functools.total_ordering would be nice here but was added in 2.7, __cmp__ is not Py3 -class SemVer(namedtuple("_SemVer", 'major, minor, patch, prerelease, build')): - - """Semantic Version, consists of 3 to 5 components defining the version's adicity. - - See http://semver.org/ (2.0.0-rc.1) for the standard mainly used for this implementation, few - changes have been made. - - Information on this particular class and their instances: - - Immutable and hashable. - - Subclasses `collections.namedtuple`. - - Always `True` in boolean context. - - len() returns an int between 3 and 5; 4 when a pre-release is set and 5 when a build is - set. Note: Still returns 5 when build is set but not pre-release. - - Parts of the semantic version can be accessed by integer indexing, key (string) indexing, - slicing and getting an attribute. Returned slices are tuple. Leading '-' and '+' of - optional components are not stripped. Supported keys/attributes: - major, minor, patch, prerelease, build. - - Examples: - s = SemVer("1.2.3-4.5+6") - s[2] == 3 - s[:3] == (1, 2, 3) - s['build'] == '-4.5' - s.major == 1 - - Short information on semantic version structure: - - Semantic versions consist of: - * a major component (numeric) - * a minor component (numeric) - * a patch component (numeric) - * a pre-release component [optional] - * a build component [optional] - - The pre-release component is indicated by a hyphen '-' and followed by alphanumeric[1] sequences - separated by dots '.'. 
Sequences are compared numerically if applicable (both sequences of two - versions are numeric) or lexicographically. May also include hyphens. The existence of a - pre-release component lowers the actual version; the shorter pre-release component is considered - lower. An 'empty' pre-release component is considered to be the least version for this - major-minor-patch combination (e.g. "1.0.0-"). - - The build component may follow the optional pre-release component and is indicated by a plus '+' - followed by sequences, just as the pre-release component. Comparing works similarly. However the - existence of a build component raises the actual version and may also raise a pre-release. An - 'empty' build component is considered to be the highest version for this - major-minor-patch-prerelease combination (e.g. "1.2.3+"). - - - [1]: Regexp for a sequence: r'[0-9A-Za-z-]+'. - """ - - # Static class variables - _base_regex = r''' - (?P[0-9]+) - \.(?P[0-9]+) - \.(?P[0-9]+) - (?:\-(?P(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?))? - (?:\+(?P(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?))?''' - _search_regex = re.compile(r'(?x)%s' % _base_regex) - _match_regex = re.compile(r'(?x)^%s$' % _base_regex) # required because of $ anchor - - # "Constructor" - def __new__(cls, *args, **kwargs): - """There are two different constructor styles that are allowed: - - Option 1 allows specification of a semantic version as a string and the option to "clean" - the string before parsing it. - - Option 2 allows specification of each component separately as one parameter. - - Note that all the parameters specified in the following sections can be passed either as - positional or as named parameters while considering the usual Python rules for this. As - such, `SemVer(1, 2, minor=1)` will result in an exception and not in `SemVer("1.1.2")`. 
- - Option 1: - Constructor examples: - SemVer("1.0.1") - SemVer("this version 1.0.1-pre.1 here", True) - SemVer(ver="0.0.9-pre-alpha+34", clean=False) - - Parameters: - * ver (str) - The string containing the version. - * clean = `False` (bool; optional) - If this is true in boolean context, `SemVer.clean(ver)` is called before - parsing. - - Option 2: - Constructor examples: - SemVer(1, 0, 1) - SemVer(1, '0', prerelease='pre-alpha', patch=1, build=34) - SemVer(**dict(minor=2, major=1, patch=3)) - - Parameters: - * major (int, str, float ...) - * minor (...) - * patch (...) - Major to patch components must be an integer or convertable to an int (e.g. a - string or another number type). - - * prerelease = `None` (str, int, float ...; optional) - * build = `None` (...; optional) - Pre-release and build components should be a string (or number) type. - Will be passed to `str()` if not already a string but the final string must - match '^[0-9A-Za-z.-]*$' - - Raises: - * TypeError - Invalid parameter type(s) or combination (e.g. option 1 and 2). - * ValueError - Invalid semantic version or option 2 parameters unconvertable. 
- """ - ver, clean, comps = None, False, None - kw, length = kwargs.copy(), len(args) + len(kwargs) - - def inv(): - raise TypeError("Invalid parameter combination: args=%s; kwargs=%s" % (args, kwargs)) - - # Do validation and parse the parameters - if length == 0 or length > 5: - raise TypeError("SemVer accepts at least 1 and at most 5 arguments (%d given)" % length) - - elif length < 3: - if len(args) == 2: - ver, clean = args - else: - ver = args[0] if args else kw.pop('ver', None) - clean = kw.pop('clean', clean) - if kw: - inv() - - else: - comps = list(args) + [kw.pop(cls._fields[k], None) for k in range(len(args), 5)] - if kw or any(comps[i] is None for i in range(3)): - inv() - - typecheck = (int,) * 3 + (basestring,) * 2 - for i, (v, t) in enumerate(zip(comps, typecheck)): - if v is None: - continue - elif not isinstance(v, t): - try: - if i < 3: - v = typecheck[i](v) - else: # The real `basestring` can not be instatiated (Py2) - v = str(v) - except ValueError as e: - # Modify the exception message. I can't believe this actually works - e.args = ("Parameter #%d must be of type %s or convertable" - % (i, t.__name__),) - raise - else: - comps[i] = v - if t is basestring and not re.match(r"^[0-9A-Za-z.-]*$", v): - raise ValueError("Build and pre-release strings must match '^[0-9A-Za-z.-]*$'") - - # Final adjustments - if not comps: - if ver is None or clean is None: - inv() - ver = clean and cls.clean(ver) or ver - comps = cls._parse(ver) - - # Create the obj - return super(SemVer, cls).__new__(cls, *comps) - - # Magic methods - def __str__(self): - return ('.'.join(map(str, self[:3])) - + ('-' + self.prerelease if self.prerelease is not None else '') - + ('+' + self.build if self.build is not None else '')) - - def __repr__(self): - # Use the shortest representation - what would you prefer? 
- return 'SemVer("%s")' % str(self) - # return 'SemVer(%s)' % ', '.join('%s=%r' % (k, getattr(self, k)) for k in self._fields) - - def __hash__(self): - return hash(str(self)) - - def __len__(self): - return 3 + (self.build is not None and 2 or self.prerelease is not None) - - # Magic rich comparing methods - def __gt__(self, other): - return self._compare(other) == 1 if isinstance(other, SemVer) else NotImplemented - - def __eq__(self, other): - return self._compare(other) == 0 if isinstance(other, SemVer) else NotImplemented - - def __lt__(self, other): - return not (self > other or self == other) - - def __ge__(self, other): - return not (self < other) - - def __le__(self, other): - return not (self > other) - - def __ne__(self, other): - return not (self == other) - - # Utility (class-)methods - def satisfies(self, sel): - """Alias for `bool(sel.matches(self))` or `bool(SemSel(sel).matches(self))`. - - See `SemSel.__init__()` and `SemSel.matches(*vers)` for possible exceptions. - - Returns: - * bool: `True` if the version matches the passed selector, `False` otherwise. - """ - if not isinstance(sel, SemSel): - sel = SemSel(sel) # just "re-raise" exceptions - - return bool(sel.matches(self)) - - @classmethod - def valid(cls, ver): - """Check if `ver` is a valid semantic version. Classmethod. - - Parameters: - * ver (str) - The string that should be stripped. - - Raises: - * TypeError - Invalid parameter type. - - Returns: - * bool: `True` if it is valid, `False` otherwise. - """ - if not isinstance(ver, basestring): - raise TypeError("%r is not a string" % ver) - - if cls._match_regex.match(ver): - return True - else: - return False - - @classmethod - def clean(cls, vers): - """Remove everything before and after a valid version string. Classmethod. - - Parameters: - * vers (str) - The string that should be stripped. - - Raises: - * TypeError - Invalid parameter type. - - Returns: - * str: The stripped version string. Only the first version is matched. 
- * None: No version found in the string. - """ - if not isinstance(vers, basestring): - raise TypeError("%r is not a string" % vers) - m = cls._search_regex.search(vers) - if m: - return vers[m.start():m.end()] - else: - return None - - # Private (class-)methods - @classmethod - def _parse(cls, ver): - """Private. Do not touch. Classmethod. - """ - if not isinstance(ver, basestring): - raise TypeError("%r is not a string" % ver) - - match = cls._match_regex.match(ver) - - if match is None: - raise ValueError("'%s' is not a valid SemVer string" % ver) - - g = list(match.groups()) - for i in range(3): - g[i] = int(g[i]) - - return g # Will be passed as namedtuple(...)(*g) - - def _compare(self, other): - """Private. Do not touch. - self > other: 1 - self = other: 0 - self < other: -1 - """ - # Shorthand lambdas - def cp_len(t, i=0): - return cmp(len(t[i]), len(t[not i])) - - for i, (x1, x2) in enumerate(zip(self, other)): - if i > 2: - if x1 is None and x2 is None: - continue - - # self is greater when other has a prerelease but self doesn't - # self is less when other has a build but self doesn't - if x1 is None or x2 is None: - return int(2 * (i - 3.5)) * (1 - 2 * (x1 is None)) - - # self is less when other's build is empty - if i == 4 and (not x1 or not x2) and x1 != x2: - return 1 - 2 * bool(x1) - - # Split by '.' and use numeric comp or lexicographical order - t2 = [x1.split('.'), x2.split('.')] - for y1, y2 in zip(*t2): - if y1.isdigit() and y2.isdigit(): - y1 = int(y1) - y2 = int(y2) - if y1 > y2: - return 1 - elif y1 < y2: - return -1 - - # The "longer" sub-version is greater - d = cp_len(t2) - if d: - return d - else: - if x1 > x2: - return 1 - elif x1 < x2: - return -1 - - # The versions equal - return 0 - - -class SemComparator: - - """Holds a SemVer object and a comparing operator and can match these against a given version. 
- - Constructor: SemComparator('<=', SemVer("1.2.3")) - - Methods: - * matches(ver) - """ - # Private properties - _ops = { - '>=': '__ge__', - '<=': '__le__', - '>': '__gt__', - '<': '__lt__', - '=': '__eq__', - '!=': '__ne__' - } - _ops_satisfy = ('~', '!') - - # Constructor - def __init__(self, op, ver): - """Constructor examples: - SemComparator('<=', SemVer("1.2.3")) - SemComparator('!=', SemVer("2.3.4")) - - Parameters: - * op (str, False, None) - One of [>=, <=, >, <, =, !=, !, ~] or evaluates to `False` which defaults to '~'. - '~' means a "satisfy" operation where pre-releases and builds are ignored. - '!' is a negative "~". - * ver (SemVer) - Holds the version to compare with. - - Raises: - * ValueError - Invalid `op` parameter. - * TypeError - Invalid `ver` parameter. - """ - super(SemComparator, self).__init__() - - if op and op not in self._ops_satisfy and op not in self._ops: - raise ValueError("Invalid value for `op` parameter.") - if not isinstance(ver, SemVer): - raise TypeError("`ver` parameter is not instance of SemVer.") - - # Default to '~' for versions with no build or pre-release - op = op or '~' - # Fallback to '=' and '!=' if len > 3 - if len(ver) != 3: - if op == '~': - op = '=' - if op == '!': - op = '!=' - - self.op = op - self.ver = ver - - # Magic methods - def __str__(self): - return (self.op or "") + str(self.ver) - - # Utility methods - def matches(self, ver): - """Match the internal version (constructor) against `ver`. - - Parameters: - * ver (SemVer) - - Raises: - * TypeError - Could not compare `ver` against the version passed in the constructor with the - passed operator. - - Returns: - * bool - `True` if the version matched the specified operator and internal version, `False` - otherwise. 
- """ - if self.op in self._ops_satisfy: - # Compare only the first three parts (which are tuples) and directly - return bool((self.ver[:3] == ver[:3]) + (self.op == '!') * -1) - ret = getattr(ver, self._ops[self.op])(self.ver) - if ret == NotImplemented: - raise TypeError("Unable to compare %r with operator '%s'" % (ver, self.op)) - return ret - - -class SemSelAndChunk(list): - - """Extends list and defines a few methods used for matching versions. - - New elements should be added by calling `.add_child(op, ver)` which creates a SemComparator - instance and adds that to itself. - - Methods: - * matches(ver) - * add_child(op, ver) - """ - - # Magic methods - - def __str__(self): - return ' '.join(map(str, self)) - - # Utitlity methods - - def matches(self, ver): - """Match all of the added children against `ver`. - - Parameters: - * ver (SemVer) - - Raises: - * TypeError - Invalid `ver` parameter. - - Returns: - * bool: - `True` if *all* of the SemComparator children match `ver`, `False` otherwise. - """ - if not isinstance(ver, SemVer): - raise TypeError("`ver` parameter is not instance of SemVer.") - return all(cp.matches(ver) for cp in self) - - def add_child(self, op, ver): - """Create a SemComparator instance with the given parameters and appends that to self. - - Parameters: - * op (str) - * ver (SemVer) - Both parameters are forwarded to `SemComparator.__init__`, see there for a more detailed - description. - - Raises: - Exceptions raised by `SemComparator.__init__`. - """ - self.append(SemComparator(op, SemVer(ver))) - - -class SemSelOrChunk(list): - - """Extends list and defines a few methods used for matching versions. - - New elements should be added by calling `.new_child()` which returns a SemSelAndChunk - instance. - - Methods: - * matches(ver) - * new_child() - """ - - # Magic methods - - def __str__(self): - return ' || '.join(map(str, self)) - - # Utility methods - - def matches(self, ver): - """Match all of the added children against `ver`. 
- - Parameters: - * ver (SemVer) - - Raises: - * TypeError - Invalid `ver` parameter. - - Returns: - * bool - `True` if *any* of the SemSelAndChunk children matches `ver`. - `False` otherwise. - """ - if not isinstance(ver, SemVer): - raise TypeError("`ver` parameter is not instance of SemVer.") - return any(ch.matches(ver) for ch in self) - - def new_child(self): - """Creates a new SemSelAndChunk instance, appends it to self and returns it. - - Returns: - * SemSelAndChunk: An empty instance. - """ - ch = SemSelAndChunk() - self.append(ch) - return ch - - -class SelParseError(Exception): - - """An Exception raised when parsing a semantic selector failed. - """ - pass - - -# Subclass `tuple` because this is a somewhat simple method to make this immutable -class SemSel(tuple): - - """A Semantic Version Selector, holds a selector and can match it against semantic versions. - - Information on this particular class and their instances: - - Immutable but not hashable because the content within might have changed. - - Subclasses `tuple` but does not behave like one. - - Always `True` in boolean context. - - len() returns the number of containing *and chunks* (see below). - - Iterable, iterates over containing *and chunks*. - - When talking about "versions" it refers to a semantic version (SemVer). For information on how - versions compare to one another, see SemVer's doc string. 
- - List for **comparators**: - "1.0.0" matches the version 1.0.0 and all its pre-release and build variants - "!1.0.0" matches any version that is not 1.0.0 or any of its variants - "=1.0.0" matches only the version 1.0.0 - "!=1.0.0" matches any version that is not 1.0.0 - ">=1.0.0" matches versions greater than or equal 1.0.0 - "<1.0.0" matches versions smaller than 1.0.0 - "1.0.0 - 1.0.3" matches versions greater than or equal 1.0.0 thru 1.0.3 - "~1.0" matches versions greater than or equal 1.0.0 thru 1.0.9999 (and more) - "~1", "1.x", "1.*" match versions greater than or equal 1.0.0 thru 1.9999.9999 (and more) - "~1.1.2" matches versions greater than or equal 1.1.2 thru 1.1.9999 (and more) - "~1.1.2+any" matches versions greater than or equal 1.1.2+any thru 1.1.9999 (and more) - "*", "~", "~x" match any version - - Multiple comparators can be combined by using ' ' spaces and every comparator must match to make - the **and chunk** match a version. - Multiple and chunks can be combined to **or chunks** using ' || ' and match if any of the and - chunks split by these matches. - - A complete example would look like: - ~1 || 0.0.3 || <0.0.2 >0.0.1+b.1337 || 2.0.x || 2.1.0 - 2.1.0+b.12 !=2.1.0+b.9 - - Methods: - * matches(*vers) - """ - # Private properties - _fuzzy_regex = re.compile(r'''(?x)^ - (?P[<>]=?|~>?=?)? - (?:(?P\d+) - (?:\.(?P\d+) - (?:\.(?P\d+) - (?P[-+][a-zA-Z0-9-+.]*)? - )? - )? - )?$''') - _xrange_regex = re.compile(r'''(?x)^ - (?P[<>]=?|~>?=?)? - (?:(?P\d+|[xX*]) - (?:\.(?P\d+|[xX*]) - (?:\.(?P\d+|[xX*]))? - )? - ) - (?P.*)$''') - _split_op_regex = re.compile(r'^(?P=|[<>!]=?)?(?P.*)$') - - # "Constructor" - def __new__(cls, sel): - """Constructor examples: - SemSel(">1.0.0") - SemSel("~1.2.9 !=1.2.12") - - Parameters: - * sel (str) - A version selector string. - - Raises: - * TypeError - `sel` parameter is not a string. - * ValueError - A version in the selector could not be matched as a SemVer. 
- * SemParseError - The version selector's syntax is unparsable; invalid ranges (fuzzy, xrange or - explicit range) or invalid '||' - """ - chunk = cls._parse(sel) - return super(SemSel, cls).__new__(cls, (chunk,)) - - # Magic methods - def __str__(self): - return str(self._chunk) - - def __repr__(self): - return 'SemSel("%s")' % self._chunk - - def __len__(self): - # What would you expect? - return len(self._chunk) - - def __iter__(self): - return iter(self._chunk) - - # Read-only (private) attributes - @property - def _chunk(self): - return self[0] - - # Utility methods - def matches(self, *vers): - """Match the selector against a selection of versions. - - Parameters: - * *vers (str, SemVer) - Versions can be passed as strings and SemVer objects will be created with them. - May also be a mixed list. - - Raises: - * TypeError - A version is not an instance of str (basestring) or SemVer. - * ValueError - A string version could not be parsed as a SemVer. - - Returns: - * list - A list with all the versions that matched, may be empty. Use `max()` to determine - the highest matching version, or `min()` for the lowest. - """ - ret = [] - for v in vers: - if isinstance(v, str): - t = self._chunk.matches(SemVer(v)) - elif isinstance(v, SemVer): - t = self._chunk.matches(v) - else: - raise TypeError("Invalid parameter type '%s': %s" % (v, type(v))) - if t: - ret.append(v) - - return ret - - # Private methods - @classmethod - def _parse(cls, sel): - """Private. Do not touch. - - 1. split by whitespace into tokens - a. start new and_chunk on ' || ' - b. parse " - " ranges - c. replace "xX*" ranges with "~" equivalent - d. parse "~" ranges - e. parse unmatched token as comparator - ~. append to current and_chunk - 2. return SemSelOrChunk - - Raises TypeError, ValueError or SelParseError. 
- """ - if not isinstance(sel, basestring): - raise TypeError("Selector must be a string") - if not sel: - raise ValueError("String must not be empty") - - # Split selector by spaces and crawl the tokens - tokens = sel.split() - i = -1 - or_chunk = SemSelOrChunk() - and_chunk = or_chunk.new_child() - - while i + 1 < len(tokens): - i += 1 - t = tokens[i] - - # Replace x ranges with ~ selector - m = cls._xrange_regex.match(t) - m = m and m.groups('') - if m and any(not x.isdigit() for x in m[1:4]) and not m[0].startswith('>'): - # (do not match '>1.0' or '>*') - if m[4]: - raise SelParseError("XRanges do not allow pre-release or build components") - - # Only use digit parts and fail if digit found after non-digit - mm, xran = [], False - for x in m[1:4]: - if x.isdigit(): - if xran: - raise SelParseError("Invalid fuzzy range or XRange '%s'" % tokens[i]) - mm.append(x) - else: - xran = True - t = m[0] + '.'.join(mm) # x for x in m[1:4] if x.isdigit()) - # Append "~" if not already present - if not t.startswith('~'): - t = '~' + t - - # switch t: - if t == '||': - if i == 0 or tokens[i - 1] == '||' or i + 1 == len(tokens): - raise SelParseError("OR range must not be empty") - # Start a new and_chunk - and_chunk = or_chunk.new_child() - - elif t == '-': - # ' - ' range - i += 1 - invalid = False - try: - # If these result in exceptions, you know you're doing it wrong - t = tokens[i] - c = and_chunk[-1] - except (Exception): - raise SelParseError("Invalid ' - ' range position") - - # If there is an op in front of one of the bound versions - invalid = (c.op not in ('=', '~') - or cls._split_op_regex.match(t).group(1) not in (None, '=')) - if invalid: - raise SelParseError("Invalid ' - ' range '%s - %s'" - % (tokens[i - 2], tokens[i])) - - c.op = ">=" - and_chunk.add_child('<=', t) - - elif t == '': - # Multiple spaces - pass - - elif t.startswith('~'): - m = cls._fuzzy_regex.match(t) - if not m: - raise SelParseError("Invalid fuzzy range or XRange '%s'" % tokens[i]) - - 
mm, m = m.groups('')[1:4], m.groupdict('') # mm: major to patch - - # Minimum requirement - min_ver = ('.'.join(x or '0' for x in mm) + '-' - if not m['other'] - else cls._split_op_regex(t[1:]).group('ver')) - and_chunk.add_child('>=', min_ver) - - if m['major']: - # Increase version before none (or second to last if '~1.2.3') - e = [0, 0, 0] - for j, d in enumerate(mm): - if not d or j == len(mm) - 1: - e[j - 1] = e[j - 1] + 1 - break - e[j] = int(d) - - and_chunk.add_child('<', '.'.join(str(x) for x in e) + '-') - - # else: just plain '~' or '*', or '~>X' which are already handled - - else: - # A normal comparator - m = cls._split_op_regex.match(t).groupdict() # this regex can't fail - and_chunk.add_child(**m) - - # Finally return the or_chunk - return or_chunk diff --git a/app/lib/package_control/downloaders/background_downloader.py b/app/lib/package_control/downloaders/background_downloader.py deleted file mode 100644 index a689d8e..0000000 --- a/app/lib/package_control/downloaders/background_downloader.py +++ /dev/null @@ -1,63 +0,0 @@ -import threading - - -class BackgroundDownloader(threading.Thread): - - """ - Downloads information from one or more URLs in the background. - Normal usage is to use one BackgroundDownloader per domain name. 
- - :param settings: - A dict containing at least the following fields: - `cache_length`, - `debug`, - `timeout`, - `user_agent`, - `http_proxy`, - `https_proxy`, - `proxy_username`, - `proxy_password` - - :param providers: - An array of providers that can download the URLs - """ - - def __init__(self, settings, providers): - self.settings = settings - self.urls = [] - self.providers = providers - self.used_providers = {} - threading.Thread.__init__(self) - - def add_url(self, url): - """ - Adds a URL to the list to download - - :param url: - The URL to download info about - """ - - self.urls.append(url) - - def get_provider(self, url): - """ - Returns the provider for the URL specified - - :param url: - The URL to return the provider for - - :return: - The provider object for the URL - """ - - return self.used_providers.get(url) - - def run(self): - for url in self.urls: - for provider_class in self.providers: - if provider_class.match_url(url): - provider = provider_class(url, self.settings) - break - - provider.prefetch() - self.used_providers[url] = provider diff --git a/app/lib/package_control/downloaders/oscrypto_downloader.py b/app/lib/package_control/downloaders/oscrypto_downloader.py index ddae04c..7d6346c 100644 --- a/app/lib/package_control/downloaders/oscrypto_downloader.py +++ b/app/lib/package_control/downloaders/oscrypto_downloader.py @@ -36,10 +36,13 @@ 'sublime_text' in sys.executable or 'plugin_host' in sys.executable): install_dir = os.path.dirname(sys.executable) - use_openssl( - os.path.join(install_dir, 'libcrypto.so.1.1'), - os.path.join(install_dir, 'libssl.so.1.1') - ) + try: + use_openssl( + os.path.join(install_dir, 'libcrypto.so.1.1'), + os.path.join(install_dir, 'libssl.so.1.1') + ) + except RuntimeError: + pass # runtime error may be raised, when reloading modules. 
from ..deps.oscrypto import tls # noqa from ..deps.oscrypto import errors as oscrypto_errors # noqa diff --git a/app/lib/package_control/http/validating_https_connection.py b/app/lib/package_control/http/validating_https_connection.py index af9e1e2..8448792 100644 --- a/app/lib/package_control/http/validating_https_connection.py +++ b/app/lib/package_control/http/validating_https_connection.py @@ -85,6 +85,14 @@ def validate_cert_host(self, cert, hostname): return True return False + # Compatibility for python 3.3 vs 3.8 + # python 3.8 replaced _set_hostport() by _get_hostport() + if not hasattr(DebuggableHTTPConnection, '_set_hostport'): + + def _set_hostport(self, host, port): + (self.host, self.port) = self._get_hostport(host, port) + self._validate_host(self.host) + def _tunnel(self): """ This custom _tunnel method allows us to read and print the debug diff --git a/app/lib/package_control/package_version.py b/app/lib/package_control/package_version.py index 1a96f5f..dd508cc 100644 --- a/app/lib/package_control/package_version.py +++ b/app/lib/package_control/package_version.py @@ -1,55 +1,38 @@ import re -from .deps.semver import SemVer from .console_write import console_write +from .pep440 import PEP440Version, PEP440InvalidVersionError -class PackageVersion(SemVer): +class PackageVersion(PEP440Version): + __slots__ = ["_str"] - _date_pattern = re.compile(r'^(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$') - _pre_semver_pattern = re.compile(r'^(\d+)(?:\.(\d+)(?:\.(\d+)(?:[T\.](\d+(\.\d+)*))?)?)?$') + _date_time_regex = re.compile(r"^\d{4}\.\d{2}\.\d{2}(?:\.\d{2}\.\d{2}\.\d{2})?$") - @classmethod - def _parse(cls, ver): + def __init__(self, ver): """ - Converts a string version number into SemVer. If the version is based on - a date, converts to 0.0.1+yyyy.mm.dd.hh.mm.ss. + Initialize a ``PackageVersion`` instance. + + The initializer acts as compatibility layer to convert legacy version schemes + into a ``PEP440Version``. 
+ + If the version is based on a date, converts to 0.0.1+yyyy.mm.dd.hh.mm.ss. :param ver: A string, dict with 'version' key, or a SemVer object :raises: - TypeError, if ver is not one of: str, dict with version, SemVer + TypeError, if ver is not a ``str``. ValueError, if ver is no valid version string - - :return: - A list of 5 items representing a valid semantic version number """ - # Allowing passing in a dict containing info about a package - if isinstance(ver, dict): - if 'version' not in ver: - raise TypeError("%s is not a package or library release" % ver) - ver = ver['version'] - - if isinstance(ver, SemVer): - return ver - if not isinstance(ver, str): - raise TypeError("%r is not a string" % ver) + raise TypeError("{!r} is not a string".format(ver)) - # Trim v off of the front - if ver.startswith('v'): - ver = ver[1:] - - # Match semver compatible strings - match = cls._match_regex.match(ver) - if match: - g = list(match.groups()) - for i in range(3): - g[i] = int(g[i]) - - return g + # Store original version string to maintain backward compatibility + # with regards to not normalize it. + # The one and only use case is to keep existing CI tests working without change. 
+ self._str = ver # We prepend 0 to all date-based version numbers so that developers # may switch to explicit versioning from GitHub/GitLab/BitBucket @@ -61,23 +44,24 @@ def _parse(cls, ver): # # The result looks like: # 0.0.1+2020.07.15.10.50.38 - match = cls._date_pattern.match(ver) + match = self._date_time_regex.match(ver) if match: - return [0, 0, 1, None, '.'.join(match.groups())] + ver = "0.0.1+" + ver - # This handles versions that were valid pre-semver with 1 to 4+ dotted - # groups, such as 1, 1.6, or 1.6.9.0 - match = cls._pre_semver_pattern.match(ver) - if match: - return [ - int(match.group(1) or 0), - int(match.group(2) or 0), - int(match.group(3) or 0), - None, - match.group(4) - ] + try: + super().__init__(ver) + except PEP440InvalidVersionError: + # maybe semver with incompatible pre-release tag + # if, so treat it as dev build with local version + if "-" in ver: + ver, pre = ver.split("-", 1) + if ver and pre: + super().__init__(ver + "-dev+" + pre) + return + raise - raise ValueError("'%s' is not a valid SemVer string" % ver) + def __str__(self): + return self._str def version_match_prefix(version, filter_prefix): @@ -126,21 +110,25 @@ def version_sort(sortable, *fields, **kwargs): """ def _version_sort_key(item): - result = PackageVersion(item) - if fields: - values = [result] - for field in fields: - values.append(item[field]) - result = tuple(values) - return result + if isinstance(item, dict): + if "version" not in item: + raise TypeError("%s is not a package or library release" % item) + result = PackageVersion(item["version"]) + if fields: + result = (result,) + for field in fields: + result += (item[field],) + return result + + return PackageVersion(item) try: return sorted(sortable, key=_version_sort_key, **kwargs) - except (ValueError) as e: + except ValueError as e: console_write( - ''' + """ Error sorting versions - %s - ''', - e + """, + e, ) return [] diff --git a/app/lib/package_control/pep440.py b/app/lib/package_control/pep440.py 
new file mode 100644 index 0000000..2f7af33 --- /dev/null +++ b/app/lib/package_control/pep440.py @@ -0,0 +1,645 @@ +""" +A PEP440 complient version module for use by Package Control. + +Note: + +This module implements ``PEP440Version`` and ``PEP440VersionSpecifier`` +using independent implementations and regex patterns to parse their string +representation, even though both share a lot. + +The reason for this kind of inlining is targetting best possible performance +for creating and compairing versions, rather than strictly following a +questionable DRY approach. + +Instantiation for each object consists of only 2 main steps: + +1. parse and validate input string using a single regular expression. +2. convert match groups into nested tuple representation, as primary + data storage and comparing key. + +The patterns include additional pre-release tag names +(e.g: ``patch``, ``prerelease``, ``developmment``, ``test``) +to maintain compatibility with various existing packages on packagecontrol.io +""" +import re + +__all__ = [ + "PEP440InvalidVersionError", + "PEP440InvalidVersionSpecifierError", + "PEP440Version", + "PEP440VersionSpecifier", + "check_version" +] + +_local_version_separators = re.compile(r"[-._]") + + +def _norm_tuples(a, b): + """ + Accepts two tuples of PEP440 version numbers and extends them until they + are the same length. This allows for comparisons between them. 
+ + Notes: + + - prerelease segment is padded + - local version don't need padding as shorter sort before longer + + :param a: + A tuple from ``PEP440Version`` + of the format: ``(epoch, release, prerelease, local)`` + + :param b: + A tuple from ``PEP440Version`` + of the format: ``(epoch, release, prerelease, local)`` + + :return: + Two potentially modified tuples, (a, b) + """ + # pad release + ar = a[1] + br = b[1] + + arl = len(ar) + brl = len(br) + + if arl < brl: + while len(ar) < brl: + ar += (0,) + a = a[:1] + (ar,) + a[2:] + + elif arl > brl: + while arl > len(br): + br += (0,) + b = b[:1] + (br,) + b[2:] + + return a, b + + +def _trim_tuples(spec, ver): + """ + Trim version to match specification's length. + + :param spec: + A tuple from ``PEP440VersionSpecifier``, representing a version prefix. + e.g.: ``(epoch, (major [, minor [, micro] ] ) )`` + + :param ver: + A tuple from ``PEP440Version`` + + :returns: + A tuple of prefix and trimmed version. + """ + segs = len(spec[1]) + release = ver[1][:segs] + while len(release) < segs: + release += (0,) + return spec, (ver[0], release) + + +def _version_info(epoch, ver, pre, local, verbose=False): + """ + Create a ``__version_info__`` tuple representation. + + :param epoch: + The epoch + + :param ver: + A tuple of integers representing the version + + :param pre: + A tuple of tuples of integers representing pre-releases + + :param local: + Local version representation. + + :returns: + A tuple of (major, minor, micro, 'pre', 'post', 'dev') + """ + info = ver + + if pre and pre[0][0] != 0: + if verbose: + tag = ("dev", "alpha", "beta", "rc", "", "post") + else: + tag = ("dev", "a", "b", "rc", "", "post") + for t, n in pre: + if t != 0: + info += (tag[t + 4], n) + else: + info += ("final",) + + if local: + info += (".".join(str(n) if n > -1 else s for n, s in local),) + + return info + + +def _version_string(epoch, ver, pre, local, prefix=False, verbose=False): + """ + Create a normalized string representation. 
+ + :param epoch: + The epoch + + :param ver: + A tuple of integers representing the version + + :param pre: + A tuple of tuples of integers representing pre-releases + + :param local: + Local version representation. + + :returns: + String representation of the version. + """ + string = str(epoch) + "!" if epoch else "" + string += ".".join(map(str, ver)) + + if prefix: + return string + ".*" + + if pre and pre[0][0] != 0: + if verbose: + tag = ("-dev{}", "-alpha{}", "-beta{}", "-rc{}", "", "-post{}") + else: + tag = (".dev{}", "a{}", "b{}", "rc{}", "", ".post{}") + for t, n in pre: + if t != 0: + string += tag[t + 4].format(n) + + if local: + string += "+" + ".".join(str(n) if n > -1 else s for n, s in local) + + return string + + +class PEP440InvalidVersionError(ValueError): + pass + + +class PEP440Version: + __slots__ = ["_tup"] + + _regex = re.compile( + r""" + ^\s* + v? + (?:(?P[0-9]+)!)? # epoch + (?P[0-9]+(?:\.[0-9]+)*) # release segment + (?P
                                              # pre-release
+            [-_.]?
+            (?Palpha|a|beta|b|prerelease|preview|pre|c|rc)
+            [-_.]?
+            (?P[0-9]+)?
+        )?
+        (?P                                             # post release
+            (?:-(?P[0-9]+))
+            |
+            (?:
+                [-_.]?
+                (?Ppatch|post|rev|r)
+                [-_.]?
+                (?P[0-9]+)?
+            )
+        )?
+        (?P                                              # dev release
+            [-_.]?
+            (?Pdevelopment|develop|devel|dev)
+            [-_.]?
+            (?P[0-9]+)?
+        )?
+        (?:\+(?P[a-z0-9]+(?:[-_.][a-z0-9]+)*))?        # local version
+        \s*$
+        """,
+        re.VERBOSE,
+    )
+
+    def __init__(self, string):
+        """
+        Constructs a new ``PEP440Version`` instance.
+
+        :param string:
+            A unicode string of the PEP440 version.
+        """
+        match = self._regex.match(string.lower())
+        if not match:
+            raise PEP440InvalidVersionError("'{}' is not a valid PEP440 version string".format(string))
+
+        (
+            epoch,
+            release,
+            pre,
+            pre_l,
+            pre_n,
+            post,
+            post_n1,
+            _,
+            post_n2,
+            dev,
+            _,
+            dev_n,
+            local,
+        ) = match.groups()
+
+        epoch = int(epoch or 0)
+        release = tuple(map(int, release.split(".")))
+
+        prerelease = ()
+
+        if pre:
+            if pre_l == "a" or pre_l == "alpha":
+                pre_l = -3
+            elif pre_l == "b" or pre_l == "beta":
+                pre_l = -2
+            else:
+                pre_l = -1
+            prerelease += ((pre_l, int(pre_n or 0)),)
+
+        if post:
+            prerelease += ((1, int(post_n1 or post_n2 or 0)),)
+
+        if dev:
+            prerelease += ((-4, int(dev_n or 0)),)
+
+        while len(prerelease) < 3:
+            prerelease += ((0, 0),)
+
+        tup = ()
+        if local:
+            # Versions with a local segment need that segment parsed to implement
+            # the sorting rules in PEP440.
+            # - Alpha numeric segments sort before numeric segments
+            # - Alpha numeric segments sort lexicographically
+            # - Numeric segments sort numerically
+            # - Shorter versions sort before longer versions when the prefixes
+            #   match exactly
+            for seg in _local_version_separators.split(local):
+                try:
+                    tup += ((int(seg), ""),)
+                except ValueError:
+                    tup += ((-1, seg),)
+
+        local = tup
+
+        self._tup = (epoch, release, prerelease, local)
+
+    def __repr__(self):
+        return "<{0.__class__.__name__}('{0!s}')>".format(self)
+
+    def __str__(self):
+        return self.version_string()
+
+    def __eq__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a == b
+
+    def __ne__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a != b
+
+    def __lt__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a < b
+
+    def __le__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a <= b
+
+    def __gt__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a > b
+
+    def __ge__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a >= b
+
+    def __hash__(self):
+        return hash(self._tup)
+
+    def version_info(self, verbose=False):
+        return _version_info(*self._tup, verbose=verbose)
+
+    def version_string(self, verbose=False):
+        return _version_string(*self._tup, verbose=verbose)
+
+    @property
+    def epoch(self):
+        return self._tup[0]
+
+    @property
+    def release(self):
+        return self._tup[1]
+
+    @property
+    def major(self):
+        try:
+            return self._tup[1][0]
+        except IndexError:
+            return 0
+
+    @property
+    def minor(self):
+        try:
+            return self._tup[1][1]
+        except IndexError:
+            return 0
+
+    @property
+    def micro(self):
+        try:
+            return self._tup[1][2]
+        except IndexError:
+            return 0
+
+    @property
+    def prerelease(self):
+        tup = ()
+        pre = self._tup[2]
+        if pre and pre[0][0] != 0:
+            tag = ("dev", "a", "b", "rc", "", "post")
+            for t, n in pre:
+                if t != 0:
+                    tup += (tag[t + 4], n)
+
+        return tup
+
+    @property
+    def local(self):
+        return ".".join(str(n) if n > -1 else s for n, s in self._tup[3])
+
+    @property
+    def is_final(self):
+        """Version represents a final release."""
+        return self._tup[2][0][0] == 0
+
+    @property
+    def is_dev(self):
+        """Version represents a pre release."""
+        return any(t[0] == -4 for t in self._tup[2])
+
+    @property
+    def is_prerelease(self):
+        """Version represents a pre release."""
+        return self._tup[2][0][0] < 0
+
+    @property
+    def is_postrelease(self):
+        """Version represents a post final release."""
+        return self._tup[2][0][0] > 0
+
+
class PEP440InvalidVersionSpecifierError(ValueError):
    """Raised when a PEP 440 version specifier string cannot be parsed."""
    pass
+
+
class PEP440VersionSpecifier:
    """
    A single PEP 440 version specifier clause, e.g. ``>=1.2``, ``==1.2.*``
    or ``~=2.2``, which can be tested against ``PEP440Version`` objects.
    """

    __slots__ = ["_operator", "_prefix", "_prereleases", "_tup"]

    # The named groups were reconstructed from the 15-tuple unpacked from
    # match.groups() in __init__ (the extraction had stripped `<name>` parts,
    # leaving an invalid pattern).
    _regex = re.compile(
        r"""
        ^\s*
        (?: (?P<op>===|==|!=|~=|<=?|>=?) \s* )?                 # operator
        v?
        (?:(?P<epoch>[0-9]+)!)?                                 # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                        # release segment
        (?:
            \.(?P<wildcard>\*)                                  # prefix-release
            |
            (?P<pre>                                            # pre-release
                [-_.]?
                (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
                [-_.]?
                (?P<pre_n>[0-9]+)?
            )?
            (?P<post>                                           # post release
                (?:-(?P<post_n1>[0-9]+))
                |
                (?:
                    [-_.]?
                    (?P<post_l>post|rev|r)
                    [-_.]?
                    (?P<post_n2>[0-9]+)?
                )
            )?
            (?P<dev>                                            # dev release
                [-_.]?
                (?P<dev_l>dev)
                [-_.]?
                (?P<dev_n>[0-9]+)?
            )?
            (?:\+(?P<local>[a-z0-9]+(?:[-_.][a-z0-9]+)*))?      # local version
        )
        \s*$
        """,
        re.VERBOSE,
    )

    _op_str = ("", "===", "==", "!=", "~=", "<", "<=", ">", ">=")

    # Operator codes; each is the operator's index in _op_str.
    OP_ITY = 1   # === arbitrary equality
    OP_EQ = 2
    OP_NE = 3
    OP_CPE = 4   # ~= compatible release
    OP_LT = 5
    OP_LTE = 6
    OP_GT = 7
    OP_GTE = 8

    def __init__(self, string, prereleases=True):
        """
        Constructs a new ``PEP440VersionSpecifier`` instance.

        :param string:
            A unicode string of the PEP 440 version specifier.

        :param prereleases:
            If ``False``, ``contains()`` rejects pre-release versions outright.

        :raises:
            PEP440InvalidVersionSpecifierError: when ``string`` cannot be parsed
        """
        match = self._regex.match(string.lower())
        if not match:
            raise PEP440InvalidVersionSpecifierError(
                "'{}' is not a valid PEP 440 version specifier string".format(string)
            )

        (
            op,
            epoch,
            release,
            wildcard,
            pre,
            pre_l,
            pre_n,
            post,
            post_n1,
            _,
            post_n2,
            dev,
            _,
            dev_n,
            local,
        ) = match.groups()

        self._operator = self._op_str.index(op) if op else self.OP_EQ
        self._prefix = bool(wildcard)
        self._prereleases = prereleases

        epoch = int(epoch or 0)
        release = tuple(map(int, release.split(".")))

        if self._prefix:
            # Wildcard specifiers (e.g. ==1.2.*) only support (in)equality.
            if self._operator not in (self.OP_EQ, self.OP_NE):
                raise PEP440InvalidVersionSpecifierError(
                    "'{}' is not a valid PEP 440 version specifier string".format(string)
                )

            self._tup = (epoch, release)
            return

        # PEP 440 requires at least two release segments for ~= (e.g. ~=2.2).
        if self._operator == self.OP_CPE and len(release) < 2:
            raise PEP440InvalidVersionSpecifierError(
                "'{}' is not a valid PEP 440 version specifier string".format(string)
            )

        prerelease = ()

        if pre:
            # Encode the pre-release phase as a negative ordinal: a=-3, b=-2, rc=-1.
            if pre_l == "a" or pre_l == "alpha":
                pre_l = -3
            elif pre_l == "b" or pre_l == "beta":
                pre_l = -2
            else:
                pre_l = -1
            prerelease += ((pre_l, int(pre_n or 0)),)

        if post:
            prerelease += ((1, int(post_n1 or post_n2 or 0)),)

        if dev:
            prerelease += ((-4, int(dev_n or 0)),)

        # Pad to exactly three (tag, num) pairs so tuples compare element-wise.
        while len(prerelease) < 3:
            prerelease += ((0, 0),)

        tup = ()
        if local:
            # Local version labels are only meaningful for (in)equality checks.
            if self._operator not in (self.OP_EQ, self.OP_NE, self.OP_ITY):
                raise PEP440InvalidVersionSpecifierError(
                    "'{}' is not a valid PEP 440 version specifier string".format(string)
                )

            # Numeric segments sort as ints; textual ones as (-1, text) pairs.
            for seg in _local_version_separators.split(local):
                try:
                    tup += ((int(seg), ""),)
                except ValueError:
                    tup += ((-1, seg),)
        local = tup

        self._tup = (epoch, release, prerelease, local)

    def __repr__(self):
        return "<{0.__class__.__name__}('{0!s}')>".format(self)

    def __str__(self):
        return self._op_str[self._operator] + self.version_string()

    def __contains__(self, version):
        return self.contains(version)

    def __hash__(self):
        return hash((self._operator, self._tup))

    def contains(self, version):
        """
        Ensures the version matches this specifier

        :param version:
            A ``PEP440Version`` object to check.

        :return:
            Returns ``True`` if ``version`` satisfies the ``specifier``.
        """
        if not self._prereleases and version.is_prerelease:
            return False

        if self._prefix:
            # The specifier is a version prefix (aka. wildcard present).
            # Trim and normalize version to ( epoch, ( major [, minor [, micro ] ] ) ),
            # so it matches exactly the specifier's length.

            self_tup, ver_tup = _trim_tuples(self._tup, version._tup)

            # Compare against the trimmed tuple (was self._tup, which left
            # self_tup unused and ignored the trimming).
            if self._operator == self.OP_EQ:
                return ver_tup == self_tup

            if self._operator == self.OP_NE:
                return ver_tup != self_tup

        else:
            if self._operator == self.OP_ITY:
                # Arbitrary equality (===) is a literal string comparison.
                return version.version_string(False) == self.version_string(False)

            self_tup, ver_tup = _norm_tuples(self._tup, version._tup)

            if self._operator == self.OP_CPE:
                # Compatible releases have an equivalent combination of >= and ==.
                # That is that ~=2.2 is equivalent to >=2.2,==2.*.
                if ver_tup < self_tup:
                    return False

                # create prefix specifier with last digit removed.
                self_tup, ver_tup = _trim_tuples((self._tup[0], self._tup[1][:-1]), version._tup)
                return ver_tup == self_tup

            if self._operator == self.OP_EQ:
                return ver_tup == self_tup

            if self._operator == self.OP_NE:
                return ver_tup != self_tup

            if self._operator == self.OP_GTE:
                return ver_tup >= self_tup

            if self._operator == self.OP_GT:
                # TODO:
                #  - parse local version and include into comparison result
                #  - drop only invalid local versions
                return ver_tup[:2] > self_tup[:2]

            if self._operator == self.OP_LTE:
                return ver_tup <= self_tup

            if self._operator == self.OP_LT:
                # TODO:
                #  - parse local version and include into comparison result
                #  - drop only invalid local versions
                return ver_tup[:2] < self_tup[:2]

        raise PEP440InvalidVersionSpecifierError(
            "Invalid PEP 440 version specifier operator: {!r}".format(self._operator)
        )

    def filter(self, iterable):
        """Yield only the versions from ``iterable`` that satisfy this specifier."""
        return filter(self.contains, iterable)

    def version_string(self, verbose=False):
        """Render the specifier's version part (without the operator)."""
        return _version_string(*self._tup, prefix=self._prefix, verbose=verbose)
+
+
def check_version(spec, version, include_prereleases=False):
    """
    Check whether a version satisfies a version specifier.

    :param spec:
        The PEP 440 version specifier string.

    :param version:
        The PEP 440 version string or ``PEP440Version`` object to check.

    :param include_prereleases:
        If ``True``, pre-release versions may also satisfy the specifier.
        If ``False`` (default), only final releases can satisfy it.

    :returns:
        ``True`` if ``version`` satisfies the ``spec``.
    """
    if isinstance(version, str):
        version = PEP440Version(version)
    specifier = PEP440VersionSpecifier(spec, include_prereleases)
    return specifier.contains(version)
diff --git a/app/lib/package_control/providers/schema_compat.py b/app/lib/package_control/providers/schema_compat.py
deleted file mode 100644
index d25ebb1..0000000
--- a/app/lib/package_control/providers/schema_compat.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from ..deps.semver import SemVer
-
-
-class SchemaVersion(SemVer):
-    supported_versions = ('2.0', '3.0.0', '4.0.0')
-
-    @classmethod
-    def _parse(cls, ver):
-        """
-        Custom version string parsing to maintain backward compatibility.
-
-        SemVer needs all of major, minor and patch parts being present in `ver`.
-
-        :param ver:
-            An integer, float or string containing a version string.
-
-        :returns:
-            List of (major, minor, patch)
-        """
-        try:
-            if isinstance(ver, int):
-                ver = float(ver)
-            if isinstance(ver, float):
-                ver = str(ver)
-        except ValueError:
-            raise ValueError('the "schema_version" is not a valid number.')
-
-        if ver not in cls.supported_versions:
-            raise ValueError(
-                'the "schema_version" is not recognized. Must be one of: %s or %s.'
-                % (', '.join(cls.supported_versions[:-1]), cls.supported_versions[-1])
-            )
-
-        if ver.count('.') == 1:
-            ver += '.0'
-
-        return SemVer._parse(ver)
diff --git a/app/lib/package_control/providers/schema_version.py b/app/lib/package_control/providers/schema_version.py
index d25ebb1..3e5efbb 100644
--- a/app/lib/package_control/providers/schema_version.py
+++ b/app/lib/package_control/providers/schema_version.py
@@ -1,11 +1,10 @@
-from ..deps.semver import SemVer
+from ..pep440 import PEP440Version
 
 
-class SchemaVersion(SemVer):
+class SchemaVersion(PEP440Version):
     supported_versions = ('2.0', '3.0.0', '4.0.0')
 
-    @classmethod
-    def _parse(cls, ver):
+    def __init__(self, ver):
         """
         Custom version string parsing to maintain backward compatibility.
 
@@ -25,13 +24,10 @@ def _parse(cls, ver):
         except ValueError:
             raise ValueError('the "schema_version" is not a valid number.')
 
-        if ver not in cls.supported_versions:
+        if ver not in self.supported_versions:
             raise ValueError(
                 'the "schema_version" is not recognized. Must be one of: %s or %s.'
-                % (', '.join(cls.supported_versions[:-1]), cls.supported_versions[-1])
+                % (', '.join(self.supported_versions[:-1]), self.supported_versions[-1])
             )
 
-        if ver.count('.') == 1:
-            ver += '.0'
-
-        return SemVer._parse(ver)
+        super().__init__(ver)
diff --git a/app/lib/st_package_reviewer/check/file/check_messages.py b/app/lib/st_package_reviewer/check/file/check_messages.py
index 45be76f..5ea9b31 100644
--- a/app/lib/st_package_reviewer/check/file/check_messages.py
+++ b/app/lib/st_package_reviewer/check/file/check_messages.py
@@ -1,10 +1,40 @@
 import json
 import re
 
-from ...lib.semver import SemVer
-
 from . import FileChecker
 
+_semver_regex = re.compile(
+    r"""
+    ^\s*
+    v?
+    (?P[0-9]+(?:\.[0-9]+){2})                    # semver release segment
+    (?P
                                              # pre-release
+        [-_.]?
+        (?Palpha|a|beta|b|prerelease|preview|pre|c|rc)
+        [-_.]?
+        (?P[0-9]+)?
+    )?
+    (?P                                             # post release
+        (?:-(?P[0-9]+))
+        |
+        (?:
+            [-_.]?
+            (?Ppatch|post|rev|r)
+            [-_.]?
+            (?P[0-9]+)?
+        )
+    )?
+    (?P                                              # dev release
+        [-_.]?
+        (?Pdevelopment|develop|devel|dev)
+        [-_.]?
+        (?P[0-9]+)?
+    )?
+    \s*$
+    """,
+    re.VERBOSE,
+)
+
 
 class CheckMessages(FileChecker):
 
@@ -42,7 +72,7 @@ def check(self):
             for key, rel_path in data.items():
                 if key == "install":
                     pass
-                elif SemVer.valid(re.sub(prefix_regex, '', key)):
+                elif _semver_regex.match(re.sub(prefix_regex, '', key)):
                     pass
                 else:
                     self.fail("Key {!r} is not 'install' or a valid semantic version"

From 492ce55d5892c0178697fbe29f4978878062afd0 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sun, 8 Oct 2023 17:41:19 +0200
Subject: [PATCH 23/39] Sync with Package Control

---
 app/lib/package_control/__init__.py           |   4 +-
 .../downloaders/oscrypto_downloader.py        |   2 +-
 app/lib/package_control/providers/__init__.py |   4 +-
 .../providers/json_repository_provider.py     | 860 ++++++++++++++++++
 .../providers/repository_provider.py          | 836 -----------------
 setup/scripts/extract_package_control.py      |   2 +-
 6 files changed, 866 insertions(+), 842 deletions(-)
 create mode 100644 app/lib/package_control/providers/json_repository_provider.py
 delete mode 100644 app/lib/package_control/providers/repository_provider.py

diff --git a/app/lib/package_control/__init__.py b/app/lib/package_control/__init__.py
index 5a6c20d..5672732 100644
--- a/app/lib/package_control/__init__.py
+++ b/app/lib/package_control/__init__.py
@@ -1,2 +1,2 @@
-__version__ = "4.0.0-beta4"
-__version_info__ = (4, 0, 0, 'beta', 4)
+__version__ = "4.0.0-beta8"
+__version_info__ = (4, 0, 0, 'beta', 8)
diff --git a/app/lib/package_control/downloaders/oscrypto_downloader.py b/app/lib/package_control/downloaders/oscrypto_downloader.py
index 7d6346c..9379114 100644
--- a/app/lib/package_control/downloaders/oscrypto_downloader.py
+++ b/app/lib/package_control/downloaders/oscrypto_downloader.py
@@ -171,7 +171,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
                 if code == 304:
                     return self.cache_result('get', url, code, resp_headers, b'')
 
-                if code == 301:
+                if code == 301 or code == 302:
                     location = resp_headers.get('location')
                     if not isinstance(location, str):
                         raise OscryptoDownloaderException('Missing or duplicate Location HTTP header')
diff --git a/app/lib/package_control/providers/__init__.py b/app/lib/package_control/providers/__init__.py
index 7549f9e..416ccd0 100644
--- a/app/lib/package_control/providers/__init__.py
+++ b/app/lib/package_control/providers/__init__.py
@@ -3,7 +3,7 @@
 from .github_user_provider import GitHubUserProvider
 from .gitlab_repository_provider import GitLabRepositoryProvider
 from .gitlab_user_provider import GitLabUserProvider
-from .repository_provider import RepositoryProvider
+from .json_repository_provider import JsonRepositoryProvider
 
 from .channel_provider import ChannelProvider
 
@@ -14,7 +14,7 @@
     GitHubUserProvider,
     GitLabRepositoryProvider,
     GitLabUserProvider,
-    RepositoryProvider
+    JsonRepositoryProvider
 ]
 
 CHANNEL_PROVIDERS = [ChannelProvider]
diff --git a/app/lib/package_control/providers/json_repository_provider.py b/app/lib/package_control/providers/json_repository_provider.py
new file mode 100644
index 0000000..e5eb247
--- /dev/null
+++ b/app/lib/package_control/providers/json_repository_provider.py
@@ -0,0 +1,860 @@
+import json
+import re
+import os
+from itertools import chain
+from urllib.parse import urlparse
+
+from ..clients.bitbucket_client import BitBucketClient
+from ..clients.client_exception import ClientException
+from ..clients.github_client import GitHubClient
+from ..clients.gitlab_client import GitLabClient
+from ..console_write import console_write
+from ..download_manager import http_get, resolve_url, resolve_urls, update_url
+from ..downloaders.downloader_exception import DownloaderException
+from ..package_version import version_sort
+from .base_repository_provider import BaseRepositoryProvider
+from .provider_exception import ProviderException
+from .schema_version import SchemaVersion
+
+
class InvalidRepoFileException(ProviderException):
    """Raised when a repository file fails structural validation."""

    def __init__(self, repo, reason_message):
        message = (
            'Repository {} does not appear to be a valid repository file because'
            ' {}'.format(repo.repo_url, reason_message))
        super().__init__(message)
+
+
class InvalidLibraryReleaseKeyError(ProviderException):
    """Raised when a library release entry has an invalid or missing key."""

    def __init__(self, repo, name, key):
        message = (
            'Invalid or missing release-level key "{}" in library "{}"'
            ' in repository "{}".'.format(key, name, repo))
        super().__init__(message)
+
+
class InvalidPackageReleaseKeyError(ProviderException):
    """Raised when a package release entry has an invalid or missing key."""

    def __init__(self, repo, name, key):
        message = (
            'Invalid or missing release-level key "{}" in package "{}"'
            ' in repository "{}".'.format(key, name, repo))
        super().__init__(message)
+
+
+class JsonRepositoryProvider(BaseRepositoryProvider):
+    """
+    Generic repository downloader that fetches package info
+
+    With the current channel/repository architecture where the channel file
+    caches info from all includes repositories, these package providers just
+    serve the purpose of downloading packages not in the default channel.
+
+    The structure of the JSON a repository should contain is located in
+    example-packages.json.
+
+    :param repo_url:
+        The URL of the package repository
+
+    :param settings:
+        A dict containing at least the following fields:
+          `cache_length`,
+          `debug`,
+          `timeout`,
+          `user_agent`
+        Optional fields:
+          `http_proxy`,
+          `https_proxy`,
+          `proxy_username`,
+          `proxy_password`,
+          `query_string_params`,
+          `http_basic_auth`
+    """
+
+    def __init__(self, repo_url, settings):
+        super().__init__(repo_url, settings)
+        self.included_urls = set()
+        self.repo_info = None
+        self.schema_version = None
+
+    def fetch(self):
+        """
+        Retrieves and loads the JSON for other methods to use
+
+        :raises:
+            InvalidChannelFileException: when parsing or validation file content fails
+            ProviderException: when an error occurs trying to open a file
+            DownloaderException: when an error occurs trying to open a URL
+        """
+
+        if self.repo_info is not None:
+            return True
+
+        if self.repo_url in self.failed_sources:
+            return False
+
+        try:
+            self.repo_info = self.fetch_repo(self.repo_url)
+            self.schema_version = self.repo_info['schema_version']
+        except (DownloaderException, ClientException, ProviderException) as e:
+            self.failed_sources[self.repo_url] = e
+            self.libraries = {}
+            self.packages = {}
+            return False
+
+        return True
+
    def fetch_repo(self, location):
        """
        Fetches the contents of a URL or file path

        :param location:
            The URL or file path

        :raises:
            ProviderException: when an error occurs trying to open a file
            DownloaderException: when an error occurs trying to open a URL

        :return:
            A dict of the parsed JSON
        """

        # Prevent circular includes
        if location in self.included_urls:
            raise ProviderException('Error, repository "%s" already included.' % location)

        self.included_urls.add(location)

        if re.match(r'https?://', location, re.I):
            json_string = http_get(location, self.settings, 'Error downloading repository.')

        # Anything that is not a URL is expected to be a filesystem path
        else:
            if not os.path.exists(location):
                raise ProviderException('Error, file %s does not exist' % location)

            if self.settings.get('debug'):
                console_write(
                    '''
                    Loading %s as a repository
                    ''',
                    location
                )

            # We open as binary so we get bytes like the DownloadManager
            with open(location, 'rb') as f:
                json_string = f.read()

        try:
            repo_info = json.loads(json_string.decode('utf-8'))
        except (ValueError):
            raise InvalidRepoFileException(self, 'parsing JSON failed.')

        try:
            # SchemaVersion() validates the value; store the parsed object back
            # so all later consumers see a comparable version object.
            schema_version = repo_info['schema_version'] = SchemaVersion(repo_info['schema_version'])
        except KeyError:
            raise InvalidRepoFileException(
                self, 'the "schema_version" JSON key is missing.')
        except ValueError as e:
            raise InvalidRepoFileException(self, e)

        # Main keys depending on scheme version
        if schema_version.major < 4:
            repo_keys = {'packages', 'dependencies', 'includes'}
        else:
            repo_keys = {'packages', 'libraries', 'includes'}

        # Check existence of at least one required main key
        if not set(repo_info.keys()) & repo_keys:
            raise InvalidRepoFileException(self, 'it doesn\'t look like a repository.')

        # Check type of existing main keys
        for key in repo_keys:
            if key in repo_info and not isinstance(repo_info[key], list):
                raise InvalidRepoFileException(self, 'the "%s" key is not an array.' % key)

        # Migrate dependencies to libraries
        # The 4.0.0 repository schema renamed dependencies key to libraries.
        if schema_version.major < 4:
            repo_info['libraries'] = repo_info.pop('dependencies', [])

        # Allow repositories to include other repositories, recursively
        includes = repo_info.pop('includes', None)
        if includes:
            for include in resolve_urls(self.repo_url, includes):
                try:
                    include_info = self.fetch_repo(include)
                except (DownloaderException, ClientException, ProviderException) as e:
                    # Failed includes are recorded but don't abort the parent repo.
                    self.failed_sources[include] = e
                else:
                    include_version = include_info['schema_version']
                    if include_version != schema_version:
                        raise ProviderException(
                            'Scheme version of included repository %s doesn\'t match its parent.' % include)

                    # NOTE(review): assumes the parent repo has a 'packages' list;
                    # a repo defining only 'libraries' + 'includes' would raise
                    # KeyError here — confirm upstream guarantees the key.
                    repo_info['packages'].extend(include_info.get('packages', []))
                    repo_info['libraries'].extend(include_info.get('libraries', []))

        return repo_info
+
+    def get_libraries(self, invalid_sources=None):
+        """
+        Provides access to the libraries in this repository
+
+        :param invalid_sources:
+            A list of URLs that are permissible to fetch data from
+
+        :return:
+            A generator of
+            (
+                'Library Name',
+                {
+                    'name': name,
+                    'description': description,
+                    'author': author,
+                    'issues': URL,
+                    'releases': [
+                        {
+                            'sublime_text': compatible version,
+                            'platforms': [platform name, ...],
+                            'python_versions': ['3.3', '3.8'],
+                            'url': url,
+                            'version': version,
+                            'sha256': hex hash
+                        }, ...
+                    ],
+                    'sources': [url, ...]
+                }
+            )
+            tuples
+        """
+
+        if self.libraries is not None:
+            for key, value in self.libraries.items():
+                yield (key, value)
+            return
+
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
+
+        if not self.fetch():
+            return
+
+        if self.schema_version.major >= 4:
+            allowed_library_keys = {
+                'name', 'description', 'author', 'homepage', 'issues', 'releases'
+            }
+            allowed_release_keys = {  # todo: remove 'branch'
+                'base', 'version', 'sublime_text', 'platforms', 'python_versions', 'branch', 'tags', 'url', 'sha256'
+            }
+        else:
+            allowed_library_keys = {
+                'name', 'description', 'author', 'issues', 'load_order', 'releases'
+            }
+            allowed_release_keys = {
+                'base', 'version', 'sublime_text', 'platforms', 'branch', 'tags', 'url', 'sha256'
+            }
+
+        required_library_keys = {
+            'description', 'author', 'issues', 'releases'
+        }
+
+        copied_library_keys = ('name', 'description', 'author', 'homepage', 'issues')
+        copied_release_keys = ('date', 'version', 'sha256')
+        default_platforms = ['*']
+        default_python_versions = ['3.3']
+        default_sublime_text = '*'
+
+        debug = self.settings.get('debug')
+
+        clients = [
+            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient)
+        ]
+
+        output = {}
+        for library in self.repo_info['libraries']:
+            info = {
+                'releases': [],
+                'sources': [self.repo_url]
+            }
+
+            for field in copied_library_keys:
+                field_value = library.get(field)
+                if field_value:
+                    info[field] = field_value
+
+            if 'name' not in info:
+                self.failed_sources[self.repo_url] = ProviderException(
+                    'No "name" value for one of libraries'
+                    ' in repository "{}".'.format(self.repo_url)
+                )
+                continue
+
+            try:
+                unknown_keys = set(library) - allowed_library_keys
+                if unknown_keys:
+                    raise ProviderException(
+                        'The "{}" key(s) in library "{}" in repository {} are not supported.'.format(
+                            '", "'.join(sorted(unknown_keys)), info['name'],
+                            self.repo_url
+                        )
+                    )
+
+                releases = library.get('releases', [])
+                if releases and not isinstance(releases, list):
+                    raise ProviderException(
+                        'The "releases" value is not an array for library "{}"'
+                        ' in repository {}.'.format(info['name'], self.repo_url)
+                    )
+
+                for release in releases:
+                    download_info = {}
+
+                    unknown_keys = set(release) - allowed_release_keys
+                    if unknown_keys:
+                        raise ProviderException(
+                            'The "{}" key(s) in one of the releases of library "{}"'
+                            ' in repository {} are not supported.'.format(
+                                '", "'.join(sorted(unknown_keys)), info['name'], self.repo_url
+                            )
+                        )
+
+                    # Validate libraries
+                    # the key can be used to specify dependencies, upstream via repositories
+                    key = 'libraries' if self.schema_version.major >= 4 else 'dependencies'
+                    value = release.get(key, [])
+                    if value:
+                        if not isinstance(value, list):
+                            raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                        download_info['libraries'] = value
+
+                    # Validate supported platforms
+                    key = 'platforms'
+                    value = release.get(key, default_platforms)
+                    if isinstance(value, str):
+                        value = [value]
+                    elif not isinstance(value, list):
+                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                    download_info[key] = value
+
+                    # Validate supported python_versions
+                    key = 'python_versions'
+                    value = release.get(key, default_python_versions)
+                    if isinstance(value, str):
+                        value = [value]
+                    elif not isinstance(value, list):
+                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                    download_info[key] = value
+
+                    # Validate supported ST version
+                    key = 'sublime_text'
+                    value = release.get(key, default_sublime_text)
+                    if not isinstance(value, str):
+                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                    download_info[key] = value
+
+                    # Validate url
+                    # if present, it is an explicit or resolved release
+                    url = release.get('url')
+                    if url:
+                        for key in copied_release_keys:
+                            if key in release:
+                                value = release[key]
+                                if not value or not isinstance(value, str):
+                                    raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                                download_info[key] = value
+
+                        if 'version' not in download_info:
+                            raise ProviderException(
+                                'Missing "version" key in release with explicit "url" of library "{}"'
+                                ' in repository "%s".'.format(info['name'], self.repo_url)
+                            )
+
+                        download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
+                        is_http = urlparse(download_info['url']).scheme == 'http'
+                        if is_http and 'sha256' not in download_info:
+                            raise ProviderException(
+                                'No "sha256" key for the non-secure "url" value in one of the releases'
+                                ' of the library "{}" in repository {}.'.format(info['name'], self.repo_url)
+                            )
+
+                        info['releases'].append(download_info)
+                        continue
+
+                    # Resolve release template using `base` and `branch` or `tags` keys
+
+                    base = release.get('base')
+                    if not base:
+                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], 'base')
+
+                    base_url = resolve_url(self.repo_url, base)
+                    downloads = None
+
+                    # Evaluate and resolve "tags" and "branch" release templates
+                    tags = release.get('tags')
+                    branch = release.get('branch')
+
+                    if tags:
+                        extra = None
+                        if tags is not True:
+                            extra = tags
+                        for client in clients:
+                            downloads = client.download_info_from_tags(base_url, extra)
+                            if downloads is not None:
+                                break
+
+                    elif branch:
+                        for client in clients:
+                            downloads = client.download_info_from_branch(base_url, branch)
+                            if downloads is not None:
+                                break
+                    else:
+                        raise ProviderException(
+                            'Missing "branch", "tags" or "url" key in release of library "{}"'
+                            ' in repository "{}".'.format(info['name'], self.repo_url)
+                        )
+
+                    if downloads is None:
+                        raise ProviderException(
+                            'Invalid "base" value "{}" for one of the releases of library "{}"'
+                            ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                        )
+
+                    if downloads is False:
+                        raise ProviderException(
+                            'No valid semver tags found at "{}" for library "{}"'
+                            ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                        )
+
+                    for download in downloads:
+                        download.update(download_info)
+                        info['releases'].append(download)
+
+                # check required library keys
+                for key in required_library_keys:
+                    if not info.get(key):
+                        raise ProviderException(
+                            'Missing or invalid "{}" key for library "{}"'
+                            ' in repository "{}".'.format(key, info['name'], self.repo_url)
+                        )
+
+                info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
+
+                output[info['name']] = info
+                yield (info['name'], info)
+
+            except (DownloaderException, ClientException, ProviderException) as e:
+                self.broken_libriaries[info['name']] = e
+
+        self.libraries = output
+
+    def get_packages(self, invalid_sources=None):
+        """
+        Provides access to the packages in this repository
+
+        :param invalid_sources:
+            A list of URLs that should not be fetched from
+
+        :return:
+            A generator of
+            (
+                'Package Name',
+                {
+                    'name': name,
+                    'description': description,
+                    'author': author,
+                    'homepage': homepage,
+                    'previous_names': [old_name, ...],
+                    'labels': [label, ...],
+                    'sources': [url, ...],
+                    'readme': url,
+                    'issues': url,
+                    'donate': url,
+                    'buy': url,
+                    'last_modified': last modified date,
+                    'releases': [
+                        {
+                            'sublime_text': compatible version,
+                            'platforms': [platform name, ...],
+                            'url': url,
+                            'date': date,
+                            'version': version,
+                            'libraries': [library name, ...]
+                        }, ...
+                    ]
+                }
+            )
+            tuples
+        """
+
+        if self.packages is not None:
+            for key, value in self.packages.items():
+                yield (key, value)
+            return
+
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
+
+        if not self.fetch():
+            return
+
+        required_package_keys = {'author', 'releases'}
+
+        copied_package_keys = (
+            'name',
+            'description',
+            'author',
+            'last_modified',
+            'previous_names',
+            'labels',
+            'homepage',
+            'readme',
+            'issues',
+            'donate',
+            'buy'
+        )
+        copied_release_keys = ('date', 'version')
+        default_platforms = ['*']
+        default_sublime_text = '*'
+
+        debug = self.settings.get('debug')
+
+        clients = [
+            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient)
+        ]
+
+        output = {}
+        for package in self.repo_info['packages']:
+            info = {
+                'releases': [],
+                'sources': [self.repo_url]
+            }
+
+            for field in copied_package_keys:
+                if package.get(field):
+                    info[field] = package.get(field)
+
+            # Try to grab package-level details from GitHub or BitBucket
+            details = package.get('details')
+            if details:
+                details = resolve_url(self.repo_url, details)
+
+                if invalid_sources is not None and details in invalid_sources:
+                    continue
+
+                if details not in info['sources']:
+                    info['sources'].append(details)
+
+                try:
+                    repo_info = None
+
+                    for client in clients:
+                        repo_info = client.repo_info(details)
+                        if repo_info:
+                            break
+                    else:
+                        raise ProviderException(
+                            'Invalid "details" value "{}" for one of the packages'
+                            ' in the repository {}.'.format(details, self.repo_url)
+                        )
+
+                    del repo_info['default_branch']
+
+                    # When grabbing details, prefer explicit field values over the values
+                    # from the GitHub or BitBucket API
+                    info = dict(chain(repo_info.items(), info.items()))
+
+                except (DownloaderException, ClientException, ProviderException) as e:
+                    if 'name' in info:
+                        self.broken_packages[info['name']] = e
+                    self.failed_sources[details] = e
+                    continue
+
+            if 'name' not in info:
+                self.failed_sources[self.repo_url] = ProviderException(
+                    'No "name" value for one of the packages'
+                    ' in the repository {}.'.format(self.repo_url)
+                )
+                continue
+
+            try:
+                # evaluate releases
+
+                releases = package.get('releases')
+
+                if self.schema_version.major == 2:
+                    # If no releases info was specified, also grab the download info from GH or BB
+                    if not releases and details:
+                        releases = [{'details': details}]
+
+                if not releases:
+                    raise ProviderException(
+                        'No "releases" value for the package "{}"'
+                        ' in the repository {}.'.format(info['name'], self.repo_url)
+                    )
+
+                if not isinstance(releases, list):
+                    raise ProviderException(
+                        'The "releases" value is not an array for the package "{}"'
+                        ' in the repository {}.'.format(info['name'], self.repo_url)
+                    )
+
+                # This allows developers to specify a GH or BB location to get releases from,
+                # especially tags URLs (https://github.com/user/repo/tags or
+                # https://bitbucket.org/user/repo#tags)
+                for release in releases:
+                    download_info = {}
+
+                    # Validate libraries
+                    # the key can be used to specify dependencies, upstream via repositories
+                    key = 'libraries' if self.schema_version.major >= 4 else 'dependencies'
+                    value = release.get(key, [])
+                    if value:
+                        if not isinstance(value, list):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        download_info['libraries'] = value
+
+                    # Validate supported platforms
+                    key = 'platforms'
+                    value = release.get(key, default_platforms)
+                    if isinstance(value, str):
+                        value = [value]
+                    elif not isinstance(value, list):
+                        raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                    download_info[key] = value
+
+                    # Validate supported python_versions
+                    if self.schema_version.major >= 4:
+                        key = 'python_versions'
+                        value = release.get(key)
+                        if value:
+                            # Package releases may optionally contain `python_versions` list to tell
+                            # which python versions they are compatible with.
+                            # The main purpose is to be able to opt-in unmaintained packages to python 3.8
+                            # if they are known not to cause trouble.
+                            if isinstance(value, str):
+                                value = [value]
+                            elif not isinstance(value, list):
+                                raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                            download_info[key] = value
+
+                    if self.schema_version.major >= 3:
+                        # Validate supported ST version
+                        # missing key indicates any ST3+ build is supported
+                        key = 'sublime_text'
+                        value = release.get(key, default_sublime_text)
+                        if not isinstance(value, str):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        download_info[key] = value
+
+                        # Validate url
+                        # if present, it is an explicit or resolved release
+                        url = release.get('url')
+                        if url:
+                            # Validate date and version
+                            for key in copied_release_keys:
+                                if key in release:
+                                    value = release[key]
+                                    if not value or not isinstance(value, str):
+                                        raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                                    download_info[key] = value
+
+                            if 'version' not in download_info:
+                                raise ProviderException(
+                                    'Missing "version" key in release with explicit "url" of package "{}"'
+                                    ' in repository "{}".'.format(info['name'], self.repo_url)
+                                )
+
+                            download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
+                            info['releases'].append(download_info)
+                            continue
+
+                        # Resolve release template using `base` and `branch` or `tags` keys
+
+                        base = release.get('base')
+                        if not base:
+                            base = details
+                        if not base:
+                            raise ProviderException(
+                                'Missing root-level "details" key, or release-level "base" key'
+                                ' for one of the releases of package "{}"'
+                                ' in repository {}.'.format(info['name'], self.repo_url)
+                            )
+
+                        base_url = resolve_url(self.repo_url, base)
+                        downloads = None
+
+                        tags = release.get('tags')
+                        branch = release.get('branch')
+
+                        if tags:
+                            extra = None
+                            if tags is not True:
+                                extra = tags
+                            for client in clients:
+                                downloads = client.download_info_from_tags(base_url, extra)
+                                if downloads is not None:
+                                    break
+                        elif branch:
+                            for client in clients:
+                                downloads = client.download_info_from_branch(base_url, branch)
+                                if downloads is not None:
+                                    break
+                        else:
+                            raise ProviderException(
+                                'Missing "branch", "tags" or "url" key in release of package "{}"'
+                                ' in repository "{}".'.format(info['name'], self.repo_url)
+                            )
+
+                        if downloads is None:
+                            raise ProviderException(
+                                'Invalid "base" value "{}" for one of the releases of package "{}"'
+                                ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                            )
+
+                        if downloads is False:
+                            raise ProviderException(
+                                'No valid semver tags found at "{}" for package "{}"'
+                                ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                            )
+
+                        for download in downloads:
+                            download.update(download_info)
+                            info['releases'].append(download)
+
+                    elif self.schema_version.major == 2:
+                        # missing key indicates ST2 release; no longer supported
+                        key = 'sublime_text'
+                        value = release.get(key)
+                        if not value:
+                            continue
+                        if not isinstance(value, str):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        download_info[key] = value
+
+                        # Validate url
+                        # if present, it is an explicit or resolved release
+                        url = release.get('url')
+                        if url:
+                            for key in copied_release_keys:
+                                if key in release:
+                                    value = release[key]
+                                    if not value or not isinstance(value, str):
+                                        raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                                    download_info[key] = value
+
+                            if 'version' not in download_info:
+                                raise ProviderException(
+                                    'Missing "version" key in release with explicit "url" of package "{}"'
+                                    ' in repository "{}".'.format(info['name'], self.repo_url)
+                                )
+
+                            download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
+                            info['releases'].append(download_info)
+                            continue
+
+                        # Evaluate and resolve "tags" and "branch" release templates
+
+                        download_details = release.get('details')
+                        if not download_details or not isinstance(download_details, str):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], 'details')
+
+                        download_details = resolve_url(self.repo_url, release['details'])
+
+                        downloads = None
+
+                        for client in clients:
+                            downloads = client.download_info(download_details)
+                            if downloads is not None:
+                                break
+
+                        if downloads is None:
+                            raise ProviderException(
+                                'Invalid "details" value "{}" for one of the releases of package "{}"'
+                                ' in repository "{}".'.format(download_details, info['name'], self.repo_url)
+                            )
+
+                        if downloads is False:
+                            raise ProviderException(
+                                'No valid semver tags found at "{}" for package "{}"'
+                                ' in repository "{}".'.format(download_details, info['name'], self.repo_url)
+                            )
+
+                        for download in downloads:
+                            download.update(download_info)
+                            info['releases'].append(download)
+
+                # check required package keys
+                for key in required_package_keys:
+                    if not info.get(key):
+                        raise ProviderException(
+                            'Missing or invalid "{}" key for package "{}"'
+                            ' in repository "{}".'.format(key, info['name'], self.repo_url)
+                        )
+
+                info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
+
+                for field in ('previous_names', 'labels'):
+                    if field not in info:
+                        info[field] = []
+
+                if 'readme' in info:
+                    info['readme'] = update_url(resolve_url(self.repo_url, info['readme']), debug)
+
+                for field in ('description', 'readme', 'issues', 'donate', 'buy'):
+                    if field not in info:
+                        info[field] = None
+
+                if 'homepage' not in info:
+                    info['homepage'] = details if details else self.repo_url
+
+                if 'last_modified' not in info:
+                    # Extract a date from the newest release
+                    date = '1970-01-01 00:00:00'
+                    for release in info['releases']:
+                        release_date = release.get('date')
+                        if release_date and isinstance(release_date, str) and release_date > date:
+                            date = release_date
+                    info['last_modified'] = date
+
+                output[info['name']] = info
+                yield (info['name'], info)
+
+            except (DownloaderException, ClientException, ProviderException) as e:
+                self.broken_packages[info['name']] = e
+
+        self.packages = output
+
+    def get_sources(self):
+        """
+        Return a list of current URLs that are directly referenced by the repo
+
+        :return:
+            A list of URLs and/or file paths
+        """
+
+        if not self.fetch():
+            return []
+
+        output = [self.repo_url]
+        for package in self.repo_info['packages']:
+            details = package.get('details')
+            if details:
+                output.append(details)
+        return output
+
+    def get_renamed_packages(self):
+        """:return: A dict of the packages that have been renamed"""
+
+        if not self.fetch():
+            return {}
+
+        output = {}
+        for package in self.repo_info['packages']:
+            if 'previous_names' not in package:
+                continue
+
+            previous_names = package['previous_names']
+            if not isinstance(previous_names, list):
+                previous_names = [previous_names]
+
+            for previous_name in previous_names:
+                output[previous_name] = package['name']
+
+        return output
diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py
deleted file mode 100644
index 565a453..0000000
--- a/app/lib/package_control/providers/repository_provider.py
+++ /dev/null
@@ -1,836 +0,0 @@
-import json
-import re
-import os
-from itertools import chain
-from urllib.parse import urlparse
-
-from .. import text
-from ..clients.bitbucket_client import BitBucketClient
-from ..clients.client_exception import ClientException
-from ..clients.github_client import GitHubClient
-from ..clients.gitlab_client import GitLabClient
-from ..console_write import console_write
-from ..download_manager import http_get, resolve_url, resolve_urls, update_url
-from ..downloaders.downloader_exception import DownloaderException
-from ..package_version import version_sort
-from .base_repository_provider import BaseRepositoryProvider
-from .provider_exception import ProviderException
-from .schema_version import SchemaVersion
-
-
-class InvalidRepoFileException(ProviderException):
-
-    def __init__(self, repo, reason_message):
-        super().__init__(
-            'Repository %s does not appear to be a valid repository file because'
-            ' %s' % (repo.repo_url, reason_message))
-
-
-class RepositoryProvider(BaseRepositoryProvider):
-    """
-    Generic repository downloader that fetches package info
-
-    With the current channel/repository architecture where the channel file
-    caches info from all includes repositories, these package providers just
-    serve the purpose of downloading packages not in the default channel.
-
-    The structure of the JSON a repository should contain is located in
-    example-packages.json.
-
-    :param repo_url:
-        The URL of the package repository
-
-    :param settings:
-        A dict containing at least the following fields:
-          `cache_length`,
-          `debug`,
-          `timeout`,
-          `user_agent`
-        Optional fields:
-          `http_proxy`,
-          `https_proxy`,
-          `proxy_username`,
-          `proxy_password`,
-          `query_string_params`,
-          `http_basic_auth`
-    """
-
-    def __init__(self, repo_url, settings):
-        super().__init__(repo_url, settings)
-        self.included_urls = set()
-        self.repo_info = None
-        self.schema_version = None
-
-    def fetch(self):
-        """
-        Retrieves and loads the JSON for other methods to use
-
-        :raises:
-            InvalidChannelFileException: when parsing or validation file content fails
-            ProviderException: when an error occurs trying to open a file
-            DownloaderException: when an error occurs trying to open a URL
-        """
-
-        if self.repo_info is not None:
-            return True
-
-        if self.repo_url in self.failed_sources:
-            return False
-
-        try:
-            self.repo_info = self.fetch_repo(self.repo_url)
-            self.schema_version = self.repo_info['schema_version']
-        except (DownloaderException, ClientException, ProviderException) as e:
-            self.failed_sources[self.repo_url] = e
-            self.libraries = {}
-            self.packages = {}
-            return False
-
-        return True
-
-    def fetch_repo(self, location):
-        """
-        Fetches the contents of a URL of file path
-
-        :param location:
-            The URL or file path
-
-        :raises:
-            ProviderException: when an error occurs trying to open a file
-            DownloaderException: when an error occurs trying to open a URL
-
-        :return:
-            A dict of the parsed JSON
-        """
-
-        # Prevent circular includes
-        if location in self.included_urls:
-            raise ProviderException('Error, repository "%s" already included.' % location)
-
-        self.included_urls.add(location)
-
-        if re.match(r'https?://', location, re.I):
-            json_string = http_get(location, self.settings, 'Error downloading repository.')
-
-        # Anything that is not a URL is expected to be a filesystem path
-        else:
-            if not os.path.exists(location):
-                raise ProviderException('Error, file %s does not exist' % location)
-
-            if self.settings.get('debug'):
-                console_write(
-                    '''
-                    Loading %s as a repository
-                    ''',
-                    location
-                )
-
-            # We open as binary so we get bytes like the DownloadManager
-            with open(location, 'rb') as f:
-                json_string = f.read()
-
-        try:
-            repo_info = json.loads(json_string.decode('utf-8'))
-        except (ValueError):
-            raise InvalidRepoFileException(self, 'parsing JSON failed.')
-
-        try:
-            schema_version = repo_info['schema_version'] = SchemaVersion(repo_info['schema_version'])
-        except KeyError:
-            raise InvalidRepoFileException(
-                self, 'the "schema_version" JSON key is missing.')
-        except ValueError as e:
-            raise InvalidRepoFileException(self, e)
-
-        # Main keys depending on scheme version
-        if schema_version.major < 4:
-            repo_keys = {'packages', 'dependencies', 'includes'}
-        else:
-            repo_keys = {'packages', 'libraries', 'includes'}
-
-        # Check existence of at least one required main key
-        if not set(repo_info.keys()) & repo_keys:
-            raise InvalidRepoFileException(self, 'it doesn\'t look like a repository.')
-
-        # Check type of existing main keys
-        for key in repo_keys:
-            if key in repo_info and not isinstance(repo_info[key], list):
-                raise InvalidRepoFileException(self, 'the "%s" key is not an array.' % key)
-
-        # Migrate dependencies to libraries
-        # The 4.0.0 repository schema renamed dependencies key to libraries.
-        if schema_version.major < 4:
-            repo_info['libraries'] = repo_info.pop('dependencies', [])
-
-        # Allow repositories to include other repositories, recursively
-        includes = repo_info.pop('includes', None)
-        if includes:
-            for include in resolve_urls(self.repo_url, includes):
-                try:
-                    include_info = self.fetch_repo(include)
-                except (DownloaderException, ClientException, ProviderException) as e:
-                    self.failed_sources[include] = e
-                else:
-                    include_version = include_info['schema_version']
-                    if include_version != schema_version:
-                        raise ProviderException(
-                            'Scheme version of included repository %s doesn\'t match its parent.' % include)
-
-                    repo_info['packages'].extend(include_info.get('packages', []))
-                    repo_info['libraries'].extend(include_info.get('libraries', []))
-
-        return repo_info
-
-    def get_libraries(self, invalid_sources=None):
-        """
-        Provides access to the libraries in this repository
-
-        :param invalid_sources:
-            A list of URLs that are permissible to fetch data from
-
-        :return:
-            A generator of
-            (
-                'Library Name',
-                {
-                    'name': name,
-                    'description': description,
-                    'author': author,
-                    'issues': URL,
-                    'releases': [
-                        {
-                            'sublime_text': compatible version,
-                            'platforms': [platform name, ...],
-                            'python_versions': ['3.3', '3.8'],
-                            'url': url,
-                            'version': version,
-                            'sha256': hex hash
-                        }, ...
-                    ],
-                    'sources': [url, ...]
-                }
-            )
-            tuples
-        """
-
-        if self.libraries is not None:
-            for key, value in self.libraries.items():
-                yield (key, value)
-            return
-
-        if invalid_sources is not None and self.repo_url in invalid_sources:
-            return
-
-        if not self.fetch():
-            return
-
-        if self.schema_version.major >= 4:
-            allowed_library_keys = {
-                'name', 'description', 'author', 'issues', 'releases'
-            }
-            allowed_release_keys = {  # todo: remove 'branch'
-                'base', 'version', 'sublime_text', 'platforms', 'python_versions', 'branch', 'tags', 'url', 'sha256'
-            }
-        else:
-            allowed_library_keys = {
-                'name', 'description', 'author', 'issues', 'load_order', 'releases'
-            }
-            allowed_release_keys = {
-                'base', 'version', 'sublime_text', 'platforms', 'branch', 'tags', 'url', 'sha256'
-            }
-
-        debug = self.settings.get('debug')
-
-        clients = [
-            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient)
-        ]
-
-        output = {}
-        for library in self.repo_info['libraries']:
-            info = {
-                'releases': [],
-                'sources': [self.repo_url]
-            }
-
-            for field in ('name', 'description', 'author', 'issues'):
-                field_value = library.get(field)
-                if field_value:
-                    info[field] = field_value
-
-            if 'name' not in info:
-                self.failed_sources[self.repo_url] = ProviderException(text.format(
-                    '''
-                    No "name" value for one of the libraries in the repository %s.
-                    ''',
-                    self.repo_url
-                ))
-                continue
-
-            try:
-                unknown_keys = set(library) - allowed_library_keys
-                if unknown_keys:
-                    raise ProviderException(text.format(
-                        '''
-                        The "%s" key(s) in the library "%s" in the repository %s are not supported.
-                        ''',
-                        ('", "'.join(sorted(unknown_keys)), info['name'], self.repo_url)
-                    ))
-
-                releases = library.get('releases', [])
-                if releases and not isinstance(releases, list):
-                    raise ProviderException(text.format(
-                        '''
-                        The "releases" value is not an array for the library "%s" in the repository %s.
-                        ''',
-                        (info['name'], self.repo_url)
-                    ))
-
-                def assert_release_keys(download_info):
-                    for key in ('version', 'url', 'sublime_text', 'platforms', 'python_versions'):
-                        if key not in download_info:
-                            raise ProviderException(text.format(
-                                '''
-                                Missing "%s" key for one of the releases of the library "%s" in the repository %s.
-                                ''',
-                                (key, info['name'], self.repo_url)
-                            ))
-
-                for release in releases:
-                    download_info = {}
-
-                    unknown_keys = set(release) - allowed_release_keys
-                    if unknown_keys:
-                        raise ProviderException(text.format(
-                            '''
-                            The "%s" key(s) in one of the releases of the library "%s"
-                            in the repository %s are not supported.
-                            ''',
-                            ('", "'.join(sorted(unknown_keys)), info['name'], self.repo_url)
-                        ))
-
-                    # Make sure that explicit fields are copied over
-                    for field in ('sublime_text', 'version', 'sha256'):
-                        value = release.get(field)
-                        if value:
-                            download_info[field] = value
-
-                    # Validate url
-                    value = release.get('url')
-                    if value:
-                        download_info['url'] = update_url(resolve_url(self.repo_url, value), debug)
-
-                    # Validate supported platforms
-                    value = release.get('platforms', ['*'])
-                    if not isinstance(value, list):
-                        value = [value]
-                    download_info['platforms'] = value
-
-                    # Validate supported python_versions
-                    if self.schema_version.major < 4:
-                        # Assume python 3.3 for backward compatibility with older schemes.
-                        # Note: ST2 with python 2.6 are no longer supported
-                        download_info['python_versions'] = ['3.3']
-                    else:
-                        value = release.get('python_versions')
-                        if value:
-                            if not isinstance(value, list):
-                                value = [value]
-                            download_info['python_versions'] = value
-
-                    tags = release.get('tags')
-                    branch = release.get('branch')
-
-                    if tags or branch:
-                        base = None
-                        if 'base' in release:
-                            base = release['base']
-
-                        if not base:
-                            raise ProviderException(text.format(
-                                '''
-                                Missing release-level "base" key for one of the releases of the
-                                library "%s" in the repository %s.
-                                ''',
-                                (info['name'], self.repo_url)
-                            ))
-
-                        base_url = resolve_url(self.repo_url, base)
-                        downloads = None
-
-                        if tags:
-                            extra = None
-                            if tags is not True:
-                                extra = tags
-                            for client in clients:
-                                downloads = client.download_info_from_tags(base_url, extra)
-                                if downloads is not None:
-                                    break
-                        else:
-                            for client in clients:
-                                downloads = client.download_info_from_branch(base_url, branch)
-                                if downloads is not None:
-                                    break
-
-                        if downloads is None:
-                            raise ProviderException(text.format(
-                                '''
-                                Invalid "base" value "%s" for one of the releases of the
-                                library "%s" in the repository %s.
-                                ''',
-                                (base, info['name'], self.repo_url)
-                            ))
-
-                        if downloads is False:
-                            raise ProviderException(text.format(
-                                '''
-                                No valid semver tags found at %s for the
-                                library "%s" in the repository %s.
-                                ''',
-                                (base, info['name'], self.repo_url)
-                            ))
-
-                        for download in downloads:
-                            del download['date']
-                            new_download = download_info.copy()
-                            new_download.update(download)
-                            assert_release_keys(new_download)
-                            info['releases'].append(new_download)
-
-                    elif 'url' in download_info:
-                        is_http = urlparse(download_info['url']).scheme == 'http'
-                        if is_http and 'sha256' not in download_info:
-                            raise ProviderException(text.format(
-                                '''
-                                No "sha256" key for the non-secure "url" value in one of the
-                                releases of the library "%s" in the repository %s.
-                                ''',
-                                (info['name'], self.repo_url)
-                            ))
-
-                        assert_release_keys(download_info)
-                        info['releases'].append(download_info)
-
-                # check required library keys
-                for key in ('author', 'releases', 'issues', 'description'):
-                    if key not in info:
-                        raise ProviderException(text.format(
-                            '''
-                            No "%s" key for the library "%s" in the repository %s.
-                            ''',
-                            (key, info['name'], self.repo_url)
-                        ))
-
-                info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
-
-                output[info['name']] = info
-                yield (info['name'], info)
-
-            except (DownloaderException, ClientException, ProviderException) as e:
-                self.broken_libriaries[info['name']] = e
-
-        self.libraries = output
-
-    def get_packages(self, invalid_sources=None):
-        """
-        Provides access to the packages in this repository
-
-        :param invalid_sources:
-            A list of URLs that are permissible to fetch data from
-
-        :return:
-            A generator of
-            (
-                'Package Name',
-                {
-                    'name': name,
-                    'description': description,
-                    'author': author,
-                    'homepage': homepage,
-                    'previous_names': [old_name, ...],
-                    'labels': [label, ...],
-                    'sources': [url, ...],
-                    'readme': url,
-                    'issues': url,
-                    'donate': url,
-                    'buy': url,
-                    'last_modified': last modified date,
-                    'releases': [
-                        {
-                            'sublime_text': compatible version,
-                            'platforms': [platform name, ...],
-                            'url': url,
-                            'date': date,
-                            'version': version,
-                            'libraries': [library name, ...]
-                        }, ...
-                    ]
-                }
-            )
-            tuples
-        """
-
-        if self.packages is not None:
-            for key, value in self.packages.items():
-                yield (key, value)
-            return
-
-        if invalid_sources is not None and self.repo_url in invalid_sources:
-            return
-
-        if not self.fetch():
-            return
-
-        debug = self.settings.get('debug')
-
-        clients = [
-            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient)
-        ]
-
-        output = {}
-        for package in self.repo_info['packages']:
-            info = {
-                'sources': [self.repo_url]
-            }
-
-            copy_fields = [
-                'name',
-                'description',
-                'author',
-                'last_modified',
-                'previous_names',
-                'labels',
-                'homepage',
-                'readme',
-                'issues',
-                'donate',
-                'buy'
-            ]
-            for field in copy_fields:
-                if package.get(field):
-                    info[field] = package.get(field)
-
-            details = package.get('details')
-            releases = package.get('releases')
-
-            # Try to grab package-level details from GitHub or BitBucket
-            if details:
-                details = resolve_url(self.repo_url, details)
-
-                if invalid_sources is not None and details in invalid_sources:
-                    continue
-
-                info['sources'].append(details)
-
-                try:
-                    repo_info = None
-
-                    for client in clients:
-                        repo_info = client.repo_info(details)
-                        if repo_info:
-                            break
-                    else:
-                        raise ProviderException(text.format(
-                            '''
-                            Invalid "details" value "%s" for one of the packages in the repository %s.
-                            ''',
-                            (details, self.repo_url)
-                        ))
-
-                    del repo_info['default_branch']
-
-                    # When grabbing details, prefer explicit field values over the values
-                    # from the GitHub or BitBucket API
-                    info = dict(chain(repo_info.items(), info.items()))
-
-                except (DownloaderException, ClientException, ProviderException) as e:
-                    if 'name' in info:
-                        self.broken_packages[info['name']] = e
-                    self.failed_sources[details] = e
-                    continue
-
-            if 'name' not in info:
-                self.failed_sources[self.repo_url] = ProviderException(text.format(
-                    '''
-                    No "name" value for one of the packages in the repository %s.
-                    ''',
-                    self.repo_url
-                ))
-                continue
-
-            info['releases'] = []
-            if self.schema_version.major == 2:
-                # If no releases info was specified, also grab the download info from GH or BB
-                if not releases and details:
-                    releases = [{'details': details}]
-
-            if not releases:
-                e = ProviderException(text.format(
-                    '''
-                    No "releases" value for the package "%s" in the repository %s.
-                    ''',
-                    (info['name'], self.repo_url)
-                ))
-                self.broken_packages[info['name']] = e
-                continue
-
-            if not isinstance(releases, list):
-                e = ProviderException(text.format(
-                    '''
-                    The "releases" value is not an array or the package "%s" in the repository %s.
-                    ''',
-                    (info['name'], self.repo_url)
-                ))
-                self.broken_packages[info['name']] = e
-                continue
-
-            # This allows developers to specify a GH or BB location to get releases from,
-            # especially tags URLs (https://github.com/user/repo/tags or
-            # https://bitbucket.org/user/repo#tags)
-            for release in releases:
-                download_details = None
-                download_info = {}
-
-                # Make sure that explicit fields are copied over
-                for field in ['platforms', 'sublime_text', 'version', 'url', 'date', 'libraries']:
-                    if field in release:
-                        value = release[field]
-                        if field == 'url':
-                            value = update_url(resolve_url(self.repo_url, value), debug)
-                        if field == 'platforms' and not isinstance(release['platforms'], list):
-                            value = [value]
-                        download_info[field] = value
-
-                if self.schema_version.major < 4 and 'dependencies' in release:
-                    download_info['libraries'] = release['dependencies']
-
-                if self.schema_version.major >= 4:
-                    # Package releases may optionally contain `python_versions` list to tell
-                    # which python version they are compatibilible with.
-                    # The main purpose is to be able to opt-in unmaintained packages to python 3.8
-                    # if they are known not to cause trouble.
-                    value = release.get('python_versions')
-                    if value:
-                        if not isinstance(value, list):
-                            value = [value]
-                        download_info['python_versions'] = value
-
-                if 'platforms' not in download_info:
-                    download_info['platforms'] = ['*']
-
-                if self.schema_version.major == 2:
-                    if 'sublime_text' not in download_info:
-                        download_info['sublime_text'] = '<3000'
-
-                    if 'details' in release:
-                        download_details = resolve_url(self.repo_url, release['details'])
-
-                        try:
-                            downloads = None
-
-                            for client in clients:
-                                downloads = client.download_info(download_details)
-                                if downloads is not None:
-                                    break
-
-                            if downloads is None:
-                                raise ProviderException(text.format(
-                                    '''
-                                    Invalid "details" value "%s" for one of the releases of the
-                                    package "%s" in the repository %s.
-                                    ''',
-                                    (download_details, info['name'], self.repo_url)
-                                ))
-
-                            if downloads is False:
-                                raise ProviderException(text.format(
-                                    '''
-                                    No valid semver tags found at %s for the
-                                    package "%s" in the repository %s.
-                                    ''',
-                                    (download_details, info['name'], self.repo_url)
-                                ))
-
-                            for download in downloads:
-                                new_download = download_info.copy()
-                                new_download.update(download)
-                                info['releases'].append(new_download)
-
-                        except (DownloaderException, ClientException, ProviderException) as e:
-                            self.broken_packages[info['name']] = e
-
-                    elif download_info:
-                        info['releases'].append(download_info)
-
-                elif self.schema_version.major >= 3:
-                    tags = release.get('tags')
-                    branch = release.get('branch')
-
-                    if tags or branch:
-                        try:
-                            base = None
-                            if 'base' in release:
-                                base = release['base']
-                            elif details:
-                                base = details
-
-                            if not base:
-                                raise ProviderException(text.format(
-                                    '''
-                                    Missing root-level "details" key, or release-level "base" key
-                                    for one of the releases of the package "%s" in the repository %s.
-                                    ''',
-                                    (info['name'], self.repo_url)
-                                ))
-
-                            base_url = resolve_url(self.repo_url, base)
-                            downloads = None
-
-                            if tags:
-                                extra = None
-                                if tags is not True:
-                                    extra = tags
-                                for client in clients:
-                                    downloads = client.download_info_from_tags(base_url, extra)
-                                    if downloads is not None:
-                                        break
-                            else:
-                                for client in clients:
-                                    downloads = client.download_info_from_branch(base_url, branch)
-                                    if downloads is not None:
-                                        break
-
-                            if downloads is None:
-                                raise ProviderException(text.format(
-                                    '''
-                                    Invalid "base" value "%s" for one of the releases of the
-                                    package "%s" in the repository %s.
-                                    ''',
-                                    (base, info['name'], self.repo_url)
-                                ))
-
-                            if downloads is False:
-                                raise ProviderException(text.format(
-                                    '''
-                                    No valid semver tags found at %s for the
-                                    package "%s" in the repository %s.
-                                    ''',
-                                    (base, info['name'], self.repo_url)
-                                ))
-
-                            for download in downloads:
-                                new_download = download_info.copy()
-                                new_download.update(download)
-                                info['releases'].append(new_download)
-
-                        except (DownloaderException, ClientException, ProviderException) as e:
-                            self.broken_packages[info['name']] = e
-                            continue
-                    elif download_info:
-                        info['releases'].append(download_info)
-
-            info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
-
-            if info['name'] in self.broken_packages:
-                continue
-
-            if 'author' not in info:
-                self.broken_packages[info['name']] = ProviderException(text.format(
-                    '''
-                    No "author" key for the package "%s" in the repository %s.
-                    ''',
-                    (info['name'], self.repo_url)
-                ))
-                continue
-
-            if 'releases' not in info:
-                self.broken_packages[info['name']] = ProviderException(text.format(
-                    '''
-                    No "releases" key for the package "%s" in the repository %s.
-                    ''',
-                    (info['name'], self.repo_url)
-                ))
-                continue
-
-            # Make sure all releases have the appropriate keys. We use a
-            # function here so that we can break out of multiple loops.
-            def has_broken_release():
-                for release in info.get('releases', []):
-                    for key in ['version', 'date', 'url', 'sublime_text', 'platforms']:
-                        if key not in release:
-                            self.broken_packages[info['name']] = ProviderException(text.format(
-                                '''
-                                Missing "%s" key for one of the releases of the package "%s" in the repository %s.
-                                ''',
-                                (key, info['name'], self.repo_url)
-                            ))
-                            return True
-                return False
-
-            if has_broken_release():
-                continue
-
-            for field in ['previous_names', 'labels']:
-                if field not in info:
-                    info[field] = []
-
-            if 'readme' in info:
-                info['readme'] = update_url(resolve_url(self.repo_url, info['readme']), debug)
-
-            for field in ['description', 'readme', 'issues', 'donate', 'buy']:
-                if field not in info:
-                    info[field] = None
-
-            if 'homepage' not in info:
-                info['homepage'] = self.repo_url
-
-            if 'releases' in info and 'last_modified' not in info:
-                # Extract a date from the newest release
-                date = '1970-01-01 00:00:00'
-                for release in info['releases']:
-                    release_date = release.get('date')
-                    if release_date and isinstance(release_date, str) and release_date > date:
-                        date = release_date
-                info['last_modified'] = date
-
-            output[info['name']] = info
-            yield (info['name'], info)
-
-        self.packages = output
-
-    def get_sources(self):
-        """
-        Return a list of current URLs that are directly referenced by the repo
-
-        :return:
-            A list of URLs and/or file paths
-        """
-
-        if not self.fetch():
-            return []
-
-        output = [self.repo_url]
-        for package in self.repo_info['packages']:
-            details = package.get('details')
-            if details:
-                output.append(details)
-        return output
-
-    def get_renamed_packages(self):
-        """:return: A dict of the packages that have been renamed"""
-
-        if not self.fetch():
-            return {}
-
-        output = {}
-        for package in self.repo_info['packages']:
-            if 'previous_names' not in package:
-                continue
-
-            previous_names = package['previous_names']
-            if not isinstance(previous_names, list):
-                previous_names = [previous_names]
-
-            for previous_name in previous_names:
-                output[previous_name] = package['name']
-
-        return output
diff --git a/setup/scripts/extract_package_control.py b/setup/scripts/extract_package_control.py
index e704342..d0cf9bd 100644
--- a/setup/scripts/extract_package_control.py
+++ b/setup/scripts/extract_package_control.py
@@ -94,8 +94,8 @@
     'providers/channel_provider.py',
     'providers/github_repository_provider.py',
     'providers/github_user_provider.py',
+    'providers/json_repository_provider.py',
     'providers/provider_exception.py',
-    'providers/repository_provider.py',
     'providers/schema_compat.py',
 
     '__init__.py',

From 7f0bd8bfc628295bd5edd443f86896a02812f677 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sat, 28 Oct 2023 17:33:43 +0200
Subject: [PATCH 24/39] Sync with Package Control

---
 app/lib/package_control/__init__.py           |   4 +-
 .../clients/bitbucket_client.py               |  12 ++
 .../package_control/clients/github_client.py  | 155 ++++++++++++++++
 .../package_control/clients/gitlab_client.py  | 166 +++++++++++++++++-
 .../clients/json_api_client.py                | 106 +++++++++++
 .../providers/json_repository_provider.py     | 160 ++++++++++++-----
 6 files changed, 550 insertions(+), 53 deletions(-)

diff --git a/app/lib/package_control/__init__.py b/app/lib/package_control/__init__.py
index 5672732..1c7082d 100644
--- a/app/lib/package_control/__init__.py
+++ b/app/lib/package_control/__init__.py
@@ -1,2 +1,2 @@
-__version__ = "4.0.0-beta8"
-__version_info__ = (4, 0, 0, 'beta', 8)
+__version__ = "4.0.0-beta9"
+__version_info__ = (4, 0, 0, 'beta', 9)
diff --git a/app/lib/package_control/clients/bitbucket_client.py b/app/lib/package_control/clients/bitbucket_client.py
index fdc5732..4d61f30 100644
--- a/app/lib/package_control/clients/bitbucket_client.py
+++ b/app/lib/package_control/clients/bitbucket_client.py
@@ -146,6 +146,18 @@ def download_info_from_branch(self, url, default_branch=None):
 
         return [self._make_download_info(user_repo, branch, version, timestamp)]
 
+    def download_info_from_releases(self, url, asset_templates, tag_prefix=None):
+        """
+        BitBucket doesn't support releases in ways GitHub/Gitlab do.
+
+        It supports download assets, but those are not bound to tags or releases.
+
+        Version information could be extracted from file names,
+        but that's not how PC evaluates download assets, currently.
+        """
+
+        return None
+
     def download_info_from_tags(self, url, tag_prefix=None):
         """
         Retrieve information about downloading a package
diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py
index 2bb46e2..1650445 100644
--- a/app/lib/package_control/clients/github_client.py
+++ b/app/lib/package_control/clients/github_client.py
@@ -131,6 +131,161 @@ def download_info_from_branch(self, url, default_branch=None):
 
         return [self._make_download_info(user_repo, branch, version, timestamp)]
 
+    def download_info_from_releases(self, url, asset_templates, tag_prefix=None):
+        """
+        Retrieve information about downloading a package
+
+        :param url:
+            The URL of the repository, in one of the forms:
+              https://github.com/{user}/{repo}
+              https://github.com/{user}/{repo}/releases
+            Grabs the info from the newest tag(s) that is a valid semver version.
+
+        :param tag_prefix:
+            If the URL is a tags URL, only match tags that have this prefix.
+            If tag_prefix is None, match only tags without prefix.
+
+        :param asset_templates:
+            A list of tuples of asset template and download_info.
+
+            [
+                (
+                    "Name-${version}-st${st_build}-*-x??.sublime",
+                    {
+                        "platforms": ["windows-x64"],
+                        "python_versions": ["3.3", "3.8"],
+                        "sublime_text": ">=4107"
+                    }
+                )
+            ]
+
+            Supported globs:
+
+              * : any number of characters
+              ? : single character placeholder
+
+            Supported variables are:
+
+              ${platform}
+                A platform-arch string as given in "platforms" list.
+                A separate explicit release is evaluated for each platform.
+                If "platforms": ['*'] is specified, variable is set to "any".
+
+              ${py_version}
+                Major and minor part of required python version without period.
+                One of "33", "38" or any other valid python version supported by ST.
+
+              ${st_build}
+                Value of "st_specifier" stripped by leading operator
+                  "*"            => "any"
+                  ">=4107"       => "4107"
+                  "<4107"        => "4107"
+                  "4107 - 4126"  => "4107"
+
+              ${version}
+                Resolved semver without tag prefix
+                (e.g.: tag st4107-1.0.5 => version 1.0.5)
+
+                Note: is not replaced by this method, but by the ``ClientProvider``.
+
+        :raises:
+            DownloaderException: when there is an error downloading
+            ClientException: when there is an error parsing the response
+
+        :return:
+            ``None`` if no match, ``False`` if no commit, or a list of dicts with the
+            following keys:
+
+              - `version` - the version number of the download
+              - `url` - the download URL of a zip file of the package
+              - `date` - the ISO-8601 timestamp string when the version was published
+              - `platforms` - list of unicode strings with compatible platforms
+              - `python_versions` - list of compatible python versions
+              - `sublime_text` - sublime text version specifier
+
+            Example:
+
+            ```py
+            [
+                {
+                  "url": "https://server.com/file.zip",
+                  "version": "1.0.0",
+                  "date": "2023-10-21 12:00:00",
+                  "platforms": ["windows-x64"],
+                  "python_versions": ["3.8"],
+                  "sublime_text": ">=4107"
+                },
+                ...
+            ]
+            ```
+        """
+
+        match = re.match(r'https?://github\.com/([^/#?]+/[^/#?]+)(?:/releases)?/?$', url)
+        if not match:
+            return None
+
+        def _get_releases(user_repo, tag_prefix=None, page_size=1000):
+            used_versions = set()
+            for page in range(10):
+                query_string = urlencode({'page': page * page_size, 'per_page': page_size})
+                api_url = self._api_url(user_repo, '/releases?%s' % query_string)
+                releases = self.fetch_json(api_url)
+
+                for release in releases:
+                    if release['draft']:
+                        continue
+                    version = version_match_prefix(release['tag_name'], tag_prefix)
+                    if not version or version in used_versions:
+                        continue
+
+                    used_versions.add(version)
+
+                    yield (
+                        version,
+                        release['published_at'][0:19].replace('T', ' '),
+                        [
+                            ((a['label'], a['browser_download_url']))
+                            for a in release['assets']
+                            if a['state'] == 'uploaded'
+                        ]
+                    )
+
+                if len(releases) < page_size:
+                    return
+
+        user_repo = match.group(1)
+        max_releases = self.settings.get('max_releases', 0)
+        num_releases = 0
+
+        asset_templates = self._expand_asset_variables(asset_templates)
+
+        output = []
+        for release in _get_releases(user_repo, tag_prefix):
+            version, timestamp, assets = release
+
+            version_string = str(version)
+
+            for pattern, selectors in asset_templates:
+                pattern = pattern.replace('${version}', version_string)
+                pattern = pattern.replace('.', r'\.')
+                pattern = pattern.replace('*', r'.*?')
+                pattern = pattern.replace('?', r'.')
+                regex = re.compile(pattern)
+
+                for asset_name, asset_url in assets:
+                    if not regex.match(asset_name):
+                        continue
+
+                    info = {'url': asset_url, 'version': version_string, 'date': timestamp}
+                    info.update(selectors)
+                    output.append(info)
+
+            num_releases += version.is_final
+            if max_releases > 0 and num_releases >= max_releases:
+                break
+
+        return output
+
     def download_info_from_tags(self, url, tag_prefix=None):
         """
         Retrieve information about downloading a package
diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py
index 80f0272..c75a22d 100644
--- a/app/lib/package_control/clients/gitlab_client.py
+++ b/app/lib/package_control/clients/gitlab_client.py
@@ -42,7 +42,7 @@ def user_repo_branch(url):
     @staticmethod
     def repo_url(user_name, repo_name):
         """
-        Generate the tags URL for a GitHub repo if the value passed is a GitHub
+        Generate the repository URL for a GitLab repo if the value passed is a GitLab
         repository URL
 
         :param owener_name:
@@ -134,6 +134,160 @@ def download_info_from_branch(self, url, default_branch=None):
 
         return [self._make_download_info(user_name, repo_name, branch, version, timestamp)]
 
+    def download_info_from_releases(self, url, asset_templates, tag_prefix=None):
+        """
+        Retrieve information about downloading a package
+
+        :param url:
+            The URL of the repository, in one of the forms:
+              https://gitlab.com/{user}/{repo}
+              https://gitlab.com/{user}/{repo}/-/releases
+            Grabs the info from the newest tag(s) that is a valid semver version.
+
+        :param tag_prefix:
+            If the URL is a tags URL, only match tags that have this prefix.
+            If tag_prefix is None, match only tags without prefix.
+
+        :param asset_templates:
+            A list of tuples of asset template and download_info.
+
+            [
+                (
+                    "Name-${version}-st${st_build}-*-x??.sublime",
+                    {
+                        "platforms": ["windows-x64"],
+                        "python_versions": ["3.3", "3.8"],
+                        "sublime_text": ">=4107"
+                    }
+                )
+            ]
+
+            Supported globs:
+
+              * : any number of characters
+              ? : single character placeholder
+
+            Supported variables are:
+
+              ${platform}
+                A platform-arch string as given in "platforms" list.
+                A separate explicit release is evaluated for each platform.
+                If "platforms": ['*'] is specified, variable is set to "any".
+
+              ${py_version}
+                Major and minor part of required python version without period.
+                One of "33", "38" or any other valid python version supported by ST.
+
+              ${st_build}
+                Value of "st_specifier" stripped by leading operator
+                  "*"            => "any"
+                  ">=4107"       => "4107"
+                  "<4107"        => "4107"
+                  "4107 - 4126"  => "4107"
+
+              ${version}
+                Resolved semver without tag prefix
+                (e.g.: tag st4107-1.0.5 => version 1.0.5)
+
+                Note: is not replaced by this method, but by the ``ClientProvider``.
+
+        :raises:
+            DownloaderException: when there is an error downloading
+            ClientException: when there is an error parsing the response
+
+        :return:
+            ``None`` if no match, ``False`` if no commit, or a list of dicts with the
+            following keys:
+
+              - `version` - the version number of the download
+              - `url` - the download URL of a zip file of the package
+              - `date` - the ISO-8601 timestamp string when the version was published
+              - `platforms` - list of unicode strings with compatible platforms
+              - `python_versions` - list of compatible python versions
+              - `sublime_text` - sublime text version specifier
+
+            Example:
+
+            ```py
+            [
+                {
+                  "url": "https://server.com/file.zip",
+                  "version": "1.0.0",
+                  "date": "2023-10-21 12:00:00",
+                  "platforms": ["windows-x64"],
+                  "python_versions": ["3.8"],
+                  "sublime_text": ">=4107"
+                },
+                ...
+            ]
+            ```
+        """
+
+        match = re.match(r'https?://gitlab\.com/([^/#?]+)/([^/#?]+)(?:/-/releases)?/?$', url)
+        if not match:
+            return None
+
+        def _get_releases(user_repo, tag_prefix=None, page_size=1000):
+            used_versions = set()
+            for page in range(10):
+                query_string = urlencode({'page': page * page_size, 'per_page': page_size})
+                api_url = self._api_url(user_repo, '/releases?%s' % query_string)
+                releases = self.fetch_json(api_url)
+
+                for release in releases:
+                    version = version_match_prefix(release['tag_name'], tag_prefix)
+                    if not version or version in used_versions:
+                        continue
+
+                    used_versions.add(version)
+
+                    yield (
+                        version,
+                        release['released_at'][0:19].replace('T', ' '),
+                        [
+                            ((a['name'], a['direct_asset_url']))
+                            for a in release['assets']['links']
+                        ]
+                    )
+
+                if len(releases) < page_size:
+                    return
+
+        user_name, repo_name = match.groups()
+        repo_id = '%s%%2F%s' % (user_name, repo_name)
+
+        max_releases = self.settings.get('max_releases', 0)
+        num_releases = 0
+
+        asset_templates = self._expand_asset_variables(asset_templates)
+
+        output = []
+        for release in _get_releases(repo_id, tag_prefix):
+            version, timestamp, assets = release
+
+            version_string = str(version)
+
+            for pattern, selectors in asset_templates:
+                pattern = pattern.replace('${version}', version_string)
+                pattern = pattern.replace('.', r'\.')
+                pattern = pattern.replace('*', r'.*?')
+                pattern = pattern.replace('?', r'.')
+                regex = re.compile(pattern)
+
+                for asset_name, asset_url in assets:
+                    if not regex.match(asset_name):
+                        continue
+
+                    info = {'url': asset_url, 'version': version_string, 'date': timestamp}
+                    info.update(selectors)
+                    output.append(info)
+
+            num_releases += version.is_final
+            if max_releases > 0 and num_releases >= max_releases:
+                break
+
+        return output
+
     def download_info_from_tags(self, url, tag_prefix=None):
         """
         Retrieve information about downloading a package
@@ -164,11 +318,11 @@ def download_info_from_tags(self, url, tag_prefix=None):
         if not tags_match:
             return None
 
-        def _get_releases(user_repo, tag_prefix=None, page_size=1000):
+        def _get_releases(repo_id, tag_prefix=None, page_size=1000):
             used_versions = set()
             for page in range(10):
                 query_string = urlencode({'page': page * page_size, 'per_page': page_size})
-                tags_url = self._api_url(user_repo, '/repository/tags?%s' % query_string)
+                tags_url = self._api_url(repo_id, '/repository/tags?%s' % query_string)
                 tags_json = self.fetch_json(tags_url)
 
                 for tag in tags_json:
@@ -185,13 +339,13 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
                     return
 
         user_name, repo_name = tags_match.groups()
-        user_repo = '%s%%2F%s' % (user_name, repo_name)
+        repo_id = '%s%%2F%s' % (user_name, repo_name)
 
         max_releases = self.settings.get('max_releases', 0)
         num_releases = 0
 
         output = []
-        for release in sorted(_get_releases(user_repo, tag_prefix), reverse=True):
+        for release in sorted(_get_releases(repo_id, tag_prefix), reverse=True):
             version, tag, timestamp = release
 
             output.append(self._make_download_info(user_name, repo_name, tag, str(version), timestamp))
@@ -227,7 +381,7 @@ def repo_info(self, url):
         """
 
         user_name, repo_name, branch = self.user_repo_branch(url)
-        if not repo_name:
+        if not user_name or not repo_name:
             return None
 
         repo_id = '%s%%2F%s' % (user_name, repo_name)
diff --git a/app/lib/package_control/clients/json_api_client.py b/app/lib/package_control/clients/json_api_client.py
index 73ff561..72da376 100644
--- a/app/lib/package_control/clients/json_api_client.py
+++ b/app/lib/package_control/clients/json_api_client.py
@@ -61,3 +61,109 @@ def fetch_json(self, url, prefer_cached=False):
         except (ValueError):
             error_string = 'Error parsing JSON from URL %s.' % url
             raise ClientException(error_string)
+
+    @staticmethod
+    def _expand_asset_variables(asset_templates):
+        """
+        Expands the asset variables.
+
+        Note: ``${version}`` is not replaced.
+
+        :param asset_templates:
+            A list of tuples of asset template and download_info.
+
+            ```py
+            [
+                (
+                    "Name-${version}-py${py_version}-*-x??.whl",
+                    {
+                        "platforms": ["windows-x64"],
+                        "python_versions": ["3.3", "3.8"],
+                        "sublime_text": ">=4107"
+                    }
+                )
+            ]
+            ```
+
+            Supported variables are:
+
+              ``${platform}``
+                A platform-arch string as given in "platforms" list.
+                A separate explicit release is evaluated for each platform.
+                If "platforms": ['*'] is specified, variable is set to "any".
+
+              ``${py_version}``
+                Major and minor part of required python version without period.
+                One of "33", "38" or any other valid python version supported by ST.
+
+              ``${st_build}``
+                Value of "st_specifier" stripped by leading operator
+                "*"            => "any"
+                ">=4107"       => "4107"
+                "<4107"        => "4107"
+                "4107 - 4126"  => "4107"
+
+        :returns:
+            A list of asset templates with all variables (except ``${version}``) resolved.
+
+            ```py
+            [
+                (
+                    "Name-${version}-py33-*-x??.whl",
+                    {
+                        "platforms": ["windows-x64"],
+                        "python_versions": ["3.3"],
+                        "sublime_text": ">=4107"
+                    }
+                ),
+                (
+                    "Name-${version}-py33-*-x??.whl",
+                    {
+                        "platforms": ["windows-x64"],
+                        "python_versions": ["3.8"],
+                        "sublime_text": ">=4107"
+                    }
+                )
+            ]
+            ```
+        """
+
+        output = []
+        var = '${st_build}'
+        for pattern, selectors in asset_templates:
+            # resolve ${st_build}
+            if var in pattern:
+                # convert st_specifier version specifier to build number
+                st_specifier = selectors['sublime_text']
+                if st_specifier == '*':
+                    st_build = 'any'
+                elif st_specifier[0].isdigit():
+                    # 4107, 4107 - 4126
+                    st_build = st_specifier[:4]
+                elif st_specifier[1].isdigit():
+                    # <4107, >4107
+                    st_build = st_specifier[1:]
+                else:
+                    # ==4107, <=4107, >=4107
+                    st_build = st_specifier[2:]
+
+                pattern = pattern.replace(var, st_build)
+
+            output.append((pattern, selectors))
+
+        def resolve(templates, var, key):
+            for pattern, selectors in templates:
+                if var not in pattern:
+                    yield (pattern, selectors)
+                    continue
+
+                for platform in selectors[key]:
+                    new_selectors = selectors.copy()
+                    new_selectors[key] = [platform]
+                    yield (pattern.replace(var, platform), new_selectors)
+
+            return None
+
+        output = resolve(output, '${platform}', 'platforms')
+        output = resolve(output, '${py_version}', 'python_versions')
+        return list(output)
diff --git a/app/lib/package_control/providers/json_repository_provider.py b/app/lib/package_control/providers/json_repository_provider.py
index e5eb247..c479282 100644
--- a/app/lib/package_control/providers/json_repository_provider.py
+++ b/app/lib/package_control/providers/json_repository_provider.py
@@ -16,6 +16,14 @@
 from .provider_exception import ProviderException
 from .schema_version import SchemaVersion
 
+try:
+    # running within ST
+    from ..selectors import is_compatible_platform, is_compatible_version
+    IS_ST = True
+except ImportError:
+    # running on CLI or server
+    IS_ST = False
+
 
 class InvalidRepoFileException(ProviderException):
     def __init__(self, repo, reason_message):
@@ -236,25 +244,26 @@ def get_libraries(self, invalid_sources=None):
         if not self.fetch():
             return
 
+        if not self.repo_info:
+            return
+
         if self.schema_version.major >= 4:
             allowed_library_keys = {
                 'name', 'description', 'author', 'homepage', 'issues', 'releases'
             }
             allowed_release_keys = {  # todo: remove 'branch'
-                'base', 'version', 'sublime_text', 'platforms', 'python_versions', 'branch', 'tags', 'url', 'sha256'
+                'base', 'version', 'sublime_text', 'platforms', 'python_versions',
+                'branch', 'tags', 'asset', 'url', 'sha256'
             }
         else:
             allowed_library_keys = {
                 'name', 'description', 'author', 'issues', 'load_order', 'releases'
             }
             allowed_release_keys = {
-                'base', 'version', 'sublime_text', 'platforms', 'branch', 'tags', 'url', 'sha256'
+                'base', 'version', 'sublime_text', 'platforms',
+                'branch', 'tags', 'url', 'sha256'
             }
 
-        required_library_keys = {
-            'description', 'author', 'issues', 'releases'
-        }
-
         copied_library_keys = ('name', 'description', 'author', 'homepage', 'issues')
         copied_release_keys = ('date', 'version', 'sha256')
         default_platforms = ['*']
@@ -268,7 +277,7 @@ def get_libraries(self, invalid_sources=None):
         ]
 
         output = {}
-        for library in self.repo_info['libraries']:
+        for library in self.repo_info.get('libraries', []):
             info = {
                 'releases': [],
                 'sources': [self.repo_url]
@@ -303,6 +312,8 @@ def get_libraries(self, invalid_sources=None):
                         ' in repository {}.'.format(info['name'], self.repo_url)
                     )
 
+                staged_releases = {}
+
                 for release in releases:
                     download_info = {}
 
@@ -331,6 +342,9 @@ def get_libraries(self, invalid_sources=None):
                         value = [value]
                     elif not isinstance(value, list):
                         raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                    # ignore incompatible release (avoid downloading/evaluating further information)
+                    if IS_ST and not is_compatible_platform(value):
+                        continue
                     download_info[key] = value
 
                     # Validate supported python_versions
@@ -347,6 +361,9 @@ def get_libraries(self, invalid_sources=None):
                     value = release.get(key, default_sublime_text)
                     if not isinstance(value, str):
                         raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                    # ignore incompatible release (avoid downloading/evaluating further information)
+                    if IS_ST and not is_compatible_version(value):
+                        continue
                     download_info[key] = value
 
                     # Validate url
@@ -387,13 +404,23 @@ def get_libraries(self, invalid_sources=None):
                     downloads = None
 
                     # Evaluate and resolve "tags" and "branch" release templates
-                    tags = release.get('tags')
+                    asset = release.get('asset')
                     branch = release.get('branch')
+                    tags = release.get('tags')
+                    extra = None if tags is True else tags
 
-                    if tags:
-                        extra = None
-                        if tags is not True:
-                            extra = tags
+                    if asset:
+                        if branch:
+                            raise ProviderException(
+                                'Illegal "asset" key "{}" for branch based release of library "{}"'
+                                ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                            )
+                        # group releases with assets by base_url and tag-prefix
+                        # to prepare gathering download_info with a single API call
+                        staged_releases.setdefault((base_url, extra), []).append((asset, download_info))
+                        continue
+
+                    elif tags:
                         for client in clients:
                             downloads = client.download_info_from_tags(base_url, extra)
                             if downloads is not None:
@@ -426,14 +453,26 @@ def get_libraries(self, invalid_sources=None):
                         download.update(download_info)
                         info['releases'].append(download)
 
+                # gather download_info from releases
+                for (base_url, extra), asset_templates in staged_releases.items():
+                    for client in clients:
+                        downloads = client.download_info_from_releases(base_url, asset_templates, extra)
+                        if downloads is not None:
+                            info['releases'].extend(downloads)
+                            break
+
                 # check required library keys
-                for key in required_library_keys:
+                for key in ('description', 'author', 'issues'):
                     if not info.get(key):
                         raise ProviderException(
                             'Missing or invalid "{}" key for library "{}"'
                             ' in repository "{}".'.format(key, info['name'], self.repo_url)
                         )
 
+                # Empty releases means package is unavailable on current platform or for version of ST
+                if not info['releases']:
+                    continue
+
                 info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
 
                 output[info['name']] = info
@@ -494,7 +533,8 @@ def get_packages(self, invalid_sources=None):
         if not self.fetch():
             return
 
-        required_package_keys = {'author', 'releases'}
+        if not self.repo_info:
+            return
 
         copied_package_keys = (
             'name',
@@ -520,7 +560,7 @@ def get_packages(self, invalid_sources=None):
         ]
 
         output = {}
-        for package in self.repo_info['packages']:
+        for package in self.repo_info.get('packages', []):
             info = {
                 'releases': [],
                 'sources': [self.repo_url]
@@ -574,14 +614,19 @@ def get_packages(self, invalid_sources=None):
                 continue
 
             try:
+                if not info.get('author'):
+                    raise ProviderException(
+                        'Missing or invalid "author" key for package "{}"'
+                        ' in repository "{}".'.format(info['name'], self.repo_url)
+                    )
+
                 # evaluate releases
 
                 releases = package.get('releases')
 
-                if self.schema_version.major == 2:
-                    # If no releases info was specified, also grab the download info from GH or BB
-                    if not releases and details:
-                        releases = [{'details': details}]
+                # If no releases info was specified, also grab the download info from GH or BB
+                if self.schema_version.major == 2 and not releases and details:
+                    releases = [{'details': details}]
 
                 if not releases:
                     raise ProviderException(
@@ -595,6 +640,8 @@ def get_packages(self, invalid_sources=None):
                         ' in the repository {}.'.format(info['name'], self.repo_url)
                     )
 
+                staged_releases = {}
+
                 # This allows developers to specify a GH or BB location to get releases from,
                 # especially tags URLs (https://github.com/user/repo/tags or
                 # https://bitbucket.org/user/repo#tags)
@@ -617,22 +664,24 @@ def get_packages(self, invalid_sources=None):
                         value = [value]
                     elif not isinstance(value, list):
                         raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                    # ignore incompatible release (avoid downloading/evaluating further information)
+                    if IS_ST and not is_compatible_platform(value):
+                        continue
                     download_info[key] = value
 
-                    # Validate supported python_versions
-                    if self.schema_version.major >= 4:
-                        key = 'python_versions'
-                        value = release.get(key)
-                        if value:
-                            # Package releases may optionally contain `python_versions` list to tell
-                            # which python version they are compatibilible with.
-                            # The main purpose is to be able to opt-in unmaintained packages to python 3.8
-                            # if they are known not to cause trouble.
-                            if isinstance(value, str):
-                                value = [value]
-                            elif not isinstance(value, list):
-                                raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
-                            download_info[key] = value
+                    # Validate supported python_versions (requires scheme 4.0.0!)
+                    key = 'python_versions'
+                    value = release.get(key)
+                    if value:
+                        # Package releases may optionally contain `python_versions` list to tell
+                        # which python version they are compatible with.
+                        # The main purpose is to be able to opt-in unmaintained packages to python 3.8
+                        # if they are known not to cause trouble.
+                        if isinstance(value, str):
+                            value = [value]
+                        elif not isinstance(value, list):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        download_info[key] = value
 
                     if self.schema_version.major >= 3:
                         # Validate supported ST version
@@ -641,6 +690,9 @@ def get_packages(self, invalid_sources=None):
                         value = release.get(key, default_sublime_text)
                         if not isinstance(value, str):
                             raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        # ignore incompatible release (avoid downloading/evaluating further information)
+                        if IS_ST and not is_compatible_version(value):
+                            continue
                         download_info[key] = value
 
                         # Validate url
@@ -680,17 +732,28 @@ def get_packages(self, invalid_sources=None):
                         base_url = resolve_url(self.repo_url, base)
                         downloads = None
 
-                        tags = release.get('tags')
+                        asset = release.get('asset')
                         branch = release.get('branch')
+                        tags = release.get('tags')
+                        extra = None if tags is True else tags
+
+                        if asset:
+                            if branch:
+                                raise ProviderException(
+                                    'Illegal "asset" key "{}" for branch based release of package "{}"'
+                                    ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                                )
+                            # group releases with assets by base_url and tag-prefix
+                            # to prepare gathering download_info with a single API call
+                            staged_releases.setdefault((base_url, extra), []).append((asset, download_info))
+                            continue
 
-                        if tags:
-                            extra = None
-                            if tags is not True:
-                                extra = tags
+                        elif tags:
                             for client in clients:
                                 downloads = client.download_info_from_tags(base_url, extra)
                                 if downloads is not None:
                                     break
+
                         elif branch:
                             for client in clients:
                                 downloads = client.download_info_from_branch(base_url, branch)
@@ -726,6 +789,9 @@ def get_packages(self, invalid_sources=None):
                             continue
                         if not isinstance(value, str):
                             raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        # ignore incompatible release (avoid downloading/evaluating further information)
+                        if IS_ST and not is_compatible_version(value):
+                            continue
                         download_info[key] = value
 
                         # Validate url
@@ -780,13 +846,17 @@ def get_packages(self, invalid_sources=None):
                             download.update(download_info)
                             info['releases'].append(download)
 
-                # check required package keys
-                for key in required_package_keys:
-                    if not info.get(key):
-                        raise ProviderException(
-                            'Missing or invalid "{}" key for package "{}"'
-                            ' in repository "{}".'.format(key, info['name'], self.repo_url)
-                        )
+                # gather download_info from releases
+                for (base_url, extra), asset_templates in staged_releases.items():
+                    for client in clients:
+                        downloads = client.download_info_from_releases(base_url, asset_templates, extra)
+                        if downloads is not None:
+                            info['releases'].extend(downloads)
+                            break
+
+                # Empty releases means package is unavailable on current platform or for version of ST
+                if not info['releases']:
+                    continue
 
                 info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
 

From f0a68b2697be27e927179604bbae6ae6eb8c0cb3 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Mon, 30 Oct 2023 19:33:36 +0100
Subject: [PATCH 25/39] Fix JsonRepositoryProvider reference names

---
 app/lib/run_repo_tests.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/app/lib/run_repo_tests.py b/app/lib/run_repo_tests.py
index 388e29e..0bd909a 100644
--- a/app/lib/run_repo_tests.py
+++ b/app/lib/run_repo_tests.py
@@ -11,7 +11,7 @@
 from urllib.error import URLError
 import imp
 
-from .package_control.providers import RepositoryProvider
+from .package_control.providers import JsonRepositoryProvider
 from .package_control.download_manager import close_all_connections, http_get
 from .package_control.downloaders.downloader_exception import DownloaderException
 from .. import config
@@ -246,7 +246,7 @@ def clean_message(exception):
             error = re.sub(regex, '', error)
         return error.replace(' in the repository https://example.com', '')
 
-    provider = RepositoryProvider('https://example.com', settings)
+    provider = JsonRepositoryProvider('https://example.com', settings)
     provider.schema_version = '3.0.0'
     provider.schema_major_version = 3
     provider.repo_info = {'schema_version': '3.0.0', 'packages': [spec], 'dependencies': []}

From 38c1cf6e130ee188234ac05ab377a8a3bcb4ab00 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Mon, 30 Oct 2023 19:34:42 +0100
Subject: [PATCH 26/39] Fix KeyError on optional python_versions key

Provide fallbacks if release keys are not found. Primarily possible only for
`python_versions` as it is optional for packages.
---
 app/models/package/modify.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/app/models/package/modify.py b/app/models/package/modify.py
index 6315004..cdd7c19 100644
--- a/app/models/package/modify.py
+++ b/app/models/package/modify.py
@@ -584,9 +584,9 @@ def store(values):
 
             cursor.execute(sql, [
                 name,
-                release['platforms'],
-                release['python_versions'],
-                _normalize_st_version(release['sublime_text']),
+                release.get('platforms', ['*']),
+                release.get('python_versions', []),
+                _normalize_st_version(release.get('sublime_text', '*')),
                 release['version'],
                 release['url'],
                 release['date'],

From a8afd57b83de051ea7cb97a69203952b5fa7d785 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Mon, 30 Oct 2023 19:42:30 +0100
Subject: [PATCH 27/39] Sync with Package Control

---
 .../package_control/clients/github_client.py  |   3 +-
 .../package_control/clients/gitlab_client.py  |   3 +-
 .../package_control/clients/pypi_client.py    | 276 ++++++++++++++++++
 app/lib/package_control/download_manager.py   |   3 +
 app/lib/package_control/http_cache.py         |  30 +-
 .../providers/json_repository_provider.py     |   3 +-
 6 files changed, 313 insertions(+), 5 deletions(-)
 create mode 100644 app/lib/package_control/clients/pypi_client.py

diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py
index 1650445..e213a1e 100644
--- a/app/lib/package_control/clients/github_client.py
+++ b/app/lib/package_control/clients/github_client.py
@@ -268,8 +268,8 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
             for pattern, selectors in asset_templates:
                 pattern = pattern.replace('${version}', version_string)
                 pattern = pattern.replace('.', r'\.')
-                pattern = pattern.replace('*', r'.*?')
                 pattern = pattern.replace('?', r'.')
+                pattern = pattern.replace('*', r'.*?')
                 regex = re.compile(pattern)
 
                 for asset_name, asset_url in assets:
@@ -279,6 +279,7 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
                     info = {'url': asset_url, 'version': version_string, 'date': timestamp}
                     info.update(selectors)
                     output.append(info)
+                    break
 
             num_releases += version.is_final
             if max_releases > 0 and num_releases >= max_releases:
diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py
index c75a22d..b0c9466 100644
--- a/app/lib/package_control/clients/gitlab_client.py
+++ b/app/lib/package_control/clients/gitlab_client.py
@@ -270,8 +270,8 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
             for pattern, selectors in asset_templates:
                 pattern = pattern.replace('${version}', version_string)
                 pattern = pattern.replace('.', r'\.')
-                pattern = pattern.replace('*', r'.*?')
                 pattern = pattern.replace('?', r'.')
+                pattern = pattern.replace('*', r'.*?')
                 regex = re.compile(pattern)
 
                 for asset_name, asset_url in assets:
@@ -281,6 +281,7 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
                     info = {'url': asset_url, 'version': version_string, 'date': timestamp}
                     info.update(selectors)
                     output.append(info)
+                    break
 
             num_releases += version.is_final
             if max_releases > 0 and num_releases >= max_releases:
diff --git a/app/lib/package_control/clients/pypi_client.py b/app/lib/package_control/clients/pypi_client.py
new file mode 100644
index 0000000..289e3eb
--- /dev/null
+++ b/app/lib/package_control/clients/pypi_client.py
@@ -0,0 +1,276 @@
+import re
+
+from ..pep440 import PEP440Version
+from ..pep440 import PEP440VersionSpecifier
+
+from .json_api_client import JSONApiClient
+
+
+class PyPiClient(JSONApiClient):
+    @staticmethod
+    def name_and_version(url):
+        match = re.match(
+            r"^https?://pypi\.org/project/([^/#?]+)(?:/([^/#?]+?)|/?)$", url
+        )
+        if match:
+            return match.groups()
+
+        return (None, None)
+
+    def repo_info(self, url):
+        name, _ = self.name_and_version(url)
+        if not name:
+            return None
+
+        pypi_url = "https://pypi.org/pypi/{}/json".format(name)
+        info = self.fetch_json(pypi_url)["info"]
+
+        return {
+            "name": name,
+            "description": info["summary"],
+            "homepage": info["home_page"]
+            or (info.get("project_urls") or {}).get("Homepage"),
+            "author": info["author"],
+            "issues": info["bugtrack_url"]
+            or (info.get("project_urls") or {}).get("Issues"),
+        }
+
+    def download_info(self, url, tag_prefix=None):
+        """Branch or tag based releases are not supported."""
+        return None
+
+    def download_info_from_branch(self, url, default_branch=None):
+        """Branch or tag based releases are not supported."""
+        return None
+
+    def download_info_from_tags(self, url, tag_prefix=None):
+        """Branch or tag based releases are not supported."""
+        return None
+
+    def download_info_from_releases(self, url, asset_templates, tag_prefix=None):
+        """
+        Retrieve information about package
+
+        :param url:
+            The URL of the repository, in one of the forms:
+              https://pypi.org/project/{library_name}
+              https://pypi.org/project/{library_name}/{version}
+            Grabs the info from the newest compatible release(s).
+
+        :param tag_prefix:
+            unused, present for API compatibility.
+
+        :param asset_templates:
+            A list of tuples of asset template and download_info.
+
+            [
+                (
+                    "coverage-${version}-cp33-*-win_amd64*.whl",
+                    {
+                        "platforms": ["windows-x64"],
+                        "python_versions": ["3.3"]
+                    }
+                )
+            ]
+
+            Supported globs:
+
+              * : any number of characters
+              ? : single character placeholder
+
+            Supported variables are:
+
+              ${platform}
+                A platform-arch string as given in "platforms" list.
+                A separate explicit release is evaluated for each platform.
+                If "platforms": ["*"] is specified, variable is set to "any".
+
+              ${py_version}
+                Major and minor part of required python version without period.
+                One of "33", "38" or any other valid python version supported by ST.
+
+              ${st_build}
+                Value of "st_specifier" stripped by leading operator
+                  "*"            => "any"
+                  ">=4107"       => "4107"
+                  "<4107"        => "4107"
+                  "4107 - 4126"  => "4107"
+
+              ${version}
+                Resolved semver without tag prefix
+                (e.g.: tag st4107-1.0.5 => version 1.0.5)
+
+                Note: is not replaced by this method, but by the ``ClientProvider``.
+
+        :raises:
+            DownloaderException: when there is an error downloading
+            ClientException: when there is an error parsing the response
+
+        :return:
+            ``None`` if no match, ``False`` if no commit, or a list of dicts with the
+            following keys:
+
+              - `version` - the version number of the download
+              - `url` - the download URL of a zip file of the package
+              - `date` - the ISO-8601 timestamp string when the version was published
+              - `platforms` - list of unicode strings with compatible platforms
+              - `python_versions` - list of compatible python versions
+              - `sublime_text` - sublime text version specifier
+
+            Example:
+
+            ```py
+            [
+                {
+                    "url": "https://files.pythonhosted.org/packages/.../coverage-4.2-cp33-cp33m-win_amd64.whl",
+                    "version": "4.2",
+                    "date": "2016-07-26 21:09:17",
+                    "sha256": "bd4eba631f07cae8cdb9c55c144f165649e6701b962f9d604b4e00cf8802406c",
+                    "platforms": ["windows-x64"],
+                    "python_versions": ["3.3"]
+                },
+                ...
+            ]
+            ```
+        """
+
+        name, version = self.name_and_version(url)
+        if not name:
+            return None
+
+        if version:
+            return self._download_info_from_fixed_version(
+                name, version, asset_templates
+            )
+
+        return self._download_info_from_latest_version(name, asset_templates)
+
+    def _download_info_from_fixed_version(self, name, version, asset_templates):
+        """
+        Build download information from fixed version.
+
+        :param name:
+            The package name
+        :param version:
+            The package version
+        :param asset_templates:
+            A list of tuples of asset template and download_info.
+
+        :return:
+            ``None`` if no match, ``False`` if no commit, or a list of dicts with the
+            following keys:
+        """
+
+        pypi_url = "https://pypi.org/pypi/{}/{}/json".format(name, version)
+        info = self.fetch_json(pypi_url)
+
+        asset_templates = self._expand_asset_variables(asset_templates)
+        assets = info["urls"]
+
+        output = []
+        for pattern, selectors in asset_templates:
+            info = self._make_download_info(pattern, selectors, version, assets)
+            if info:
+                output.append(info)
+
+        return output
+
+    def _download_info_from_latest_version(self, name, asset_templates):
+        """
+        Build download information from latest compatible versions of each asset template.
+
+        :param name:
+            The package name
+        :param version:
+            The package version
+        :param asset_templates:
+            A list of tuples of asset template and download_info.
+
+        :return:
+            ``None`` if no match, ``False`` if no commit, or a list of dicts with the
+            following keys:
+        """
+
+        pypi_url = "https://pypi.org/pypi/{}/json".format(name)
+        info = self.fetch_json(pypi_url)
+
+        asset_templates = self._expand_asset_variables(asset_templates)
+
+        max_releases = self.settings.get("max_releases", 0)
+        num_releases = [0] * len(asset_templates)
+
+        output = []
+
+        # get latest compatible release for each asset template
+        for version, assets in sorted(info["releases"].items(), key=lambda kv: PEP440Version(kv[0]), reverse=True):
+            # we don't want beta releases!
+            if not PEP440Version(version).is_final:
+                continue
+
+            for idx, (pattern, selectors) in enumerate(asset_templates):
+                if max_releases > 0 and num_releases[idx] >= max_releases:
+                    continue
+                info = self._make_download_info(pattern, selectors, version, assets)
+                if not info:
+                    continue
+                output.append(info)
+                num_releases[idx] += 1
+            if max_releases > 0 and min(num_releases) >= max_releases:
+                break
+
+        return output
+
+    @staticmethod
+    def _make_download_info(pattern, selectors, version, assets):
+        """
+        Build download information for given asset template.
+
+        :param pattern:
+            The glob pattern of a given asset template
+        :param selectors:
+            The dictionary of release specification of given asset template from repository.json
+        :param version:
+            The package version
+        :param assets:
+            A list of dictionaries of asset information downloaded from PyPI.
+
+        :return:
+            ``None`` if no match, ``False`` if no commit, or a list of dicts with the
+            following keys:
+        """
+
+        pattern = pattern.replace("${version}", version)
+        pattern = pattern.replace(".", r"\.")
+        pattern = pattern.replace("?", r".")
+        pattern = pattern.replace("*", r".*?")
+        regex = re.compile(pattern)
+
+        python_versions = [PEP440Version(ver) for ver in selectors["python_versions"]]
+
+        for asset in assets:
+            if asset["packagetype"] != "bdist_wheel":
+                continue
+            if asset["yanked"]:
+                continue
+            if not regex.match(asset["filename"]):
+                continue
+
+            specs = asset["requires_python"]
+            if specs:
+                specs = (
+                    PEP440VersionSpecifier(spec)
+                    for spec in asset["requires_python"].split(",")
+                )
+                if not all(ver in spec for spec in specs for ver in python_versions):
+                    continue
+
+            info = {
+                "url": asset["url"],
+                "version": version,
+                "date": asset["upload_time"][0:19].replace("T", " "),
+                "sha256": asset["digests"]["sha256"],
+            }
+            info.update(selectors)
+            return info
+
+        return None
diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py
index 5a65514..6f45d9b 100644
--- a/app/lib/package_control/download_manager.py
+++ b/app/lib/package_control/download_manager.py
@@ -199,6 +199,9 @@ def resolve_url(root_url, url):
         A generator of resolved URLs
     """
 
+    if not url:
+        return url
+
     scheme_match = re.match(r'(https?:)//', root_url, re.I)
     if scheme_match is None:
         root_dir = os.path.dirname(root_url)
diff --git a/app/lib/package_control/http_cache.py b/app/lib/package_control/http_cache.py
index e9e4977..06cf6ac 100644
--- a/app/lib/package_control/http_cache.py
+++ b/app/lib/package_control/http_cache.py
@@ -11,9 +11,26 @@ class HttpCache:
     """
 
     def __init__(self, ttl):
+        """
+        Constructs a new instance.
+
+        :param ttl:
+            The number of seconds a cache entry should be valid for
+        """
+        self.ttl = int(ttl)    
         self.base_path = os.path.join(sys_path.pc_cache_dir(), 'http_cache')
         os.makedirs(self.base_path, exist_ok=True)
-        self.clear(int(ttl))
+
+    def __del__(self):
+        """
+        Delete an existing instance.
+
+        Remove outdated cache files, when cache object is deleted.
+        All files which have been accessed by deleted instance keep untouched.
+        """
+
+        if self.ttl > 0:
+            self.clear(self.ttl)
 
     def clear(self, ttl):
         """
@@ -46,9 +63,18 @@ def get(self, key):
             The (binary) cached value, or False
         """
         try:
+            content = None
             cache_file = os.path.join(self.base_path, key)
             with open(cache_file, 'rb') as fobj:
-                return fobj.read()
+                content = fobj.read()
+
+            # update filetime to prevent unmodified cache files
+            # from being deleted, if they are frequently accessed.
+            now = time.time()
+            os.utime(cache_file, (now, now))
+
+            return content
+
         except FileNotFoundError:
             return False
 
diff --git a/app/lib/package_control/providers/json_repository_provider.py b/app/lib/package_control/providers/json_repository_provider.py
index c479282..db4085f 100644
--- a/app/lib/package_control/providers/json_repository_provider.py
+++ b/app/lib/package_control/providers/json_repository_provider.py
@@ -8,6 +8,7 @@
 from ..clients.client_exception import ClientException
 from ..clients.github_client import GitHubClient
 from ..clients.gitlab_client import GitLabClient
+from ..clients.pypi_client import PyPiClient
 from ..console_write import console_write
 from ..download_manager import http_get, resolve_url, resolve_urls, update_url
 from ..downloaders.downloader_exception import DownloaderException
@@ -273,7 +274,7 @@ def get_libraries(self, invalid_sources=None):
         debug = self.settings.get('debug')
 
         clients = [
-            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient)
+            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient, PyPiClient)
         ]
 
         output = {}

From be1d4f1cf066e869aa66b4dd7cabef17349133bd Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Mon, 30 Oct 2023 19:42:53 +0100
Subject: [PATCH 28/39] Update settings, crawl at most 1 release

---
 config/crawler.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/config/crawler.yml b/config/crawler.yml
index 285d13d..2674df0 100644
--- a/config/crawler.yml
+++ b/config/crawler.yml
@@ -5,6 +5,7 @@ user_agent: Package Control Default Channel Server
 http_cache: true
 http_cache_length: 5184000
 install_prereleases: true
+max_releases: 1
 downloader_precedence: 
     windows: [wininet, oscrypto]
     osx: [oscrypto, urllib, curl]

From f8e1a47a103af2445c01f9f42f58df08d2300483 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Mon, 30 Oct 2023 20:37:29 +0100
Subject: [PATCH 29/39] Fix initial database scheme

Adds missing python_versions column to package releases.
---
 setup/sql/up.sql | 1 +
 1 file changed, 1 insertion(+)

diff --git a/setup/sql/up.sql b/setup/sql/up.sql
index 81bde0f..610a7d2 100644
--- a/setup/sql/up.sql
+++ b/setup/sql/up.sql
@@ -114,6 +114,7 @@ CREATE TABLE library_releases (
 CREATE TABLE releases (
     package                  varchar(500)  NOT NULL REFERENCES packages(name) ON DELETE CASCADE ON UPDATE CASCADE,
     platforms                varchar[]     NOT NULL,
+    python_versions          varchar[]     NOT NULL,
     sublime_text             varchar       NOT NULL,
     version                  varchar       NOT NULL,
     url                      varchar       NOT NULL,

From 79836437ca80846e5a2a1a45c1dec0cca6a3fa73 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sun, 29 Oct 2023 09:39:18 +0100
Subject: [PATCH 30/39] Adjustable cache path

---
 app/lib/package_control/sys_path.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/app/lib/package_control/sys_path.py b/app/lib/package_control/sys_path.py
index 16b96bc..d182aef 100644
--- a/app/lib/package_control/sys_path.py
+++ b/app/lib/package_control/sys_path.py
@@ -2,9 +2,16 @@
 
 import os
 
+__cache_path = os.path.join(os.path.expanduser('~'), '.package_control')
+
+
+def set_cache_dir(cache_path):
+    global __cache_path
+    __cache_path = cache_path
+
 
 def pc_cache_dir():
-    return os.path.join(os.path.expanduser('~'), '.package_control')
+    return __cache_path
 
 
 def user_config_dir():

From 8a6ff6c30ed9978aa5eae7a604450043a07b4911 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Tue, 31 Oct 2023 15:28:13 +0100
Subject: [PATCH 31/39] Sync with Package Control

---
 app/lib/package_control/clients/github_client.py | 16 ++++++++++------
 app/lib/package_control/clients/gitlab_client.py | 16 ++++++++++------
 2 files changed, 20 insertions(+), 12 deletions(-)

diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py
index e213a1e..9f66bb7 100644
--- a/app/lib/package_control/clients/github_client.py
+++ b/app/lib/package_control/clients/github_client.py
@@ -253,19 +253,23 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
                 if len(releases) < page_size:
                     return
 
+        asset_templates = self._expand_asset_variables(asset_templates)
+
         user_repo = match.group(1)
         max_releases = self.settings.get('max_releases', 0)
-        num_releases = 0
-
-        asset_templates = self._expand_asset_variables(asset_templates)
+        num_releases = [0] * len(asset_templates)
 
         output = []
+
         for release in _get_releases(user_repo, tag_prefix):
             version, timestamp, assets = release
 
             version_string = str(version)
 
-            for pattern, selectors in asset_templates:
+            for idx, (pattern, selectors) in enumerate(asset_templates):
+                if max_releases > 0 and num_releases[idx] >= max_releases:
+                    continue
+
                 pattern = pattern.replace('${version}', version_string)
                 pattern = pattern.replace('.', r'\.')
                 pattern = pattern.replace('?', r'.')
@@ -279,10 +283,10 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
                     info = {'url': asset_url, 'version': version_string, 'date': timestamp}
                     info.update(selectors)
                     output.append(info)
+                    num_releases[idx] += version.is_final
                     break
 
-            num_releases += version.is_final
-            if max_releases > 0 and num_releases >= max_releases:
+            if max_releases > 0 and min(num_releases) >= max_releases:
                 break
 
         return output
diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py
index b0c9466..5f289e7 100644
--- a/app/lib/package_control/clients/gitlab_client.py
+++ b/app/lib/package_control/clients/gitlab_client.py
@@ -256,18 +256,22 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
         user_name, repo_name = match.groups()
         repo_id = '%s%%2F%s' % (user_name, repo_name)
 
-        max_releases = self.settings.get('max_releases', 0)
-        num_releases = 0
-
         asset_templates = self._expand_asset_variables(asset_templates)
 
+        max_releases = self.settings.get('max_releases', 0)
+        num_releases = [0] * len(asset_templates)
+
         output = []
+
         for release in _get_releases(repo_id, tag_prefix):
             version, timestamp, assets = release
 
             version_string = str(version)
 
-            for pattern, selectors in asset_templates:
+            for idx, (pattern, selectors) in enumerate(asset_templates):
+                if max_releases > 0 and num_releases[idx] >= max_releases:
+                    continue
+
                 pattern = pattern.replace('${version}', version_string)
                 pattern = pattern.replace('.', r'\.')
                 pattern = pattern.replace('?', r'.')
@@ -281,10 +285,10 @@ def _get_releases(user_repo, tag_prefix=None, page_size=1000):
                     info = {'url': asset_url, 'version': version_string, 'date': timestamp}
                     info.update(selectors)
                     output.append(info)
+                    num_releases[idx] += version.is_final
                     break
 
-            num_releases += version.is_final
-            if max_releases > 0 and num_releases >= max_releases:
+            if max_releases > 0 and min(num_releases) >= max_releases:
                 break
 
         return output

From c188e34a37799afcc4d1b27b1254126cbe763971 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sun, 5 Nov 2023 19:43:38 +0100
Subject: [PATCH 32/39] Sync with Package Control

---
 app/lib/package_control/download_manager.py            |  2 ++
 app/lib/package_control/http_cache.py                  |  2 +-
 .../providers/json_repository_provider.py              | 10 +++++-----
 3 files changed, 8 insertions(+), 6 deletions(-)

diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py
index 6f45d9b..436dc37 100644
--- a/app/lib/package_control/download_manager.py
+++ b/app/lib/package_control/download_manager.py
@@ -169,6 +169,8 @@ def resolve_urls(root_url, uris):
         root_dir = ''
 
     for url in uris:
+        if not url:
+            continue
         if url.startswith('//'):
             if scheme_match is not None:
                 url = scheme_match.group(1) + url
diff --git a/app/lib/package_control/http_cache.py b/app/lib/package_control/http_cache.py
index 06cf6ac..2611a76 100644
--- a/app/lib/package_control/http_cache.py
+++ b/app/lib/package_control/http_cache.py
@@ -17,7 +17,7 @@ def __init__(self, ttl):
         :param ttl:
             The number of seconds a cache entry should be valid for
         """
-        self.ttl = int(ttl)    
+        self.ttl = int(ttl)
         self.base_path = os.path.join(sys_path.pc_cache_dir(), 'http_cache')
         os.makedirs(self.base_path, exist_ok=True)
 
diff --git a/app/lib/package_control/providers/json_repository_provider.py b/app/lib/package_control/providers/json_repository_provider.py
index db4085f..824218f 100644
--- a/app/lib/package_control/providers/json_repository_provider.py
+++ b/app/lib/package_control/providers/json_repository_provider.py
@@ -381,7 +381,7 @@ def get_libraries(self, invalid_sources=None):
                         if 'version' not in download_info:
                             raise ProviderException(
                                 'Missing "version" key in release with explicit "url" of library "{}"'
-                                ' in repository "%s".'.format(info['name'], self.repo_url)
+                                ' in repository "{}".'.format(info['name'], self.repo_url)
                             )
 
                         download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
@@ -435,7 +435,7 @@ def get_libraries(self, invalid_sources=None):
                     else:
                         raise ProviderException(
                             'Missing "branch", "tags" or "url" key in release of library "{}"'
-                            ' in repository "%s".'.format(info['name'], self.repo_url)
+                            ' in repository "{}".'.format(info['name'], self.repo_url)
                         )
 
                     if downloads is None:
@@ -711,7 +711,7 @@ def get_packages(self, invalid_sources=None):
                             if 'version' not in download_info:
                                 raise ProviderException(
                                     'Missing "version" key in release with explicit "url" of package "{}"'
-                                    ' in repository "%s".'.format(info['name'], self.repo_url)
+                                    ' in repository "{}".'.format(info['name'], self.repo_url)
                                 )
 
                             download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
@@ -763,7 +763,7 @@ def get_packages(self, invalid_sources=None):
                         else:
                             raise ProviderException(
                                 'Missing "branch", "tags" or "url" key in release of package "{}"'
-                                ' in repository "%s".'.format(info['name'], self.repo_url)
+                                ' in repository "{}".'.format(info['name'], self.repo_url)
                             )
 
                         if downloads is None:
@@ -809,7 +809,7 @@ def get_packages(self, invalid_sources=None):
                             if 'version' not in download_info:
                                 raise ProviderException(
                                     'Missing "version" key in release with explicit "url" of package "{}"'
-                                    ' in repository "%s".'.format(info['name'], self.repo_url)
+                                    ' in repository "{}".'.format(info['name'], self.repo_url)
                                 )
 
                             download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)

From 2c0ea00e6045234b2f09f4a368b218c5776f7054 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Thu, 9 Nov 2023 20:24:38 +0100
Subject: [PATCH 33/39] Sync with Package Control

---
 app/lib/package_control/package_version.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/app/lib/package_control/package_version.py b/app/lib/package_control/package_version.py
index dd508cc..5219d1d 100644
--- a/app/lib/package_control/package_version.py
+++ b/app/lib/package_control/package_version.py
@@ -29,10 +29,13 @@ def __init__(self, ver):
         if not isinstance(ver, str):
             raise TypeError("{!r} is not a string".format(ver))
 
-        # Store original version string to maintain backward compatibility
+        # Store original version string with `v` trimmed to maintain backward compatibility
         # with regards to not normalize it.
         # The one and only use case is to keep existing CI tests working without change.
-        self._str = ver
+        if ver[0] == 'v':
+            self._str = ver[1:]
+        else:
+            self._str = ver
 
         # We prepend 0 to all date-based version numbers so that developers
         # may switch to explicit versioning from GitHub/GitLab/BitBucket

From 8582693cf803c5d47e419c173174effc0b42b2ed Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Thu, 9 Nov 2023 20:35:24 +0100
Subject: [PATCH 34/39] On demand channel.json creation

This commit

1. moves asset storage functionality to a dedicated module as it might also be
   used to provide compressed stylesheets or javascript assets in future.

2. fixes an issue which caused generated channel.json files to look different
   after each poll. This causes the file to change each 10mins even if content
   hasn't changed. This degrades chance to return 304 so clients could re-use
   local cache.

   To achieve it, json objects content is sorted by key and a hash file is used
   to check, whether content has changed since last run.
---
 app/lib/store_asset.py                | 50 +++++++++++++++++++++++++++
 app/tasks/generate_channel_v3_json.py | 35 ++++++-------------
 app/tasks/generate_channel_v4_json.py | 35 ++++++-------------
 3 files changed, 70 insertions(+), 50 deletions(-)
 create mode 100644 app/lib/store_asset.py

diff --git a/app/lib/store_asset.py b/app/lib/store_asset.py
new file mode 100644
index 0000000..a97b4cd
--- /dev/null
+++ b/app/lib/store_asset.py
@@ -0,0 +1,50 @@
+import bz2
+import gzip
+import hashlib
+import os
+
+def store_asset(filename, content):
+    """
+    Stores an asset uncompressed and as gzip, bzip2 archive.
+
+    :param filename:
+        The filename
+    :param content:
+        The content
+    """
+    new_filename     = filename + '-new'
+    new_filename_gz  = filename + '.gz-new'
+    new_filename_bz2 = filename + '.bz2-new'
+    filename_gz      = filename + '.gz'
+    filename_bz2     = filename + '.bz2'
+    filename_sha512  = filename + '.sha512'
+
+    encoded_content = content.encode('utf-8')
+    content_hash = hashlib.sha512(encoded_content).hexdigest().encode('utf-8')
+
+    # Abort, if content hasn't changed so http server continues to return 304
+    # if clients already have a locally cached copy.
+    try:
+        with open(filename_sha512, 'rb') as f:
+            if f.read().strip() == content_hash:
+                return
+    except FileNotFoundError:
+        pass
+
+    with open(new_filename, 'wb') as f:
+        f.write(encoded_content)
+
+    os.rename(new_filename, filename)
+
+    with gzip.open(new_filename_gz, 'w') as f:
+        f.write(encoded_content)
+
+    os.rename(new_filename_gz, filename_gz)
+
+    with bz2.open(new_filename_bz2, 'w') as f:
+        f.write(encoded_content)
+
+    os.rename(new_filename_bz2, filename_bz2)
+
+    with open(filename_sha512, 'wb') as f:
+        f.write(content_hash)
diff --git a/app/tasks/generate_channel_v3_json.py b/app/tasks/generate_channel_v3_json.py
index 039874f..25d110b 100644
--- a/app/tasks/generate_channel_v3_json.py
+++ b/app/tasks/generate_channel_v3_json.py
@@ -1,9 +1,8 @@
-import bz2
-import gzip
 import json
 import os
 
 from ..lib.json_datetime_encoder import JsonDatetimeEncoder
+from ..lib.store_asset import store_asset
 from ..models import package
 
 
@@ -40,26 +39,12 @@
 
 output['repositories'] = sorted(output['repositories'])
 
-new_json_path     = os.path.join(assets_dir, 'channel_v3.json-new')
-new_json_gz_path  = os.path.join(assets_dir, 'channel_v3.json.gz-new')
-new_json_bz2_path = os.path.join(assets_dir, 'channel_v3.json.bz2-new')
-json_path         = os.path.join(assets_dir, 'channel_v3.json')
-json_gz_path      = os.path.join(assets_dir, 'channel_v3.json.gz')
-json_bz2_path     = os.path.join(assets_dir, 'channel_v3.json.bz2')
-
-encoded_json = json.dumps(output, cls=JsonDatetimeEncoder).encode('utf-8')
-
-with open(new_json_path, 'wb') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_path, json_path)
-
-with gzip.open(new_json_gz_path, 'w') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_gz_path, json_gz_path)
-
-with bz2.open(new_json_bz2_path, 'w') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_bz2_path, json_bz2_path)
+store_asset(
+    os.path.join(assets_dir, 'channel_v3.json'),
+    json.dumps(
+        output,
+        cls=JsonDatetimeEncoder,
+        check_circular=False,
+        sort_keys=True
+    )
+)
diff --git a/app/tasks/generate_channel_v4_json.py b/app/tasks/generate_channel_v4_json.py
index 1d7fbae..09c4eac 100644
--- a/app/tasks/generate_channel_v4_json.py
+++ b/app/tasks/generate_channel_v4_json.py
@@ -1,9 +1,8 @@
-import bz2
-import gzip
 import json
 import os
 
 from ..lib.json_datetime_encoder import JsonDatetimeEncoder
+from ..lib.store_asset import store_asset
 from ..models import package, library
 
 
@@ -53,26 +52,12 @@
 
 output['repositories'] = sorted(output['repositories'])
 
-new_json_path     = os.path.join(assets_dir, 'channel_v4.json-new')
-new_json_gz_path  = os.path.join(assets_dir, 'channel_v4.json.gz-new')
-new_json_bz2_path = os.path.join(assets_dir, 'channel_v4.json.bz2-new')
-json_path         = os.path.join(assets_dir, 'channel_v4.json')
-json_gz_path      = os.path.join(assets_dir, 'channel_v4.json.gz')
-json_bz2_path     = os.path.join(assets_dir, 'channel_v4.json.bz2')
-
-encoded_json = json.dumps(output, cls=JsonDatetimeEncoder).encode('utf-8')
-
-with open(new_json_path, 'wb') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_path, json_path)
-
-with gzip.open(new_json_gz_path, 'w') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_gz_path, json_gz_path)
-
-with bz2.open(new_json_bz2_path, 'w') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_bz2_path, json_bz2_path)
+store_asset(
+    os.path.join(assets_dir, 'channel_v4.json'),
+    json.dumps(
+        output,
+        cls=JsonDatetimeEncoder,
+        check_circular=False,
+        sort_keys=True
+    )
+)

From aba4da2994f47f997ded79b062dff04c56f0c2a1 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sat, 25 Nov 2023 11:03:59 +0100
Subject: [PATCH 35/39] Sync with Package Control

---
 .../clients/json_api_client.py                |  7 +++---
 .../package_control/clients/pypi_client.py    | 13 ++++++-----
 app/lib/package_control/http_cache.py         | 22 +++++++++++--------
 3 files changed, 24 insertions(+), 18 deletions(-)

diff --git a/app/lib/package_control/clients/json_api_client.py b/app/lib/package_control/clients/json_api_client.py
index 72da376..0dd9cfd 100644
--- a/app/lib/package_control/clients/json_api_client.py
+++ b/app/lib/package_control/clients/json_api_client.py
@@ -157,10 +157,11 @@ def resolve(templates, var, key):
                     yield (pattern, selectors)
                     continue
 
-                for platform in selectors[key]:
+                for value in selectors[key]:
                     new_selectors = selectors.copy()
-                    new_selectors[key] = [platform]
-                    yield (pattern.replace(var, platform), new_selectors)
+                    new_selectors[key] = [value]
+                    # remove `.` from python versions; not required for platforms
+                    yield (pattern.replace(var, value.replace('.', '')), new_selectors)
 
             return None
 
diff --git a/app/lib/package_control/clients/pypi_client.py b/app/lib/package_control/clients/pypi_client.py
index 289e3eb..c2cf58a 100644
--- a/app/lib/package_control/clients/pypi_client.py
+++ b/app/lib/package_control/clients/pypi_client.py
@@ -2,6 +2,7 @@
 
 from ..pep440 import PEP440Version
 from ..pep440 import PEP440VersionSpecifier
+from ..package_version import version_sort
 
 from .json_api_client import JSONApiClient
 
@@ -162,10 +163,9 @@ def _download_info_from_fixed_version(self, name, version, asset_templates):
         """
 
         pypi_url = "https://pypi.org/pypi/{}/{}/json".format(name, version)
-        info = self.fetch_json(pypi_url)
+        assets = self.fetch_json(pypi_url)["urls"]
 
         asset_templates = self._expand_asset_variables(asset_templates)
-        assets = info["urls"]
 
         output = []
         for pattern, selectors in asset_templates:
@@ -192,21 +192,21 @@ def _download_info_from_latest_version(self, name, asset_templates):
         """
 
         pypi_url = "https://pypi.org/pypi/{}/json".format(name)
-        info = self.fetch_json(pypi_url)
+        releases = self.fetch_json(pypi_url)["releases"]
 
         asset_templates = self._expand_asset_variables(asset_templates)
 
         max_releases = self.settings.get("max_releases", 0)
         num_releases = [0] * len(asset_templates)
 
-        output = []
-
         # get latest compatible release for each asset template
-        for version, assets in reversed(info["releases"].items()):
+        output = []
+        for version in version_sort(releases, reverse=True):
             # we don"t want beta releases!
             if not PEP440Version(version).is_final:
                 continue
 
+            assets = releases[version]
             for idx, (pattern, selectors) in enumerate(asset_templates):
                 if max_releases > 0 and num_releases[idx] >= max_releases:
                     continue
@@ -215,6 +215,7 @@ def _download_info_from_latest_version(self, name, asset_templates):
                     continue
                 output.append(info)
                 num_releases[idx] += 1
+
             if max_releases > 0 and min(num_releases) >= max_releases:
                 break
 
diff --git a/app/lib/package_control/http_cache.py b/app/lib/package_control/http_cache.py
index 2611a76..aa20884 100644
--- a/app/lib/package_control/http_cache.py
+++ b/app/lib/package_control/http_cache.py
@@ -42,15 +42,19 @@ def clear(self, ttl):
 
         ttl = int(ttl)
 
-        for filename in os.listdir(self.base_path):
-            path = os.path.join(self.base_path, filename)
-            # There should not be any folders in the cache dir, but we
-            # ignore to prevent an exception
-            if os.path.isdir(path):
-                continue
-            mtime = os.stat(path).st_mtime
-            if mtime < time.time() - ttl:
-                os.unlink(path)
+        try:
+            for filename in os.listdir(self.base_path):
+                path = os.path.join(self.base_path, filename)
+                # There should not be any folders in the cache dir, but we
+                # ignore to prevent an exception
+                if os.path.isdir(path):
+                    continue
+                mtime = os.stat(path).st_mtime
+                if mtime < time.time() - ttl:
+                    os.unlink(path)
+
+        except FileNotFoundError:
+            pass
 
     def get(self, key):
         """

From 4fa7302a654dc272a5901dff2c6f0e639a529281 Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sun, 26 Nov 2023 15:10:38 +0100
Subject: [PATCH 36/39] Add repository test schema 4.0.0 support

---
 app/lib/run_repo_tests.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/app/lib/run_repo_tests.py b/app/lib/run_repo_tests.py
index 0bd909a..00d0360 100644
--- a/app/lib/run_repo_tests.py
+++ b/app/lib/run_repo_tests.py
@@ -12,6 +12,7 @@
 import imp
 
 from .package_control.providers import JsonRepositoryProvider
+from .package_control.providers.schema_version import SchemaVersion
 from .package_control.download_manager import close_all_connections, http_get
 from .package_control.downloaders.downloader_exception import DownloaderException
 from .. import config
@@ -247,9 +248,8 @@ def clean_message(exception):
         return error.replace(' in the repository https://example.com', '')
 
     provider = JsonRepositoryProvider('https://example.com', settings)
-    provider.schema_version = '3.0.0'
-    provider.schema_major_version = 3
-    provider.repo_info = {'schema_version': '3.0.0', 'packages': [spec], 'dependencies': []}
+    provider.schema_version = SchemaVersion('4.0.0')
+    provider.repo_info = {'schema_version': '4.0.0', 'packages': [spec], 'libraries': []}
 
     try:
         for name, info in provider.get_packages():
@@ -540,9 +540,9 @@ def test_pull_request(pr):
                     errors = True
                     continue
 
-                if repo_json['schema_version'] != '3.0.0':
+                if repo_json['schema_version'] not in ('3.0.0', '4.0.0'):
                     errors = True
-                    output.append('  - ERROR: "schema_version" must be "3.0.0"')
+                    output.append('  - ERROR: "schema_version" must be "3.0.0" or "4.0.0"')
                     continue
 
                 num_pkgs = 0

From 5fb72c303400df7310e47b007c111c693dcac4eb Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sun, 26 Nov 2023 15:14:44 +0100
Subject: [PATCH 37/39] Adjust removed packages to new model

---
 app/lib/removed_packages.py | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/app/lib/removed_packages.py b/app/lib/removed_packages.py
index 9574c87..9c72e59 100644
--- a/app/lib/removed_packages.py
+++ b/app/lib/removed_packages.py
@@ -3,8 +3,7 @@
 
 from .package_control.providers import REPOSITORY_PROVIDERS, CHANNEL_PROVIDERS
 from .. import config
-from .connection import connection
-from ..models import package, dependency
+from ..models import package, library
 
 
 
@@ -41,7 +40,7 @@ def mark():
             package.modify.mark_removed(info['name'])
             print('Package "%s" marked as removed' % info['name'])
 
-    for info in dependency.old():
+    for info in library.old():
         mark_removed = False
 
         for source in info['sources']:
@@ -53,8 +52,8 @@ def mark():
             mark_removed = True
 
         if mark_removed:
-            dependency.mark_removed(info['name'])
-            print('Dependency "%s" marked as removed' % info['name'])
+            library.mark_removed(info['name'])
+            print('Library "%s" marked as removed' % info['name'])
 
 
 def find_active_sources():

From ed92350a4cd5532b78661db158b742ca0fa107fc Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sat, 9 Mar 2024 11:36:55 +0100
Subject: [PATCH 38/39] Sync with Package Control 4.0.3

---
 app/lib/package_control/ca_certs.py           |   2 -
 .../clients/client_exception.py               |   3 -
 .../package_control/clients/pypi_client.py    |  23 +-
 app/lib/package_control/deps/__init__.py      |  28 +
 .../package_control/deps/asn1crypto/algos.py  | 165 ++++-
 .../package_control/deps/asn1crypto/cms.py    |  25 +-
 .../package_control/deps/asn1crypto/core.py   |  26 +
 .../package_control/deps/asn1crypto/crl.py    |   1 -
 .../package_control/deps/asn1crypto/x509.py   |  30 +-
 .../package_control/deps/oscrypto/__init__.py |  14 +-
 app/lib/package_control/deps/oscrypto/_ffi.py |   5 +-
 .../deps/oscrypto/_mac/_security_cffi.py      |  13 +-
 .../deps/oscrypto/_mac/_security_ctypes.py    |  26 +-
 .../deps/oscrypto/_mac/asymmetric.py          |  11 +-
 .../package_control/deps/oscrypto/_mac/tls.py |  11 +-
 .../deps/oscrypto/_openssl/_libcrypto.py      |   8 +-
 .../deps/oscrypto/_openssl/_libcrypto_cffi.py |   6 +-
 .../oscrypto/_openssl/_libcrypto_ctypes.py    |   6 +-
 .../deps/oscrypto/_openssl/_libssl.py         |  20 +-
 .../deps/oscrypto/_openssl/tls.py             |  25 +-
 app/lib/package_control/download_manager.py   |  82 +--
 .../downloaders/binary_not_found_error.py     |   3 -
 .../downloaders/downloader_exception.py       |   3 -
 .../package_control/downloaders/http_error.py |   3 -
 .../downloaders/non_clean_exit_error.py       |   3 -
 .../downloaders/non_http_error.py             |   3 -
 .../downloaders/oscrypto_downloader.py        |  22 -
 .../downloaders/urllib_downloader.py          |  53 +-
 .../http/debuggable_http_connection.py        |   8 +-
 .../http/debuggable_http_handler.py           |   2 -
 .../http/invalid_certificate_exception.py     |   3 -
 .../http/validating_https_connection.py       | 673 +++++++++---------
 app/lib/package_control/http_cache.py         |  31 +-
 .../providers/base_repository_provider.py     |   2 +-
 .../providers/channel_provider.py             |   5 +-
 .../providers/provider_exception.py           |   3 -
 36 files changed, 766 insertions(+), 581 deletions(-)

diff --git a/app/lib/package_control/ca_certs.py b/app/lib/package_control/ca_certs.py
index fe1f9ae..0dacbcf 100644
--- a/app/lib/package_control/ca_certs.py
+++ b/app/lib/package_control/ca_certs.py
@@ -11,8 +11,6 @@
     certifi = None
 
 try:
-    from .deps.oscrypto import use_ctypes
-    use_ctypes()
     from .deps.oscrypto import trust_list  # noqa
     from .deps.oscrypto.errors import CACertsError
 except Exception as e:
diff --git a/app/lib/package_control/clients/client_exception.py b/app/lib/package_control/clients/client_exception.py
index a776f9d..5fcbb76 100644
--- a/app/lib/package_control/clients/client_exception.py
+++ b/app/lib/package_control/clients/client_exception.py
@@ -1,6 +1,3 @@
 class ClientException(Exception):
 
     """If a client could not fetch information"""
-
-    def __bytes__(self):
-        return self.__str__().encode('utf-8')
diff --git a/app/lib/package_control/clients/pypi_client.py b/app/lib/package_control/clients/pypi_client.py
index c2cf58a..493c993 100644
--- a/app/lib/package_control/clients/pypi_client.py
+++ b/app/lib/package_control/clients/pypi_client.py
@@ -1,8 +1,8 @@
 import re
 
+from ..pep440 import PEP440InvalidVersionError
 from ..pep440 import PEP440Version
 from ..pep440 import PEP440VersionSpecifier
-from ..package_version import version_sort
 
 from .json_api_client import JSONApiClient
 
@@ -64,6 +64,7 @@ def download_info_from_releases(self, url, asset_templates, tag_prefix=None):
         :param asset_templates:
             A list of tuples of asset template and download_info.
 
+            ```py
             [
                 (
                     "coverage-${version}-cp33-*-win_amd64*.whl",
@@ -73,6 +74,7 @@ def download_info_from_releases(self, url, asset_templates, tag_prefix=None):
                     }
                 )
             ]
+            ```
 
             Supported globs:
 
@@ -192,8 +194,18 @@ def _download_info_from_latest_version(self, name, asset_templates):
         """
 
         pypi_url = "https://pypi.org/pypi/{}/json".format(name)
+
+        # fetch dictionary of form `version: [asset, asset]`
         releases = self.fetch_json(pypi_url)["releases"]
 
+        # create a list of valid pep440 versions
+        versions = []
+        for version in releases:
+            try:
+                versions.append(PEP440Version(version))
+            except PEP440InvalidVersionError:
+                continue
+
         asset_templates = self._expand_asset_variables(asset_templates)
 
         max_releases = self.settings.get("max_releases", 0)
@@ -201,16 +213,17 @@ def _download_info_from_latest_version(self, name, asset_templates):
 
         # get latest compatible release for each asset template
         output = []
-        for version in version_sort(releases, reverse=True):
+        for version in sorted(versions, reverse=True):
             # we don"t want beta releases!
-            if not PEP440Version(version).is_final:
+            if not version.is_final:
                 continue
 
-            assets = releases[version]
+            version_string = str(version)
+            assets = releases[version_string]
             for idx, (pattern, selectors) in enumerate(asset_templates):
                 if max_releases > 0 and num_releases[idx] >= max_releases:
                     continue
-                info = self._make_download_info(pattern, selectors, version, assets)
+                info = self._make_download_info(pattern, selectors, version_string, assets)
                 if not info:
                     continue
                 output.append(info)
diff --git a/app/lib/package_control/deps/__init__.py b/app/lib/package_control/deps/__init__.py
index e69de29..e313053 100644
--- a/app/lib/package_control/deps/__init__.py
+++ b/app/lib/package_control/deps/__init__.py
@@ -0,0 +1,28 @@
+import os.path
+import sys
+
+try:
+    from .oscrypto import use_ctypes, use_openssl
+
+    use_ctypes()
+
+    # On Linux we need to use the version of OpenSSL included with Sublime Text
+    # to prevent conflicts between two different versions of OpenSSL being
+    # dynamically linked. On ST3, we can't use oscrypto for OpenSSL stuff since
+    # it has OpenSSL statically linked, and we can't dlopen() that.
+    # ST 4081 broke sys.executable to return "sublime_text", but other 4xxx builds
+    # will contain "plugin_host".
+    if sys.version_info[:2] == (3, 8) and sys.platform == 'linux' and (
+            'sublime_text' in sys.executable or
+            'plugin_host' in sys.executable):
+        install_dir = os.path.dirname(sys.executable)
+        try:
+            use_openssl(
+                os.path.join(install_dir, 'libcrypto.so.1.1'),
+                os.path.join(install_dir, 'libssl.so.1.1')
+            )
+        except RuntimeError:
+            pass  # runtime error may be raised, when reloading modules.
+
+except ImportError:
+    pass
diff --git a/app/lib/package_control/deps/asn1crypto/algos.py b/app/lib/package_control/deps/asn1crypto/algos.py
index fc25e4d..ff05626 100644
--- a/app/lib/package_control/deps/asn1crypto/algos.py
+++ b/app/lib/package_control/deps/asn1crypto/algos.py
@@ -245,17 +245,29 @@ class SignedDigestAlgorithmId(ObjectIdentifier):
         '1.2.840.10040.4.3': 'sha1_dsa',
         '1.3.14.3.2.13': 'sha1_dsa',
         '1.3.14.3.2.27': 'sha1_dsa',
+        # Source: NIST CSOR Algorithm Registrations
         '2.16.840.1.101.3.4.3.1': 'sha224_dsa',
         '2.16.840.1.101.3.4.3.2': 'sha256_dsa',
+        '2.16.840.1.101.3.4.3.3': 'sha384_dsa',
+        '2.16.840.1.101.3.4.3.4': 'sha512_dsa',
         '1.2.840.10045.4.1': 'sha1_ecdsa',
         '1.2.840.10045.4.3.1': 'sha224_ecdsa',
         '1.2.840.10045.4.3.2': 'sha256_ecdsa',
         '1.2.840.10045.4.3.3': 'sha384_ecdsa',
         '1.2.840.10045.4.3.4': 'sha512_ecdsa',
+        # Source: NIST CSOR Algorithm Registrations
+        '2.16.840.1.101.3.4.3.5': 'sha3_224_dsa',
+        '2.16.840.1.101.3.4.3.6': 'sha3_256_dsa',
+        '2.16.840.1.101.3.4.3.7': 'sha3_384_dsa',
+        '2.16.840.1.101.3.4.3.8': 'sha3_512_dsa',
         '2.16.840.1.101.3.4.3.9': 'sha3_224_ecdsa',
         '2.16.840.1.101.3.4.3.10': 'sha3_256_ecdsa',
         '2.16.840.1.101.3.4.3.11': 'sha3_384_ecdsa',
         '2.16.840.1.101.3.4.3.12': 'sha3_512_ecdsa',
+        '2.16.840.1.101.3.4.3.13': 'sha3_224_rsa',
+        '2.16.840.1.101.3.4.3.14': 'sha3_256_rsa',
+        '2.16.840.1.101.3.4.3.15': 'sha3_384_rsa',
+        '2.16.840.1.101.3.4.3.16': 'sha3_512_rsa',
         # For when the digest is specified elsewhere in a Sequence
         '1.2.840.113549.1.1.1': 'rsassa_pkcs1v15',
         '1.2.840.10040.4.1': 'dsa',
@@ -263,6 +275,16 @@ class SignedDigestAlgorithmId(ObjectIdentifier):
         # RFC 8410 -- https://tools.ietf.org/html/rfc8410
         '1.3.101.112': 'ed25519',
         '1.3.101.113': 'ed448',
+        # Source: BSI TR-03111 V-2
+        '0.4.0.127.0.7.1.1.4.1.1': 'sha1_ecdsa_plain',
+        '0.4.0.127.0.7.1.1.4.1.2': 'sha224_ecdsa_plain',
+        '0.4.0.127.0.7.1.1.4.1.3': 'sha256_ecdsa_plain',
+        '0.4.0.127.0.7.1.1.4.1.4': 'sha384_ecdsa_plain',
+        '0.4.0.127.0.7.1.1.4.1.5': 'sha512_ecdsa_plain',
+        '0.4.0.127.0.7.1.1.4.1.8': 'sha3_224_ecdsa_plain',
+        '0.4.0.127.0.7.1.1.4.1.9': 'sha3_256_ecdsa_plain',
+        '0.4.0.127.0.7.1.1.4.1.10': 'sha3_384_ecdsa_plain',
+        '0.4.0.127.0.7.1.1.4.1.11': 'sha3_512_ecdsa_plain',
     }
 
     _reverse_map = {
@@ -281,16 +303,36 @@ class SignedDigestAlgorithmId(ObjectIdentifier):
         'sha256_dsa': '2.16.840.1.101.3.4.3.2',
         'sha256_ecdsa': '1.2.840.10045.4.3.2',
         'sha256_rsa': '1.2.840.113549.1.1.11',
+        'sha384_dsa': '2.16.840.1.101.3.4.3.3',
         'sha384_ecdsa': '1.2.840.10045.4.3.3',
         'sha384_rsa': '1.2.840.113549.1.1.12',
+        'sha512_dsa': '2.16.840.1.101.3.4.3.4',
         'sha512_ecdsa': '1.2.840.10045.4.3.4',
         'sha512_rsa': '1.2.840.113549.1.1.13',
+        # Source: NIST CSOR Algorithm Registrations
+        'sha3_224_dsa': '2.16.840.1.101.3.4.3.5',
+        'sha3_256_dsa': '2.16.840.1.101.3.4.3.6',
+        'sha3_384_dsa': '2.16.840.1.101.3.4.3.7',
+        'sha3_512_dsa': '2.16.840.1.101.3.4.3.8',
         'sha3_224_ecdsa': '2.16.840.1.101.3.4.3.9',
         'sha3_256_ecdsa': '2.16.840.1.101.3.4.3.10',
         'sha3_384_ecdsa': '2.16.840.1.101.3.4.3.11',
         'sha3_512_ecdsa': '2.16.840.1.101.3.4.3.12',
+        'sha3_224_rsa': '2.16.840.1.101.3.4.3.13',
+        'sha3_256_rsa': '2.16.840.1.101.3.4.3.14',
+        'sha3_384_rsa': '2.16.840.1.101.3.4.3.15',
+        'sha3_512_rsa': '2.16.840.1.101.3.4.3.16',
         'ed25519': '1.3.101.112',
         'ed448': '1.3.101.113',
+        'sha1_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.1',
+        'sha224_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.2',
+        'sha256_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.3',
+        'sha384_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.4',
+        'sha512_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.5',
+        'sha3_224_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.8',
+        'sha3_256_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.9',
+        'sha3_384_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.10',
+        'sha3_512_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.11',
     }
 
 
@@ -305,6 +347,47 @@ class SignedDigestAlgorithm(_ForceNullParameters, Sequence):
         'rsassa_pss': RSASSAPSSParams,
     }
 
+    _algo_map = {
+        'md2_rsa': 'md2',
+        'md5_rsa': 'md5',
+        'sha1_rsa': 'sha1',
+        'sha224_rsa': 'sha224',
+        'sha256_rsa': 'sha256',
+        'sha384_rsa': 'sha384',
+        'sha512_rsa': 'sha512',
+        'sha1_dsa': 'sha1',
+        'sha224_dsa': 'sha224',
+        'sha256_dsa': 'sha256',
+        'sha384_dsa': 'sha384',
+        'sha512_dsa': 'sha512',
+        'sha1_ecdsa': 'sha1',
+        'sha1_ecdsa_plain': 'sha1',
+        'sha224_ecdsa': 'sha224',
+        'sha256_ecdsa': 'sha256',
+        'sha384_ecdsa': 'sha384',
+        'sha512_ecdsa': 'sha512',
+        'sha224_ecdsa_plain': 'sha224',
+        'sha256_ecdsa_plain': 'sha256',
+        'sha384_ecdsa_plain': 'sha384',
+        'sha512_ecdsa_plain': 'sha512',
+        'sha3_224_dsa': 'sha3_224',
+        'sha3_256_dsa': 'sha3_256',
+        'sha3_384_dsa': 'sha3_384',
+        'sha3_512_dsa': 'sha3_512',
+        'sha3_224_ecdsa': 'sha3_224',
+        'sha3_256_ecdsa': 'sha3_256',
+        'sha3_384_ecdsa': 'sha3_384',
+        'sha3_512_ecdsa': 'sha3_512',
+        'sha3_224_ecdsa_plain': 'sha3_224',
+        'sha3_256_ecdsa_plain': 'sha3_256',
+        'sha3_384_ecdsa_plain': 'sha3_384',
+        'sha3_512_ecdsa_plain': 'sha3_512',
+        'sha3_224_rsa': 'sha3_224',
+        'sha3_256_rsa': 'sha3_256',
+        'sha3_384_rsa': 'sha3_384',
+        'sha3_512_rsa': 'sha3_512',
+    }
+
     @property
     def signature_algo(self):
         """
@@ -323,11 +406,21 @@ def signature_algo(self):
             'sha256_rsa': 'rsassa_pkcs1v15',
             'sha384_rsa': 'rsassa_pkcs1v15',
             'sha512_rsa': 'rsassa_pkcs1v15',
+            'sha3_224_rsa': 'rsassa_pkcs1v15',
+            'sha3_256_rsa': 'rsassa_pkcs1v15',
+            'sha3_384_rsa': 'rsassa_pkcs1v15',
+            'sha3_512_rsa': 'rsassa_pkcs1v15',
             'rsassa_pkcs1v15': 'rsassa_pkcs1v15',
             'rsassa_pss': 'rsassa_pss',
             'sha1_dsa': 'dsa',
             'sha224_dsa': 'dsa',
             'sha256_dsa': 'dsa',
+            'sha384_dsa': 'dsa',
+            'sha512_dsa': 'dsa',
+            'sha3_224_dsa': 'dsa',
+            'sha3_256_dsa': 'dsa',
+            'sha3_384_dsa': 'dsa',
+            'sha3_512_dsa': 'dsa',
             'dsa': 'dsa',
             'sha1_ecdsa': 'ecdsa',
             'sha224_ecdsa': 'ecdsa',
@@ -338,6 +431,15 @@ def signature_algo(self):
             'sha3_256_ecdsa': 'ecdsa',
             'sha3_384_ecdsa': 'ecdsa',
             'sha3_512_ecdsa': 'ecdsa',
+            'sha1_ecdsa_plain': 'ecdsa',
+            'sha224_ecdsa_plain': 'ecdsa',
+            'sha256_ecdsa_plain': 'ecdsa',
+            'sha384_ecdsa_plain': 'ecdsa',
+            'sha512_ecdsa_plain': 'ecdsa',
+            'sha3_224_ecdsa_plain': 'ecdsa',
+            'sha3_256_ecdsa_plain': 'ecdsa',
+            'sha3_384_ecdsa_plain': 'ecdsa',
+            'sha3_512_ecdsa_plain': 'ecdsa',
             'ecdsa': 'ecdsa',
             'ed25519': 'ed25519',
             'ed448': 'ed448',
@@ -361,32 +463,53 @@ def hash_algo(self):
         """
 
         algorithm = self['algorithm'].native
+        if algorithm in self._algo_map:
+            return self._algo_map[algorithm]
 
-        algo_map = {
-            'md2_rsa': 'md2',
-            'md5_rsa': 'md5',
-            'sha1_rsa': 'sha1',
-            'sha224_rsa': 'sha224',
-            'sha256_rsa': 'sha256',
-            'sha384_rsa': 'sha384',
-            'sha512_rsa': 'sha512',
-            'sha1_dsa': 'sha1',
-            'sha224_dsa': 'sha224',
-            'sha256_dsa': 'sha256',
-            'sha1_ecdsa': 'sha1',
-            'sha224_ecdsa': 'sha224',
-            'sha256_ecdsa': 'sha256',
-            'sha384_ecdsa': 'sha384',
-            'sha512_ecdsa': 'sha512',
-            'ed25519': 'sha512',
-            'ed448': 'shake256',
-        }
-        if algorithm in algo_map:
-            return algo_map[algorithm]
+        if algorithm == 'rsassa_pss':
+            return self['parameters']['hash_algorithm']['algorithm'].native
+
+        if algorithm == 'ed25519' or algorithm == 'ed448':
+            raise ValueError(unwrap(
+                '''
+                Hash algorithm not known for %s - use .cms_hash_algorithm for CMS purposes.
+                More info at https://github.com/wbond/asn1crypto/pull/230.
+                ''',
+                algorithm
+            ))
+
+        raise ValueError(unwrap(
+            '''
+            Hash algorithm not known for %s
+            ''',
+            algorithm
+        ))
+
+    @property
+    def cms_hash_algo(self):
+        """
+        The hash algorithm for CMS hashing
+
+        :return:
+            A unicode string of "md2", "md5", "sha1", "sha224", "sha256",
+            "sha384", "sha512", "sha512_224", "sha512_256" or "shake256"
+        """
+
+        algorithm = self['algorithm'].native
+
+        if algorithm in self._algo_map:
+            return self._algo_map[algorithm]
 
         if algorithm == 'rsassa_pss':
             return self['parameters']['hash_algorithm']['algorithm'].native
 
+        cms_algo_map = {
+            'ed25519': 'sha512',
+            'ed448': 'shake256',
+        }
+        if algorithm in cms_algo_map:
+            return cms_algo_map[algorithm]
+
         raise ValueError(unwrap(
             '''
             Hash algorithm not known for %s
diff --git a/app/lib/package_control/deps/asn1crypto/cms.py b/app/lib/package_control/deps/asn1crypto/cms.py
index c395b22..b104c99 100644
--- a/app/lib/package_control/deps/asn1crypto/cms.py
+++ b/app/lib/package_control/deps/asn1crypto/cms.py
@@ -729,6 +729,12 @@ class RecipientKeyIdentifier(Sequence):
         ('other', OtherKeyAttribute, {'optional': True}),
     ]
 
+    def _setup(self):
+        super(RecipientKeyIdentifier, self)._setup()
+        # This creates a backwards compatible shim for an
+        # incorrect format field name that was in old versions
+        self._field_map['subjectKeyIdentifier'] = self._field_map['subject_key_identifier']
+
 
 class KeyAgreementRecipientIdentifier(Choice):
     _alternatives = [
@@ -929,21 +935,20 @@ def decompressed(self):
         return self._decompressed
 
 
-class RecipientKeyIdentifier(Sequence):
-    _fields = [
-        ('subjectKeyIdentifier', OctetString),
-        ('date', GeneralizedTime, {'optional': True}),
-        ('other', OtherKeyAttribute, {'optional': True}),
-    ]
-
-
 class SMIMEEncryptionKeyPreference(Choice):
     _alternatives = [
         ('issuer_and_serial_number', IssuerAndSerialNumber, {'implicit': 0}),
-        ('recipientKeyId', RecipientKeyIdentifier, {'implicit': 1}),
-        ('subjectAltKeyIdentifier', PublicKeyInfo, {'implicit': 2}),
+        ('recipient_key_id', RecipientKeyIdentifier, {'implicit': 1}),
+        ('subject_alt_key_identifier', PublicKeyInfo, {'implicit': 2}),
     ]
 
+    def _setup(self):
+        super(SMIMEEncryptionKeyPreference, self)._setup()
+        # This creates backwards compatible shims for two
+        # incorrect format alternative names that were in old versions
+        self._name_map['recipientKeyId'] = self._name_map['recipient_key_id']
+        self._name_map['subjectAltKeyIdentifier'] = self._name_map['subject_alt_key_identifier']
+
 
 class SMIMEEncryptionKeyPreferences(SetOf):
     _child_spec = SMIMEEncryptionKeyPreference
diff --git a/app/lib/package_control/deps/asn1crypto/core.py b/app/lib/package_control/deps/asn1crypto/core.py
index 364c6b5..428ef0e 100644
--- a/app/lib/package_control/deps/asn1crypto/core.py
+++ b/app/lib/package_control/deps/asn1crypto/core.py
@@ -166,6 +166,15 @@ def load(encoded_data, strict=False):
     return Asn1Value.load(encoded_data, strict=strict)
 
 
+def unpickle_helper(asn1crypto_cls, der_bytes):
+    """
+    Helper function to integrate with pickle.
+
+    Note that this must be an importable top-level function.
+    """
+    return asn1crypto_cls.load(der_bytes)
+
+
 class Asn1Value(object):
     """
     The basis of all ASN.1 values
@@ -481,6 +490,12 @@ def __unicode__(self):
 
         return self.__repr__()
 
+    def __reduce__(self):
+        """
+        Permits pickling Asn1Value objects using their DER representation.
+        """
+        return unpickle_helper, (self.__class__, self.dump())
+
     def _new_instance(self):
         """
         Constructs a new copy of the current object, preserving any tagging
@@ -3414,6 +3429,17 @@ def __init__(self, value=None, default=None, **kwargs):
                         self.__setitem__(key, value[key])
                         unused_keys.remove(key)
 
+                # This handles the situation where there is field name
+                # mapping going on due to a field be renamed. Normally
+                # the keys are checked against the primary field list.
+                # If there are still keys left over, check to see if they
+                # are mapped via checking the _field_map.
+                if len(unused_keys):
+                    for key in list(unused_keys):
+                        if key in self._field_map:
+                            self.__setitem__(key, value[key])
+                            unused_keys.remove(key)
+
                 if len(unused_keys):
                     raise ValueError(unwrap(
                         '''
diff --git a/app/lib/package_control/deps/asn1crypto/crl.py b/app/lib/package_control/deps/asn1crypto/crl.py
index 84cb168..bf30196 100644
--- a/app/lib/package_control/deps/asn1crypto/crl.py
+++ b/app/lib/package_control/deps/asn1crypto/crl.py
@@ -44,7 +44,6 @@ class Version(Integer):
     _map = {
         0: 'v1',
         1: 'v2',
-        2: 'v3',
     }
 
 
diff --git a/app/lib/package_control/deps/asn1crypto/x509.py b/app/lib/package_control/deps/asn1crypto/x509.py
index 8cfb2c7..38aa770 100644
--- a/app/lib/package_control/deps/asn1crypto/x509.py
+++ b/app/lib/package_control/deps/asn1crypto/x509.py
@@ -27,7 +27,7 @@
 from ._errors import unwrap
 from ._iri import iri_to_uri, uri_to_iri
 from ._ordereddict import OrderedDict
-from ._types import type_name, str_cls, bytes_to_list
+from ._types import type_name, str_cls, byte_cls, bytes_to_list
 from .algos import AlgorithmIdentifier, AnyAlgorithmIdentifier, DigestAlgorithm, SignedDigestAlgorithm
 from .core import (
     Any,
@@ -708,7 +708,13 @@ def prepped_value(self):
         """
 
         if self._prepped is None:
-            self._prepped = self._ldap_string_prep(self['value'].native)
+            native = self['value'].native
+            if isinstance(native, str_cls):
+                self._prepped = self._ldap_string_prep(native)
+            else:
+                if isinstance(native, byte_cls):
+                    native = ' ' + native.decode('cp1252') + ' '
+                self._prepped = native
         return self._prepped
 
     def __ne__(self, other):
@@ -1015,15 +1021,27 @@ def build(cls, name_dict, use_printable=False):
 
         for attribute_name, attribute_value in name_dict.items():
             attribute_name = NameType.map(attribute_name)
-            if attribute_name == 'email_address':
-                value = EmailAddress(attribute_value)
-            elif attribute_name == 'domain_component':
-                value = DNSName(attribute_value)
+            attribute_class = NameTypeAndValue._oid_specs.get(attribute_name)
+            if not attribute_class:
+                raise ValueError(unwrap(
+                    '''
+                    No encoding specification found for %s
+                    ''',
+                    attribute_name
+                ))
+
+            if isinstance(attribute_value, attribute_class):
+                value = attribute_value
+
+            elif attribute_class is not DirectoryString:
+                value = attribute_class(attribute_value)
+
             elif attribute_name in set(['dn_qualifier', 'country_name', 'serial_number']):
                 value = DirectoryString(
                     name='printable_string',
                     value=PrintableString(attribute_value)
                 )
+
             else:
                 value = DirectoryString(
                     name=encoding_name,
diff --git a/app/lib/package_control/deps/oscrypto/__init__.py b/app/lib/package_control/deps/oscrypto/__init__.py
index eb27313..6e4d487 100644
--- a/app/lib/package_control/deps/oscrypto/__init__.py
+++ b/app/lib/package_control/deps/oscrypto/__init__.py
@@ -119,11 +119,17 @@ def use_openssl(libcrypto_path, libssl_path, trust_list_path=None):
     if not isinstance(libssl_path, str_cls):
         raise ValueError('libssl_path must be a unicode string, not %s' % type_name(libssl_path))
 
-    if not os.path.exists(libcrypto_path):
-        raise LibraryNotFoundError('libcrypto does not exist at %s' % libcrypto_path)
+    do_path_checks = True
+    if sys.platform == 'darwin':
+        mac_version_info = tuple(map(int, platform.mac_ver()[0].split('.')[:2]))
+        do_path_checks = mac_version_info < (10, 16)
 
-    if not os.path.exists(libssl_path):
-        raise LibraryNotFoundError('libssl does not exist at %s' % libssl_path)
+    if do_path_checks:
+        if not os.path.exists(libcrypto_path):
+            raise LibraryNotFoundError('libcrypto does not exist at %s' % libcrypto_path)
+
+        if not os.path.exists(libssl_path):
+            raise LibraryNotFoundError('libssl does not exist at %s' % libssl_path)
 
     if trust_list_path is not None:
         if not isinstance(trust_list_path, str_cls):
diff --git a/app/lib/package_control/deps/oscrypto/_ffi.py b/app/lib/package_control/deps/oscrypto/_ffi.py
index fe6bdee..7b7ab87 100644
--- a/app/lib/package_control/deps/oscrypto/_ffi.py
+++ b/app/lib/package_control/deps/oscrypto/_ffi.py
@@ -111,8 +111,6 @@ def is_null(point):
             return True
         if ffi.getctype(ffi.typeof(point)) == 'void *':
             return False
-        if point[0] == ffi.NULL:
-            return True
         return False
 
     def errno():
@@ -212,6 +210,7 @@ def callback(library, signature_name, func):
         'int': c_int,
         'unsigned int': c_uint,
         'size_t': ctypes.c_size_t,
+        'uint16_t': ctypes.c_uint16,
         'uint32_t': ctypes.c_uint32,
     }
     if sys.platform == 'win32':
@@ -294,6 +293,8 @@ def cast(library, type_, value):
         return ctypes.cast(value, type_)
 
     def sizeof(library, value):
+        if isinstance(value, str_cls):
+            return ctypes.sizeof(getattr(library, value))
         return ctypes.sizeof(value)
 
     def bytes_from_buffer(buffer, maxlen=None):
diff --git a/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py b/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py
index d277d98..4cce32b 100644
--- a/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py
+++ b/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py
@@ -22,6 +22,17 @@
     raise OSError('Only OS X 10.7 and newer are supported, not %s.%s' % (version_info[0], version_info[1]))
 
 ffi = FFI()
+
+# It appears SSLCipherSuite is uint16_t on ARM64, but uint32_t on X86_64
+if platform.machine() == 'arm64':
+    ffi.cdef("""
+        typedef uint16_t SSLCipherSuite;
+    """)
+else:
+    ffi.cdef("""
+        typedef uint32_t SSLCipherSuite;
+    """)
+
 ffi.cdef("""
     typedef bool Boolean;
     typedef long CFIndex;
@@ -38,7 +49,6 @@
     typedef uint32_t CSSM_KEYUSE;
     typedef uint32_t CSSM_CERT_TYPE;
     typedef uint32_t SSLProtocol;
-    typedef uint32_t SSLCipherSuite;
     typedef uint32_t SecTrustResultType;
 
     typedef void *CFTypeRef;
@@ -103,6 +113,7 @@
     SecTransformRef SecSignTransformCreate(SecKeyRef key, CFErrorRef *error);
     SecCertificateRef SecCertificateCreateWithData(CFAllocatorRef allocator, CFDataRef data);
     OSStatus SecCertificateCopyPublicKey(SecCertificateRef certificate, SecKeyRef *key);
+    SecKeyRef SecCertificateCopyKey(SecCertificateRef certificate);
     CFStringRef SecCopyErrorMessageString(OSStatus status, void *reserved);
     OSStatus SecTrustCopyAnchorCertificates(CFArrayRef *anchors);
     CFDataRef SecCertificateCopyData(SecCertificateRef certificate);
diff --git a/app/lib/package_control/deps/oscrypto/_mac/_security_ctypes.py b/app/lib/package_control/deps/oscrypto/_mac/_security_ctypes.py
index ee1be00..3dfdefa 100644
--- a/app/lib/package_control/deps/oscrypto/_mac/_security_ctypes.py
+++ b/app/lib/package_control/deps/oscrypto/_mac/_security_ctypes.py
@@ -2,7 +2,20 @@
 from __future__ import unicode_literals, division, absolute_import, print_function
 
 import platform
-from ctypes import c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_int, c_uint32, c_uint64, c_ulong, c_long, c_bool
+from ctypes import (
+    c_bool,
+    c_byte,
+    c_char_p,
+    c_int,
+    c_int32,
+    c_long,
+    c_size_t,
+    c_uint16,
+    c_uint32,
+    c_uint64,
+    c_ulong,
+    c_void_p,
+)
 from ctypes import CDLL, POINTER, CFUNCTYPE, Structure
 
 from .._ffi import FFIEngineError
@@ -57,7 +70,11 @@
 SecExternalItemType = c_uint32
 SecPadding = c_uint32
 SSLProtocol = c_uint32
-SSLCipherSuite = c_uint32
+# It appears SSLCipherSuite is uint16_t on ARM64, but uint32_t on X86_64
+if platform.machine() == 'arm64':
+    SSLCipherSuite = c_uint16
+else:
+    SSLCipherSuite = c_uint32
 SecPolicyRef = POINTER(c_void_p)
 CSSM_CC_HANDLE = c_uint64
 CSSM_ALGORITHMS = c_uint32
@@ -186,6 +203,11 @@ class SecItemImportExportKeyParameters(Structure):
     ]
     Security.SecCertificateCreateWithData.restype = SecCertificateRef
 
+    Security.SecCertificateCopyKey.argtypes = [
+        SecCertificateRef,
+    ]
+    Security.SecCertificateCopyKey.restype = SecKeyRef
+
     Security.SecCertificateCopyPublicKey.argtypes = [
         SecCertificateRef,
         POINTER(SecKeyRef)
diff --git a/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py b/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py
index 131197b..3bb851b 100644
--- a/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py
+++ b/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py
@@ -261,9 +261,14 @@ def public_key(self):
                 sec_cert_ref = self.sec_certificate_ref
 
             sec_public_key_ref_pointer = new(Security, 'SecKeyRef *')
-            res = Security.SecCertificateCopyPublicKey(sec_cert_ref, sec_public_key_ref_pointer)
-            handle_sec_error(res)
-            sec_public_key_ref = unwrap(sec_public_key_ref_pointer)
+            if osx_version_info >= (10, 14):
+                sec_public_key_ref = Security.SecCertificateCopyKey(sec_cert_ref)
+                if is_null(sec_public_key_ref):
+                    raise ValueError('Unable to extract public key from certificate')
+            else:
+                res = Security.SecCertificateCopyPublicKey(sec_cert_ref, sec_public_key_ref_pointer)
+                handle_sec_error(res)
+                sec_public_key_ref = unwrap(sec_public_key_ref_pointer)
             self._public_key = PublicKey(sec_public_key_ref, self.asn1['tbs_certificate']['subject_public_key_info'])
 
         return self._public_key
diff --git a/app/lib/package_control/deps/oscrypto/_mac/tls.py b/app/lib/package_control/deps/oscrypto/_mac/tls.py
index f936407..50dc158 100644
--- a/app/lib/package_control/deps/oscrypto/_mac/tls.py
+++ b/app/lib/package_control/deps/oscrypto/_mac/tls.py
@@ -29,6 +29,7 @@
     new,
     null,
     pointer_set,
+    sizeof,
     struct,
     struct_bytes,
     unwrap,
@@ -633,8 +634,8 @@ def _handshake(self):
 
             supported_ciphers = deref(supported_ciphers_pointer)
 
-            cipher_buffer = buffer_from_bytes(supported_ciphers * 4)
-            supported_cipher_suites_pointer = cast(Security, 'uint32_t *', cipher_buffer)
+            cipher_buffer = buffer_from_bytes(supported_ciphers * sizeof(Security, 'SSLCipherSuite'))
+            supported_cipher_suites_pointer = cast(Security, 'SSLCipherSuite *', cipher_buffer)
             result = Security.SSLGetSupportedCiphers(
                 session_context,
                 supported_cipher_suites_pointer,
@@ -645,7 +646,7 @@ def _handshake(self):
             supported_ciphers = deref(supported_ciphers_pointer)
             supported_cipher_suites = array_from_pointer(
                 Security,
-                'uint32_t',
+                'SSLCipherSuite',
                 supported_cipher_suites_pointer,
                 supported_ciphers
             )
@@ -658,9 +659,9 @@ def _handshake(self):
                     good_ciphers.append(supported_cipher_suite)
 
             num_good_ciphers = len(good_ciphers)
-            good_ciphers_array = new(Security, 'uint32_t[]', num_good_ciphers)
+            good_ciphers_array = new(Security, 'SSLCipherSuite[]', num_good_ciphers)
             array_set(good_ciphers_array, good_ciphers)
-            good_ciphers_pointer = cast(Security, 'uint32_t *', good_ciphers_array)
+            good_ciphers_pointer = cast(Security, 'SSLCipherSuite *', good_ciphers_array)
             result = Security.SSLSetEnabledCiphers(
                 session_context,
                 good_ciphers_pointer,
diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py
index 1c52488..90768d7 100644
--- a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py
+++ b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py
@@ -44,9 +44,11 @@
 # like PKCS12
 libcrypto_legacy_support = True
 if libcrypto_version_info >= (3, ):
-    if libcrypto.OSSL_PROVIDER_available(null(), "legacy".encode("ascii")):
-        libcrypto.OSSL_PROVIDER_load(null(), "legacy".encode("ascii"))
-    else:
+
+    libcrypto.OSSL_PROVIDER_load(null(), "legacy".encode("ascii"))
+    libcrypto.OSSL_PROVIDER_load(null(), "default".encode("ascii"))
+
+    if libcrypto.OSSL_PROVIDER_available(null(), "legacy".encode("ascii")) == 0:
         libcrypto_legacy_support = False
 
 
diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py
index 8aed03e..6f901ea 100644
--- a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py
+++ b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py
@@ -37,13 +37,13 @@
 
 is_libressl = 'LibreSSL' in version_string
 
-version_match = re.search('\\b(\\d\\.\\d\\.\\d[a-z]*)\\b', version_string)
+version_match = re.search('\\b(\\d+\\.\\d+\\.\\d+[a-z]*)\\b', version_string)
 if not version_match:
-    version_match = re.search('(?<=LibreSSL )(\\d\\.\\d(\\.\\d)?)\\b', version_string)
+    version_match = re.search('(?<=LibreSSL )(\\d+\\.\\d+(\\.\\d+)?)\\b', version_string)
 if not version_match:
     raise LibraryNotFoundError('Error detecting the version of libcrypto')
 version = version_match.group(1)
-version_parts = re.sub('(\\d)([a-z]+)', '\\1.\\2', version).split('.')
+version_parts = re.sub('(\\d+)([a-z]+)', '\\1.\\2', version).split('.')
 version_info = tuple(int(part) if part.isdigit() else part for part in version_parts)
 
 # LibreSSL is compatible with libcrypto from OpenSSL 1.0.1
diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py
index e33ebbc..4e1b3cf 100644
--- a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py
+++ b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py
@@ -40,13 +40,13 @@
 
 is_libressl = 'LibreSSL' in version_string
 
-version_match = re.search('\\b(\\d\\.\\d\\.\\d[a-z]*)\\b', version_string)
+version_match = re.search('\\b(\\d+\\.\\d+\\.\\d+[a-z]*)\\b', version_string)
 if not version_match:
-    version_match = re.search('(?<=LibreSSL )(\\d\\.\\d(\\.\\d)?)\\b', version_string)
+    version_match = re.search('(?<=LibreSSL )(\\d+\\.\\d+(\\.\\d+)?)\\b', version_string)
 if not version_match:
     raise LibraryNotFoundError('Error detecting the version of libcrypto')
 version = version_match.group(1)
-version_parts = re.sub('(\\d)([a-z]+)', '\\1.\\2', version).split('.')
+version_parts = re.sub('(\\d+)([a-z]+)', '\\1.\\2', version).split('.')
 version_info = tuple(int(part) if part.isdigit() else part for part in version_parts)
 
 # LibreSSL is compatible with libcrypto from OpenSSL 1.0.1
diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libssl.py b/app/lib/package_control/deps/oscrypto/_openssl/_libssl.py
index 2fa2bce..7717650 100644
--- a/app/lib/package_control/deps/oscrypto/_openssl/_libssl.py
+++ b/app/lib/package_control/deps/oscrypto/_openssl/_libssl.py
@@ -1,7 +1,10 @@
 # coding: utf-8
 from __future__ import unicode_literals, division, absolute_import, print_function
 
-from .. import ffi
+import re
+import sys
+
+from .. import ffi, _backend_config
 
 # Initialize OpenSSL
 from ._libcrypto import libcrypto_version_info
@@ -15,6 +18,7 @@
 __all__ = [
     'libssl',
     'LibsslConst',
+    'error_code_version_info',
 ]
 
 
@@ -87,3 +91,17 @@ class LibsslConst():
 
 if libcrypto_version_info >= (1, 1, 0):
     LibsslConst.SSL_R_DH_KEY_TOO_SMALL = 394
+
+
+error_code_version_info = libcrypto_version_info
+# The Apple version of libssl seems to have changed various codes for
+# some reason, but the rest of the API is still OpenSSL 1.0.1
+if sys.platform == 'darwin':
+    libssl_abi_match = re.match(r'/usr/lib/libssl\.(\d+)', _backend_config().get('libssl_path', ''))
+    if libssl_abi_match and int(libssl_abi_match.group(1)) >= 44:
+        LibsslConst.SSL_F_TLS_PROCESS_SERVER_CERTIFICATE = 7
+        LibsslConst.SSL_F_SSL3_GET_KEY_EXCHANGE = 9
+        LibsslConst.SSL_F_SSL3_READ_BYTES = 4
+        LibsslConst.SSL_F_SSL3_GET_RECORD = 4
+        LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO = 4
+        error_code_version_info = (1, 1, 0)
diff --git a/app/lib/package_control/deps/oscrypto/_openssl/tls.py b/app/lib/package_control/deps/oscrypto/_openssl/tls.py
index 6f180f4..a4a4570 100644
--- a/app/lib/package_control/deps/oscrypto/_openssl/tls.py
+++ b/app/lib/package_control/deps/oscrypto/_openssl/tls.py
@@ -7,7 +7,7 @@
 import select
 import numbers
 
-from ._libssl import libssl, LibsslConst
+from ._libssl import error_code_version_info, libssl, LibsslConst
 from ._libcrypto import libcrypto, libcrypto_version_info, handle_openssl_error, peek_openssl_error
 from .. import _backend_config
 from .._asn1 import Certificate as Asn1Certificate
@@ -554,7 +554,7 @@ def _handshake(self):
                     if info == dh_key_info_1 or info == dh_key_info_2 or info == dh_key_info_3:
                         raise_dh_params()
 
-                    if libcrypto_version_info < (1, 1):
+                    if error_code_version_info < (1, 1):
                         unknown_protocol_info = (
                             LibsslConst.ERR_LIB_SSL,
                             LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO,
@@ -580,23 +580,16 @@ def _handshake(self):
                     if info == tls_version_info_error:
                         raise_protocol_version()
 
+                    # There are multiple functions that can result in a handshake failure,
+                    # but our custom handshake parsing code figures out what really happened,
+                    # and what is more, OpenSSL 3 got rid of function codes. Because of this,
+                    # we skip checking the function code.
                     handshake_error_info = (
                         LibsslConst.ERR_LIB_SSL,
-                        LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO,
                         LibsslConst.SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE
                     )
-                    # OpenSSL 3.0 no longer has func codes, so this can be confused
-                    # with the following handler which needs to check for client auth
-                    if libcrypto_version_info < (3, ) and info == handshake_error_info:
-                        raise_handshake()
 
-                    handshake_failure_info = (
-                        LibsslConst.ERR_LIB_SSL,
-                        LibsslConst.SSL_F_SSL3_READ_BYTES,
-                        LibsslConst.SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE
-                    )
-                    handshake_failure_info = _homogenize_openssl3_error(handshake_failure_info)
-                    if info == handshake_failure_info:
+                    if (info[0], info[2]) == handshake_error_info:
                         saw_client_auth = False
                         for record_type, _, record_data in parse_tls_records(handshake_server_bytes):
                             if record_type != b'\x16':
@@ -609,7 +602,7 @@ def _handshake(self):
                             raise_client_auth()
                         raise_handshake()
 
-                    if libcrypto_version_info < (1, 1):
+                    if error_code_version_info < (1, 1):
                         cert_verify_failed_info = (
                             LibsslConst.ERR_LIB_SSL,
                             LibsslConst.SSL_F_SSL3_GET_SERVER_CERTIFICATE,
@@ -780,7 +773,7 @@ def _raw_write(self):
                 sent = self._socket.send(to_write)
             except (socket_.error) as e:
                 # Handle ECONNRESET and EPIPE
-                if e.errno == 104 or e.errno == 32:
+                if e.errno == 104 or e.errno == 54 or e.errno == 32:
                     raise_disconnect = True
                 # Handle EPROTOTYPE. Newer versions of macOS will return this
                 # if we try to call send() while the socket is being torn down
diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py
index 436dc37..0c611c3 100644
--- a/app/lib/package_control/download_manager.py
+++ b/app/lib/package_control/download_manager.py
@@ -14,13 +14,11 @@
 from .downloaders import DOWNLOADERS
 from .downloaders.binary_not_found_error import BinaryNotFoundError
 from .downloaders.downloader_exception import DownloaderException
-from .downloaders.oscrypto_downloader_exception import OscryptoDownloaderException
 from .downloaders.rate_limit_exception import RateLimitException
 from .downloaders.rate_limit_exception import RateLimitSkipException
-from .downloaders.urllib_downloader import UrlLibDownloader
-from .downloaders.win_downloader_exception import WinDownloaderException
 from .http_cache import HttpCache
 
+_http_cache = None
 
 _managers = {}
 """A dict of domains - each points to a list of downloaders"""
@@ -85,7 +83,7 @@ def http_get(url, settings, error_message='', prefer_cached=False):
 
 
 def _grab(url, settings):
-    global _managers, _lock, _in_use, _timer
+    global _http_cache, _managers, _lock, _in_use, _timer
 
     with _lock:
         if _timer:
@@ -100,7 +98,15 @@ def _grab(url, settings):
             _managers[hostname] = []
 
         if not _managers[hostname]:
-            _managers[hostname].append(DownloadManager(settings))
+            http_cache = None
+            if settings.get('http_cache'):
+                # first call defines http cache settings
+                # It is safe to assume all calls share same settings.
+                if not _http_cache:
+                    _http_cache = HttpCache(settings.get('http_cache_length', 604800))
+                http_cache = _http_cache
+
+            _managers[hostname].append(DownloadManager(settings, http_cache))
 
         _in_use += 1
 
@@ -135,13 +141,17 @@ def _release(url, manager):
 
 
 def close_all_connections():
-    global _managers, _lock, _in_use, _timer
+    global _http_cache, _managers, _lock, _in_use, _timer
 
     with _lock:
         if _timer:
             _timer.cancel()
             _timer = None
 
+        if _http_cache:
+            _http_cache.prune()
+            _http_cache = None
+
         for managers in _managers.values():
             for manager in managers:
                 manager.close()
@@ -266,7 +276,7 @@ def update_url(url, debug):
 
 class DownloadManager:
 
-    def __init__(self, settings):
+    def __init__(self, settings, http_cache=None):
         # Cache the downloader for re-use
         self.downloader = None
 
@@ -292,11 +302,10 @@ def __init__(self, settings):
         if user_agent and '%s' in user_agent:
             self.settings['user_agent'] = user_agent % __version__
 
-        # setup private http cache storage driver
-        if settings.get('http_cache'):
-            cache_length = settings.get('http_cache_length', 604800)
-            self.settings['cache'] = HttpCache(cache_length)
-            self.settings['cache_length'] = cache_length
+        # assign global http cache storage driver
+        if http_cache:
+            self.settings['cache'] = http_cache
+            self.settings['cache_length'] = http_cache.ttl
 
     def close(self):
         if self.downloader:
@@ -479,52 +488,3 @@ def fetch(self, url, error_message, prefer_cached=False):
                 str(e)
             )
             raise
-
-        except (OscryptoDownloaderException) as e:
-            console_write(
-                '''
-                Attempting to use Urllib downloader due to Oscrypto error: %s
-                ''',
-                str(e)
-            )
-
-            self.downloader = UrlLibDownloader(self.settings)
-            # Try again with the new downloader!
-            return self.fetch(url, error_message, prefer_cached)
-
-        except (WinDownloaderException) as e:
-            console_write(
-                '''
-                Attempting to use Urllib downloader due to WinINet error: %s
-                ''',
-                str(e)
-            )
-
-            # Here we grab the proxy info extracted from WinInet to fill in
-            # the Package Control settings if those are not present. This should
-            # hopefully make a seamless fallback for users who run into weird
-            # windows errors related to network communication.
-            wininet_proxy = self.downloader.proxy or ''
-            wininet_proxy_username = self.downloader.proxy_username or ''
-            wininet_proxy_password = self.downloader.proxy_password or ''
-
-            http_proxy = self.settings.get('http_proxy', '')
-            https_proxy = self.settings.get('https_proxy', '')
-            proxy_username = self.settings.get('proxy_username', '')
-            proxy_password = self.settings.get('proxy_password', '')
-
-            settings = self.settings.copy()
-            if not http_proxy and wininet_proxy:
-                settings['http_proxy'] = wininet_proxy
-            if not https_proxy and wininet_proxy:
-                settings['https_proxy'] = wininet_proxy
-
-            has_proxy = settings.get('http_proxy') or settings.get('https_proxy')
-            if has_proxy and not proxy_username and wininet_proxy_username:
-                settings['proxy_username'] = wininet_proxy_username
-            if has_proxy and not proxy_password and wininet_proxy_password:
-                settings['proxy_password'] = wininet_proxy_password
-
-            self.downloader = UrlLibDownloader(settings)
-            # Try again with the new downloader!
-            return self.fetch(url, error_message, prefer_cached)
diff --git a/app/lib/package_control/downloaders/binary_not_found_error.py b/app/lib/package_control/downloaders/binary_not_found_error.py
index e93a8f7..acff446 100644
--- a/app/lib/package_control/downloaders/binary_not_found_error.py
+++ b/app/lib/package_control/downloaders/binary_not_found_error.py
@@ -1,6 +1,3 @@
 class BinaryNotFoundError(Exception):
 
     """If a necessary executable is not found in the PATH on the system"""
-
-    def __bytes__(self):
-        return self.__str__().encode('utf-8')
diff --git a/app/lib/package_control/downloaders/downloader_exception.py b/app/lib/package_control/downloaders/downloader_exception.py
index 71509be..c76043c 100644
--- a/app/lib/package_control/downloaders/downloader_exception.py
+++ b/app/lib/package_control/downloaders/downloader_exception.py
@@ -1,6 +1,3 @@
 class DownloaderException(Exception):
 
     """If a downloader could not download a URL"""
-
-    def __bytes__(self):
-        return self.__str__().encode('utf-8')
diff --git a/app/lib/package_control/downloaders/http_error.py b/app/lib/package_control/downloaders/http_error.py
index dbf1383..10e09dd 100644
--- a/app/lib/package_control/downloaders/http_error.py
+++ b/app/lib/package_control/downloaders/http_error.py
@@ -5,6 +5,3 @@ class HttpError(Exception):
     def __init__(self, message, code):
         self.code = code
         super(HttpError, self).__init__(message)
-
-    def __bytes__(self):
-        return self.__str__().encode('utf-8')
diff --git a/app/lib/package_control/downloaders/non_clean_exit_error.py b/app/lib/package_control/downloaders/non_clean_exit_error.py
index 8c32d6c..b1c93b9 100644
--- a/app/lib/package_control/downloaders/non_clean_exit_error.py
+++ b/app/lib/package_control/downloaders/non_clean_exit_error.py
@@ -12,6 +12,3 @@ def __init__(self, returncode):
 
     def __str__(self):
         return str(self.returncode)
-
-    def __bytes__(self):
-        return self.__str__().encode('utf-8')
diff --git a/app/lib/package_control/downloaders/non_http_error.py b/app/lib/package_control/downloaders/non_http_error.py
index 61cf13d..8dc5432 100644
--- a/app/lib/package_control/downloaders/non_http_error.py
+++ b/app/lib/package_control/downloaders/non_http_error.py
@@ -1,6 +1,3 @@
 class NonHttpError(Exception):
 
     """If a downloader had a non-clean exit, but it was not due to an HTTP error"""
-
-    def __bytes__(self):
-        return self.__str__().encode('utf-8')
diff --git a/app/lib/package_control/downloaders/oscrypto_downloader.py b/app/lib/package_control/downloaders/oscrypto_downloader.py
index 9379114..b6934e9 100644
--- a/app/lib/package_control/downloaders/oscrypto_downloader.py
+++ b/app/lib/package_control/downloaders/oscrypto_downloader.py
@@ -7,7 +7,6 @@
 import os
 import re
 import socket
-import sys
 from urllib.parse import urlparse
 from urllib.request import parse_keqv_list, parse_http_list
 
@@ -16,7 +15,6 @@
 from ..console_write import console_write
 from ..deps.asn1crypto.util import OrderedDict
 from ..deps.asn1crypto import pem, x509
-from ..deps.oscrypto import use_ctypes, use_openssl
 from .downloader_exception import DownloaderException
 from .oscrypto_downloader_exception import OscryptoDownloaderException
 from .basic_auth_downloader import BasicAuthDownloader
@@ -24,26 +22,6 @@
 from .decoding_downloader import DecodingDownloader
 from .limiting_downloader import LimitingDownloader
 
-use_ctypes()
-
-# On Linux we need to use the version of OpenSSL included with Sublime Text
-# to prevent conflicts between two different versions of OpenSSL being
-# dynamically linked. On ST3, we can't use oscrypto for OpenSSL stuff since
-# it has OpenSSL statically linked, and we can't dlopen() that.
-# ST 4081 broke sys.executable to return "sublime_text", but other 4xxx builds
-# will contain "plugin_host".
-if sys.version_info[:2] == (3, 8) and sys.platform == 'linux' and (
-        'sublime_text' in sys.executable or
-        'plugin_host' in sys.executable):
-    install_dir = os.path.dirname(sys.executable)
-    try:
-        use_openssl(
-            os.path.join(install_dir, 'libcrypto.so.1.1'),
-            os.path.join(install_dir, 'libssl.so.1.1')
-        )
-    except RuntimeError:
-        pass  # runtime error may be raised, when reloading modules.
-
 from ..deps.oscrypto import tls  # noqa
 from ..deps.oscrypto import errors as oscrypto_errors  # noqa
 
diff --git a/app/lib/package_control/downloaders/urllib_downloader.py b/app/lib/package_control/downloaders/urllib_downloader.py
index 65d1f90..9455a91 100644
--- a/app/lib/package_control/downloaders/urllib_downloader.py
+++ b/app/lib/package_control/downloaders/urllib_downloader.py
@@ -1,6 +1,5 @@
 import re
-import sys
-import urllib.request as urllib_compat
+import ssl
 from http.client import HTTPException, BadStatusLine
 from urllib.request import (
     build_opener,
@@ -14,7 +13,7 @@
 from socket import error as ConnectionError
 
 from .. import text
-from ..ca_certs import get_ca_bundle_path
+from ..ca_certs import get_ca_bundle_path, get_user_ca_bundle_path
 from ..console_write import console_write
 from ..http.validating_https_handler import ValidatingHTTPSHandler
 from ..http.debuggable_http_handler import DebuggableHTTPHandler
@@ -125,6 +124,9 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False):
 
                 return self.cache_result('get', url, http_file.getcode(), http_file.headers, result)
 
+            except (ssl.CertificateError) as e:
+                error_string = 'Certificate validation for %s failed: %s' % (url, str(e))
+
             except (HTTPException) as e:
                 # Since we use keep-alives, it is possible the other end closed
                 # the connection, and we may just need to re-open
@@ -269,11 +271,11 @@ def setup_opener(self, url, timeout):
                 if https_proxy:
                     password_manager.add_password(None, https_proxy, proxy_username, proxy_password)
 
-            handlers = [proxy_handler]
-
-            basic_auth_handler = ProxyBasicAuthHandler(password_manager)
-            digest_auth_handler = ProxyDigestAuthHandler(password_manager)
-            handlers.extend([digest_auth_handler, basic_auth_handler])
+            handlers = [
+                proxy_handler,
+                ProxyBasicAuthHandler(password_manager),
+                ProxyDigestAuthHandler(password_manager)
+            ]
 
             debug = self.settings.get('debug')
 
@@ -291,16 +293,31 @@ def setup_opener(self, url, timeout):
 
             secure_url_match = re.match(r'^https://([^/#?]+)', url)
             if secure_url_match is not None:
-                bundle_path = get_ca_bundle_path(self.settings)
-                bundle_path = bundle_path.encode(sys.getfilesystemencoding())
-                handlers.append(ValidatingHTTPSHandler(
-                    ca_certs=bundle_path,
-                    debug=debug,
-                    passwd=password_manager,
-                    user_agent=self.settings.get('user_agent')
-                ))
+                if hasattr(ssl.SSLContext, 'load_default_certs'):
+                    # Python 3.8's ssl module can load CA certificates from the
+                    # native OS certificate stores; only user-defined CAs need to
+                    # be merged in — no home-grown merged CA bundle is required.
+                    handlers.append(ValidatingHTTPSHandler(
+                        ca_certs=None,
+                        extra_ca_certs=get_user_ca_bundle_path(self.settings),
+                        debug=debug,
+                        passwd=password_manager,
+                        user_agent=self.settings.get('user_agent')
+                    ))
+
+                else:
+                    # Python 3.3's ssl module cannot access the OS certificate stores
+                    handlers.append(ValidatingHTTPSHandler(
+                        ca_certs=get_ca_bundle_path(self.settings),
+                        extra_ca_certs=None,
+                        debug=debug,
+                        passwd=password_manager,
+                        user_agent=self.settings.get('user_agent')
+                    ))
+
             else:
-                handlers.append(DebuggableHTTPHandler(debug=debug, passwd=password_manager))
+                handlers.append(DebuggableHTTPHandler(debug=debug))
+
             self.opener = build_opener(*handlers)
 
     def supports_ssl(self):
@@ -310,7 +327,7 @@ def supports_ssl(self):
         :return:
             If the object supports HTTPS requests
         """
-        return 'ssl' in sys.modules and hasattr(urllib_compat, 'HTTPSHandler')
+        return True
 
     def supports_plaintext(self):
         """
diff --git a/app/lib/package_control/http/debuggable_http_connection.py b/app/lib/package_control/http/debuggable_http_connection.py
index 4ddb147..e92932a 100644
--- a/app/lib/package_control/http/debuggable_http_connection.py
+++ b/app/lib/package_control/http/debuggable_http_connection.py
@@ -15,8 +15,6 @@ class DebuggableHTTPConnection(HTTPConnection):
     _debug_protocol = 'HTTP'
 
     def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kwargs):
-        self.passwd = kwargs.get('passwd')
-
         if 'debug' in kwargs and kwargs['debug']:
             self.debuglevel = 5
         elif 'debuglevel' in kwargs:
@@ -60,10 +58,8 @@ def send(self, string):
                 self.debuglevel = reset_debug
 
     def request(self, method, url, body=None, headers={}):
-        original_headers = headers.copy()
-
         # By default urllib2 and urllib.request override the Connection header,
         # however, it is preferred to be able to re-use it
-        original_headers['Connection'] = 'Keep-Alive'
+        headers['Connection'] = 'Keep-Alive'
 
-        HTTPConnection.request(self, method, url, body, original_headers)
+        HTTPConnection.request(self, method, url, body, headers)
diff --git a/app/lib/package_control/http/debuggable_http_handler.py b/app/lib/package_control/http/debuggable_http_handler.py
index eabf34d..6391efc 100644
--- a/app/lib/package_control/http/debuggable_http_handler.py
+++ b/app/lib/package_control/http/debuggable_http_handler.py
@@ -17,11 +17,9 @@ def __init__(self, debuglevel=0, debug=False, **kwargs):
             self._debuglevel = 5
         else:
             self._debuglevel = debuglevel
-        self.passwd = kwargs.get('passwd')
 
     def http_open(self, req):
         def http_class_wrapper(host, **kwargs):
-            kwargs['passwd'] = self.passwd
             if 'debuglevel' not in kwargs:
                 kwargs['debuglevel'] = self._debuglevel
             return DebuggableHTTPConnection(host, **kwargs)
diff --git a/app/lib/package_control/http/invalid_certificate_exception.py b/app/lib/package_control/http/invalid_certificate_exception.py
index f6f79a7..fa5fe87 100644
--- a/app/lib/package_control/http/invalid_certificate_exception.py
+++ b/app/lib/package_control/http/invalid_certificate_exception.py
@@ -15,6 +15,3 @@ def __init__(self, host, cert, reason):
         self.reason = reason.rstrip()
         message = 'Host %s returned an invalid certificate (%s) %s' % (self.host, self.reason, self.cert)
         HTTPException.__init__(self, message.rstrip())
-
-    def __bytes__(self):
-        return self.__str__().encode('utf-8')
diff --git a/app/lib/package_control/http/validating_https_connection.py b/app/lib/package_control/http/validating_https_connection.py
index 8448792..714c111 100644
--- a/app/lib/package_control/http/validating_https_connection.py
+++ b/app/lib/package_control/http/validating_https_connection.py
@@ -1,9 +1,10 @@
-import re
-import socket
 import base64
 import hashlib
 import os
-import sys
+import re
+import socket
+import ssl
+
 from http.client import HTTPS_PORT
 from urllib.request import parse_keqv_list, parse_http_list
 
@@ -13,384 +14,390 @@
 from .invalid_certificate_exception import InvalidCertificateException
 
 
-# The following code is wrapped in a try because the Linux versions of Sublime
-# Text do not include the ssl module due to the fact that different distros
-# have different versions
-try:
-    import ssl
+class ValidatingHTTPSConnection(DebuggableHTTPConnection):
 
-    class ValidatingHTTPSConnection(DebuggableHTTPConnection):
+    """
+    A custom HTTPConnection class that validates SSL certificates, and
+    allows proxy authentication for HTTPS connections.
+    """
 
-        """
-        A custom HTTPConnection class that validates SSL certificates, and
-        allows proxy authentication for HTTPS connections.
-        """
+    default_port = HTTPS_PORT
+
+    response_class = DebuggableHTTPSResponse
+    _debug_protocol = 'HTTPS'
 
-        default_port = HTTPS_PORT
+    def __init__(self, host, port=None, ca_certs=None, extra_ca_certs=None, **kwargs):
+        passed_args = {}
+        if 'timeout' in kwargs:
+            passed_args['timeout'] = kwargs['timeout']
+        if 'debug' in kwargs:
+            passed_args['debug'] = kwargs['debug']
+        DebuggableHTTPConnection.__init__(self, host, port, **passed_args)
 
-        response_class = DebuggableHTTPSResponse
-        _debug_protocol = 'HTTPS'
+        self.passwd = kwargs.get('passwd')
 
-        # The ssl.SSLContext() for the connection - Python 3 only
-        ctx = None
+        if 'user_agent' in kwargs:
+            self.user_agent = kwargs['user_agent']
 
-        def __init__(self, host, port=None, key_file=None, cert_file=None, ca_certs=None, **kwargs):
-            passed_args = {}
-            if 'timeout' in kwargs:
-                passed_args['timeout'] = kwargs['timeout']
-            if 'debug' in kwargs:
-                passed_args['debug'] = kwargs['debug']
-            DebuggableHTTPConnection.__init__(self, host, port, **passed_args)
+        # build ssl context
 
-            self.passwd = kwargs.get('passwd')
-            self.key_file = key_file
-            self.cert_file = cert_file
+        context = ssl.SSLContext(
+            ssl.PROTOCOL_TLS_CLIENT if hasattr(ssl, 'PROTOCOL_TLS_CLIENT') else ssl.PROTOCOL_SSLv23)
+
+        if hasattr(context, 'minimum_version'):
+            context.minimum_version = ssl.TLSVersion.TLSv1
+        else:
+            context.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 \
+                | ssl.OP_NO_COMPRESSION | ssl.OP_CIPHER_SERVER_PREFERENCE
+
+        context.verify_mode = ssl.CERT_REQUIRED
+        if hasattr(context, 'check_hostname'):
+            context.check_hostname = True
+        if hasattr(context, 'post_handshake_auth'):
+            context.post_handshake_auth = True
+
+        if ca_certs:
+            context.load_verify_locations(ca_certs)
             self.ca_certs = ca_certs
-            if 'user_agent' in kwargs:
-                self.user_agent = kwargs['user_agent']
-            if self.ca_certs:
-                self.cert_reqs = ssl.CERT_REQUIRED
-            else:
-                self.cert_reqs = ssl.CERT_NONE
+        elif hasattr(context, 'load_default_certs'):
+            context.load_default_certs(ssl.Purpose.SERVER_AUTH)
+            self.ca_certs = "OS native store"
+        else:
+            raise InvalidCertificateException(self.host, self.port, "CA missing")
 
-        def get_valid_hosts_for_cert(self, cert):
-            """
-            Returns a list of valid hostnames for an SSL certificate
+        if extra_ca_certs:
+            try:
+                context.load_verify_locations(extra_ca_certs)
+            except Exception:
+                pass
 
-            :param cert: A dict from SSLSocket.getpeercert()
+        self._context = context
 
-            :return: An array of hostnames
-            """
+    def get_valid_hosts_for_cert(self, cert):
+        """
+        Returns a list of valid hostnames for an SSL certificate
 
-            if 'subjectAltName' in cert:
-                return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns']
-            else:
-                return [x[0][1] for x in cert['subject'] if x[0][0].lower() == 'commonname']
+        :param cert: A dict from SSLSocket.getpeercert()
 
-        def validate_cert_host(self, cert, hostname):
-            """
-            Checks if the cert is valid for the hostname
+        :return: An array of hostnames
+        """
 
-            :param cert: A dict from SSLSocket.getpeercert()
+        if 'subjectAltName' in cert:
+            return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns']
+        else:
+            return [x[0][1] for x in cert['subject'] if x[0][0].lower() == 'commonname']
 
-            :param hostname: A string hostname to check
+    def validate_cert_host(self, cert, hostname):
+        """
+        Checks if the cert is valid for the hostname
 
-            :return: A boolean if the cert is valid for the hostname
-            """
+        :param cert: A dict from SSLSocket.getpeercert()
 
-            hosts = self.get_valid_hosts_for_cert(cert)
-            for host in hosts:
-                host_re = host.replace('.', r'\.').replace('*', r'[^.]*')
-                if re.search('^%s$' % (host_re,), hostname, re.I):
-                    return True
-            return False
+        :param hostname: A string hostname to check
 
-        # Compatibility for python 3.3 vs 3.8
-        #   python 3.8 replaced _set_hostport() by _get_hostport()
-        if not hasattr(DebuggableHTTPConnection, '_set_hostport'):
+        :return: A boolean if the cert is valid for the hostname
+        """
 
-            def _set_hostport(self, host, port):
-                (self.host, self.port) = self._get_hostport(host, port)
-                self._validate_host(self.host)
+        hosts = self.get_valid_hosts_for_cert(cert)
+        for host in hosts:
+            host_re = host.replace('.', r'\.').replace('*', r'[^.]*')
+            if re.search('^%s$' % (host_re,), hostname, re.I):
+                return True
+        return False
 
-        def _tunnel(self):
-            """
-            This custom _tunnel method allows us to read and print the debug
-            log for the whole response before throwing an error, and adds
-            support for proxy authentication
-            """
+    # Compatibility shim for Python 3.3 vs 3.8:
+    #   Python 3.8 replaced _set_hostport() with _get_hostport()
+    if not hasattr(DebuggableHTTPConnection, '_set_hostport'):
 
-            self._proxy_host = self.host
-            self._proxy_port = self.port
-            self._set_hostport(self._tunnel_host, self._tunnel_port)
+        def _set_hostport(self, host, port):
+            (self.host, self.port) = self._get_hostport(host, port)
+            self._validate_host(self.host)
 
-            self._tunnel_headers['Host'] = "%s:%s" % (self.host, self.port)
-            self._tunnel_headers['User-Agent'] = self.user_agent
-            self._tunnel_headers['Proxy-Connection'] = 'Keep-Alive'
+    def _tunnel(self):
+        """
+        This custom _tunnel method allows us to read and print the debug
+        log for the whole response before throwing an error, and adds
+        support for proxy authentication
+        """
 
-            request = "CONNECT %s:%d HTTP/1.1\r\n" % (self.host, self.port)
-            for header, value in self._tunnel_headers.items():
-                request += "%s: %s\r\n" % (header, value)
-            request += "\r\n"
+        self._proxy_host = self.host
+        self._proxy_port = self.port
+        self._set_hostport(self._tunnel_host, self._tunnel_port)
 
-            request = bytes(request, 'iso-8859-1')
+        self._tunnel_headers['Host'] = "%s:%s" % (self.host, self.port)
+        self._tunnel_headers['User-Agent'] = self.user_agent
+        self._tunnel_headers['Proxy-Connection'] = 'Keep-Alive'
 
-            self.send(request)
+        request = "CONNECT %s:%d HTTP/1.1\r\n" % (self.host, self.port)
+        for header, value in self._tunnel_headers.items():
+            request += "%s: %s\r\n" % (header, value)
+        request += "\r\n"
 
-            response = self.response_class(self.sock, method=self._method)
-            (version, code, message) = response._read_status()
+        request = bytes(request, 'iso-8859-1')
 
-            status_line = "%s %s %s" % (version, code, message.rstrip())
-            headers = [status_line]
+        self.send(request)
 
-            content_length = 0
-            close_connection = False
-            while True:
-                line = response.fp.readline()
+        response = self.response_class(self.sock, method=self._method)
+        (version, code, message) = response._read_status()
 
-                line = line.decode('iso-8859-1')
+        status_line = "%s %s %s" % (version, code, message.rstrip())
+        headers = [status_line]
 
-                if line == '\r\n':
-                    break
+        content_length = 0
+        close_connection = False
+        while True:
+            line = response.fp.readline()
 
-                headers.append(line.rstrip())
+            line = line.decode('iso-8859-1')
 
-                parts = line.rstrip().split(': ', 1)
-                name = parts[0].lower()
-                value = parts[1].lower().strip()
-                if name == 'content-length':
-                    content_length = int(value)
+            if line == '\r\n':
+                break
 
-                if name in ['connection', 'proxy-connection'] and value == 'close':
-                    close_connection = True
+            headers.append(line.rstrip())
 
-            if self.debuglevel in [-1, 5]:
-                indented_headers = '\n  '.join(headers)
-                console_write(
-                    '''
-                    Urllib %s Debug Read
-                      %s
-                    ''',
-                    (self._debug_protocol, indented_headers)
-                )
+            parts = line.rstrip().split(': ', 1)
+            name = parts[0].lower()
+            value = parts[1].lower().strip()
+            if name == 'content-length':
+                content_length = int(value)
 
-            # Handle proxy auth for SSL connections since regular urllib punts on this
-            if code == 407 and self.passwd and 'Proxy-Authorization' not in self._tunnel_headers:
-                if content_length:
-                    response._safe_read(content_length)
-
-                supported_auth_methods = {}
-                for line in headers:
-                    parts = line.split(': ', 1)
-                    if parts[0].lower() != 'proxy-authenticate':
-                        continue
-                    details = parts[1].split(' ', 1)
-                    supported_auth_methods[details[0].lower()] = details[1] if len(details) > 1 else ''
-
-                username, password = self.passwd.find_user_password(None, "%s:%s" % (
-                    self._proxy_host, self._proxy_port))
-
-                if 'digest' in supported_auth_methods:
-                    response_value = self.build_digest_response(
-                        supported_auth_methods['digest'], username, password)
-                    if response_value:
-                        self._tunnel_headers['Proxy-Authorization'] = "Digest %s" % response_value
-
-                elif 'basic' in supported_auth_methods:
-                    response_value = "%s:%s" % (username, password)
-                    response_value = base64.b64encode(response_value.encode('utf-8')).decode('utf-8')
-                    self._tunnel_headers['Proxy-Authorization'] = "Basic %s" % response_value.strip()
-
-                if 'Proxy-Authorization' in self._tunnel_headers:
-                    self.host = self._proxy_host
-                    self.port = self._proxy_port
-
-                    # If the proxy wanted the connection closed, we need to make a new connection
-                    if close_connection:
-                        self.sock.close()
-                        self.sock = socket.create_connection((self.host, self.port), self.timeout)
-
-                    return self._tunnel()
-
-            if code != 200:
-                self.close()
-                raise socket.error("Tunnel connection failed: %d %s" % (code, message.strip()))
-
-        def build_digest_response(self, fields, username, password):
-            """
-            Takes a Proxy-Authenticate: Digest header and creates a response
-            header
-
-            :param fields:
-                The string portion of the Proxy-Authenticate header after
-                "Digest "
-
-            :param username:
-                The username to use for the response
-
-            :param password:
-                The password to use for the response
-
-            :return:
-                None if invalid Proxy-Authenticate header, otherwise the
-                string of fields for the Proxy-Authorization: Digest header
-            """
-
-            fields = parse_keqv_list(parse_http_list(fields))
-
-            realm = fields.get('realm')
-            nonce = fields.get('nonce')
-            qop = fields.get('qop')
-            algorithm = fields.get('algorithm')
-            if algorithm:
-                algorithm = algorithm.lower()
-            opaque = fields.get('opaque')
-
-            if algorithm in ['md5', None]:
-                def md5hash(string):
-                    return hashlib.md5(string).hexdigest()
-                hash = md5hash
-
-            elif algorithm == 'sha':
-                def sha1hash(string):
-                    return hashlib.sha1(string).hexdigest()
-                hash = sha1hash
-
-            else:
-                return None
-
-            host_port = "%s:%s" % (self.host, self.port)
-
-            a1 = "%s:%s:%s" % (username, realm, password)
-            a2 = "CONNECT:%s" % host_port
-            ha1 = hash(a1)
-            ha2 = hash(a2)
-
-            if qop is None:
-                response = hash("%s:%s:%s" % (ha1, nonce, ha2))
-            elif qop == 'auth':
-                nc = '00000001'
-                cnonce = hash(os.urandom(8))[:8]
-                response = hash("%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2))
-            else:
-                return None
-
-            response_fields = {
-                'username': username,
-                'realm': realm,
-                'nonce': nonce,
-                'response': response,
-                'uri': host_port
-            }
-            if algorithm:
-                response_fields['algorithm'] = algorithm
-            if qop == 'auth':
-                response_fields['nc'] = nc
-                response_fields['cnonce'] = cnonce
-                response_fields['qop'] = qop
-            if opaque:
-                response_fields['opaque'] = opaque
-
-            return ', '.join(["%s=\"%s\"" % (field, response_fields[field]) for field in response_fields])
-
-        def connect(self):
-            """
-            Adds debugging and SSL certification validation
-            """
+            if name in ['connection', 'proxy-connection'] and value == 'close':
+                close_connection = True
 
-            if self.debuglevel == -1:
-                console_write(
-                    '''
-                    Urllib HTTPS Debug General
-                      Connecting to %s on port %s
-                    ''',
-                    (self.host, self.port)
-                )
+        if self.debuglevel in [-1, 5]:
+            indented_headers = '\n  '.join(headers)
+            console_write(
+                '''
+                Urllib %s Debug Read
+                  %s
+                ''',
+                (self._debug_protocol, indented_headers)
+            )
 
-            self.sock = socket.create_connection((self.host, self.port), self.timeout)
-            if self._tunnel_host:
-                self._tunnel()
+        # Handle proxy auth for SSL connections since regular urllib punts on this
+        if code == 407 and self.passwd and 'Proxy-Authorization' not in self._tunnel_headers:
+            if content_length:
+                response._safe_read(content_length)
+
+            supported_auth_methods = {}
+            for line in headers:
+                parts = line.split(': ', 1)
+                if parts[0].lower() != 'proxy-authenticate':
+                    continue
+                details = parts[1].split(' ', 1)
+                supported_auth_methods[details[0].lower()] = details[1] if len(details) > 1 else ''
+
+            username, password = self.passwd.find_user_password(None, "%s:%s" % (
+                self._proxy_host, self._proxy_port))
+
+            if 'digest' in supported_auth_methods:
+                response_value = self.build_digest_response(
+                    supported_auth_methods['digest'], username, password)
+                if response_value:
+                    self._tunnel_headers['Proxy-Authorization'] = "Digest %s" % response_value
+
+            elif 'basic' in supported_auth_methods:
+                response_value = "%s:%s" % (username, password)
+                response_value = base64.b64encode(response_value.encode('utf-8')).decode('utf-8')
+                self._tunnel_headers['Proxy-Authorization'] = "Basic %s" % response_value.strip()
+
+            if 'Proxy-Authorization' in self._tunnel_headers:
+                self.host = self._proxy_host
+                self.port = self._proxy_port
+
+                # If the proxy wanted the connection closed, we need to make a new connection
+                if close_connection:
+                    self.sock.close()
+                    self.sock = socket.create_connection((self.host, self.port), self.timeout)
+
+                return self._tunnel()
+
+        if code != 200:
+            self.close()
+            raise socket.error("Tunnel connection failed: %d %s" % (code, message.strip()))
+
+    def build_digest_response(self, fields, username, password):
+        """
+        Takes a Proxy-Authenticate: Digest header and creates a response
+        header
 
-            if self.debuglevel == -1:
-                console_write(
-                    '''
-                    Urllib HTTPS Debug General
-                      Upgrading connection to SSL using CA certs file at %s
-                    ''',
-                    self.ca_certs.decode(sys.getfilesystemencoding())
-                )
+        :param fields:
+            The string portion of the Proxy-Authenticate header after
+            "Digest "
 
-            hostname = self.host.split(':', 0)[0]
-
-            proto = ssl.PROTOCOL_SSLv23
-            if sys.version_info >= (3, 6):
-                proto = ssl.PROTOCOL_TLS
-            self.ctx = ssl.SSLContext(proto)
-            if sys.version_info < (3, 7):
-                self.ctx.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3
-            else:
-                self.ctx.minimum_version = ssl.TLSVersion.TLSv1
-            self.ctx.verify_mode = self.cert_reqs
-            self.ctx.load_verify_locations(self.ca_certs)
-            # We don't call load_cert_chain() with self.key_file and self.cert_file
-            # since that is for servers, and this code only supports client mode
-            if self.debuglevel == -1:
-                console_write(
-                    '''
-                      Using hostname "%s" for TLS SNI extension
-                    ''',
-                    hostname,
-                    indent='  ',
-                    prefix=False
-                )
-            self.sock = self.ctx.wrap_socket(
-                self.sock,
-                server_hostname=hostname
+        :param username:
+            The username to use for the response
+
+        :param password:
+            The password to use for the response
+
+        :return:
+            None if invalid Proxy-Authenticate header, otherwise the
+            string of fields for the Proxy-Authorization: Digest header
+        """
+
+        fields = parse_keqv_list(parse_http_list(fields))
+
+        realm = fields.get('realm')
+        nonce = fields.get('nonce')
+        qop = fields.get('qop')
+        algorithm = fields.get('algorithm')
+        if algorithm:
+            algorithm = algorithm.lower()
+        opaque = fields.get('opaque')
+
+        if algorithm in ['md5', None]:
+            def md5hash(string):
+                return hashlib.md5(string).hexdigest()
+            hash = md5hash
+
+        elif algorithm == 'sha':
+            def sha1hash(string):
+                return hashlib.sha1(string).hexdigest()
+            hash = sha1hash
+
+        else:
+            return None
+
+        host_port = "%s:%s" % (self.host, self.port)
+
+        a1 = "%s:%s:%s" % (username, realm, password)
+        a2 = "CONNECT:%s" % host_port
+        ha1 = hash(a1)
+        ha2 = hash(a2)
+
+        if qop is None:
+            response = hash("%s:%s:%s" % (ha1, nonce, ha2))
+        elif qop == 'auth':
+            nc = '00000001'
+            cnonce = hash(os.urandom(8))[:8]
+            response = hash("%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2))
+        else:
+            return None
+
+        response_fields = {
+            'username': username,
+            'realm': realm,
+            'nonce': nonce,
+            'response': response,
+            'uri': host_port
+        }
+        if algorithm:
+            response_fields['algorithm'] = algorithm
+        if qop == 'auth':
+            response_fields['nc'] = nc
+            response_fields['cnonce'] = cnonce
+            response_fields['qop'] = qop
+        if opaque:
+            response_fields['opaque'] = opaque
+
+        return ', '.join(["%s=\"%s\"" % (field, response_fields[field]) for field in response_fields])
+
+    def connect(self):
+        """
+        Adds debugging and SSL certification validation
+        """
+
+        if self.debuglevel == -1:
+            console_write(
+                '''
+                Urllib HTTPS Debug General
+                  Connecting to %s on port %s
+                ''',
+                (self.host, self.port)
+            )
+
+        self.sock = socket.create_connection((self.host, self.port), self.timeout)
+        if self._tunnel_host:
+            self._tunnel()
+
+        if self.debuglevel == -1:
+            console_write(
+                '''
+                Urllib HTTPS Debug General
+                  Upgrading connection to SSL using CA certs from %s
+                ''',
+                self.ca_certs
+            )
+
+        hostname = self.host.split(':', 0)[0]
+
+        # load_cert_chain() is deliberately not called: client certificates are
+        # not supported here, and that mechanism is primarily for server-side use
+        if self.debuglevel == -1:
+            console_write(
+                '''
+                  Using hostname "%s" for TLS SNI extension
+                ''',
+                hostname,
+                indent='  ',
+                prefix=False
             )
+        self.sock = self._context.wrap_socket(
+            self.sock,
+            server_hostname=hostname
+        )
+
+        if self.debuglevel == -1:
+            cipher_info = self.sock.cipher()
+            console_write(
+                '''
+                  Successfully upgraded connection to %s:%s with SSL
+                  Using %s with cipher %s
+                ''',
+                (self.host, self.port, cipher_info[1], cipher_info[0]),
+                indent='  ',
+                prefix=False
+            )
+
+        # This debugs and validates the SSL certificate
+        if self._context.verify_mode & ssl.CERT_REQUIRED:
+            cert = self.sock.getpeercert()
 
             if self.debuglevel == -1:
-                cipher_info = self.sock.cipher()
+                subjectMap = {
+                    'organizationName': 'O',
+                    'commonName': 'CN',
+                    'organizationalUnitName': 'OU',
+                    'countryName': 'C',
+                    'serialNumber': 'serialNumber',
+                    'commonName': 'CN',
+                    'localityName': 'L',
+                    'stateOrProvinceName': 'S',
+                    '1.3.6.1.4.1.311.60.2.1.2': 'incorporationState',
+                    '1.3.6.1.4.1.311.60.2.1.3': 'incorporationCountry'
+                }
+                subject_list = list(cert['subject'])
+                subject_list.reverse()
+                subject_parts = []
+                for pair in subject_list:
+                    if pair[0][0] in subjectMap:
+                        field_name = subjectMap[pair[0][0]]
+                    else:
+                        field_name = pair[0][0]
+                    subject_parts.append(field_name + '=' + pair[0][1])
+
                 console_write(
                     '''
-                      Successfully upgraded connection to %s:%s with SSL
-                      Using %s with cipher %s
+                      Server SSL certificate:
+                        subject: %s
                     ''',
-                    (self.host, self.port, cipher_info[1], cipher_info[0]),
+                    ','.join(subject_parts),
                     indent='  ',
                     prefix=False
                 )
-
-            # This debugs and validates the SSL certificate
-            if self.cert_reqs & ssl.CERT_REQUIRED:
-                cert = self.sock.getpeercert()
-
+                if 'subjectAltName' in cert:
+                    alt_names = [c[1] for c in cert['subjectAltName']]
+                    alt_names = ', '.join(alt_names)
+                    console_write('    subject alt name: %s', alt_names, prefix=False)
+                if 'notAfter' in cert:
+                    console_write('    expire date: %s', cert['notAfter'], prefix=False)
+
+            if not self.validate_cert_host(cert, hostname):
                 if self.debuglevel == -1:
-                    subjectMap = {
-                        'organizationName': 'O',
-                        'commonName': 'CN',
-                        'organizationalUnitName': 'OU',
-                        'countryName': 'C',
-                        'serialNumber': 'serialNumber',
-                        'commonName': 'CN',
-                        'localityName': 'L',
-                        'stateOrProvinceName': 'S',
-                        '1.3.6.1.4.1.311.60.2.1.2': 'incorporationState',
-                        '1.3.6.1.4.1.311.60.2.1.3': 'incorporationCountry'
-                    }
-                    subject_list = list(cert['subject'])
-                    subject_list.reverse()
-                    subject_parts = []
-                    for pair in subject_list:
-                        if pair[0][0] in subjectMap:
-                            field_name = subjectMap[pair[0][0]]
-                        else:
-                            field_name = pair[0][0]
-                        subject_parts.append(field_name + '=' + pair[0][1])
-
-                    console_write(
-                        '''
-                          Server SSL certificate:
-                            subject: %s
-                        ''',
-                        ','.join(subject_parts),
-                        indent='  ',
-                        prefix=False
-                    )
-                    if 'subjectAltName' in cert:
-                        alt_names = [c[1] for c in cert['subjectAltName']]
-                        alt_names = ', '.join(alt_names)
-                        console_write('    subject alt name: %s', alt_names, prefix=False)
-                    if 'notAfter' in cert:
-                        console_write('    expire date: %s', cert['notAfter'], prefix=False)
-
-                if not self.validate_cert_host(cert, hostname):
-                    if self.debuglevel == -1:
-                        console_write('  Certificate INVALID', prefix=False)
-
-                    raise InvalidCertificateException(hostname, cert, 'hostname mismatch')
+                    console_write('  Certificate INVALID', prefix=False)
 
-                if self.debuglevel == -1:
-                    console_write('  Certificate validated for %s', hostname, prefix=False)
+                raise InvalidCertificateException(hostname, cert, 'hostname mismatch')
 
-except (ImportError):
-    pass
+            if self.debuglevel == -1:
+                console_write('  Certificate validated for %s', hostname, prefix=False)
diff --git a/app/lib/package_control/http_cache.py b/app/lib/package_control/http_cache.py
index aa20884..8ddbf10 100644
--- a/app/lib/package_control/http_cache.py
+++ b/app/lib/package_control/http_cache.py
@@ -17,31 +17,17 @@ def __init__(self, ttl):
         :param ttl:
             The number of seconds a cache entry should be valid for
         """
-        self.ttl = int(ttl)
+        self.ttl = float(ttl)
         self.base_path = os.path.join(sys_path.pc_cache_dir(), 'http_cache')
         os.makedirs(self.base_path, exist_ok=True)
 
-    def __del__(self):
-        """
-        Delete an existing instance.
-
-        Remove outdated cache files, when cache object is deleted.
-        All files which have been accessed by deleted instance keep untouched.
-        """
-
-        if self.ttl > 0:
-            self.clear(self.ttl)
-
-    def clear(self, ttl):
+    def prune(self):
         """
         Removes all cache entries older than the TTL
 
         :param ttl:
             The number of seconds a cache entry should be valid for
         """
-
-        ttl = int(ttl)
-
         try:
             for filename in os.listdir(self.base_path):
                 path = os.path.join(self.base_path, filename)
@@ -49,8 +35,7 @@ def clear(self, ttl):
                 # ignore to prevent an exception
                 if os.path.isdir(path):
                     continue
-                mtime = os.stat(path).st_mtime
-                if mtime < time.time() - ttl:
+                if os.stat(path).st_atime < time.time() - self.ttl:
                     os.unlink(path)
 
         except FileNotFoundError:
@@ -67,17 +52,15 @@ def get(self, key):
             The (binary) cached value, or False
         """
         try:
-            content = None
             cache_file = os.path.join(self.base_path, key)
-            with open(cache_file, 'rb') as fobj:
-                content = fobj.read()
 
             # update filetime to prevent unmodified cache files
             # from being deleted, if they are frequently accessed.
-            now = time.time()
-            os.utime(cache_file, (now, now))
+            # NOTE: try to rely on OS updating access time (`os.stat(path).st_atime`)
+            # os.utime(cache_file)
 
-            return content
+            with open(cache_file, 'rb') as fobj:
+                return fobj.read()
 
         except FileNotFoundError:
             return False
diff --git a/app/lib/package_control/providers/base_repository_provider.py b/app/lib/package_control/providers/base_repository_provider.py
index 6517534..f3d14a6 100644
--- a/app/lib/package_control/providers/base_repository_provider.py
+++ b/app/lib/package_control/providers/base_repository_provider.py
@@ -26,7 +26,7 @@ class BaseRepositoryProvider:
     """
 
     __slots__ = [
-        'broken_libriaries'
+        'broken_libriaries',
         'broken_packages',
         'failed_sources',
         'libraries',
diff --git a/app/lib/package_control/providers/channel_provider.py b/app/lib/package_control/providers/channel_provider.py
index fb26030..05a79d4 100644
--- a/app/lib/package_control/providers/channel_provider.py
+++ b/app/lib/package_control/providers/channel_provider.py
@@ -28,8 +28,9 @@ class ChannelProvider:
 
     The current channel/repository infrastructure caches repository info into
     the channel to improve the Package Control client performance. This also
-    has the side effect of lessening the load on the GitHub and BitBucket APIs
-    and getting around not-infrequent HTTP 503 errors from those APIs.
+    has the side effect of lessening the load on the GitHub, GitLab and
+    BitBucket APIs and getting around not-infrequent HTTP 503 errors from
+    those APIs.
 
     :param channel_url:
         The URL of the channel
diff --git a/app/lib/package_control/providers/provider_exception.py b/app/lib/package_control/providers/provider_exception.py
index e327f59..4964782 100644
--- a/app/lib/package_control/providers/provider_exception.py
+++ b/app/lib/package_control/providers/provider_exception.py
@@ -2,9 +2,6 @@ class ProviderException(Exception):
 
     """If a provider could not return information"""
 
-    def __bytes__(self):
-        return self.__str__().encode('utf-8')
-
 
 class GitProviderUserInfoException(ProviderException):
     """

From 52b072f47013b6e12f1f6f79eebe10954deb3dfc Mon Sep 17 00:00:00 2001
From: deathaxe 
Date: Sun, 16 Jun 2024 09:18:24 +0200
Subject: [PATCH 39/39] Sync with Package Control 4.0.7

---
 app/lib/package_control/downloaders/__init__.py    | 14 ++++++++------
 .../providers/json_repository_provider.py          |  4 ++--
 2 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/app/lib/package_control/downloaders/__init__.py b/app/lib/package_control/downloaders/__init__.py
index 5101fa0..158bd4b 100644
--- a/app/lib/package_control/downloaders/__init__.py
+++ b/app/lib/package_control/downloaders/__init__.py
@@ -13,12 +13,14 @@
     'wget': WgetDownloader
 }
 
-# oscrypto can fail badly on Linux in the Sublime Text 3 environment due to
-# trying to mix the statically-linked OpenSSL in plugin_host with the OpenSSL
-# loaded from the operating system. On Python 3.8 we dynamically link OpenSSL,
-# so it just needs to be configured properly, which is handled in
-# oscrypto_downloader.py.
-if sys.platform != 'linux' or sys.version_info[:2] != (3, 3) or sys.executable != 'python3':
+# oscrypto can fail badly
+# 1. on Linux in the Sublime Text 3 environment due to trying to mix the
+#    statically-linked OpenSSL in plugin_host with the OpenSSL loaded from the
+#    operating system. On Python 3.8 we dynamically link OpenSSL, so it just needs
+#    to be configured properly, which is handled in oscrypto_downloader.py.
+# 2. on macOS ARM platforms for reasons not yet understood. Due to the
+#    maintenance state of oscrypto, start fading it out by disabling it on Python 3.8 (ST4).
+if sys.platform != 'linux' and sys.version_info[:2] == (3, 3):
     try:
         from .oscrypto_downloader import OscryptoDownloader
         DOWNLOADERS['oscrypto'] = OscryptoDownloader
diff --git a/app/lib/package_control/providers/json_repository_provider.py b/app/lib/package_control/providers/json_repository_provider.py
index 824218f..30c517f 100644
--- a/app/lib/package_control/providers/json_repository_provider.py
+++ b/app/lib/package_control/providers/json_repository_provider.py
@@ -254,7 +254,7 @@ def get_libraries(self, invalid_sources=None):
             }
             allowed_release_keys = {  # todo: remove 'branch'
                 'base', 'version', 'sublime_text', 'platforms', 'python_versions',
-                'branch', 'tags', 'asset', 'url', 'sha256'
+                'branch', 'tags', 'asset', 'url', 'date', 'sha256'
             }
         else:
             allowed_library_keys = {
@@ -262,7 +262,7 @@ def get_libraries(self, invalid_sources=None):
             }
             allowed_release_keys = {
                 'base', 'version', 'sublime_text', 'platforms',
-                'branch', 'tags', 'url', 'sha256'
+                'branch', 'tags', 'url', 'date', 'sha256'
             }
 
         copied_library_keys = ('name', 'description', 'author', 'homepage', 'issues')