diff --git a/app/lib/ca_certs_legacy.py b/app/lib/ca_certs_legacy.py
index a5437c8..18d70cc 100644
--- a/app/lib/ca_certs_legacy.py
+++ b/app/lib/ca_certs_legacy.py
@@ -4,8 +4,6 @@
from .package_control.cmd import Cli
from .package_control.ca_certs import get_system_ca_bundle_path
-from .package_control.open_compat import open_compat, read_compat
-
def find_root_ca_cert(settings, domain):
@@ -50,8 +48,8 @@ def find_root_ca_cert(settings, domain):
def get_ca_cert_by_subject(settings, subject):
    bundle_path = get_system_ca_bundle_path(settings)
-    with open_compat(bundle_path, 'r') as f:
-        contents = read_compat(f)
+    with open(bundle_path, 'r', encoding='utf-8') as f:
+        contents = f.read()
    temp = []
diff --git a/app/lib/package_control/__init__.py b/app/lib/package_control/__init__.py
index 4cda0f1..1c7082d 100644
--- a/app/lib/package_control/__init__.py
+++ b/app/lib/package_control/__init__.py
@@ -1,2 +1,2 @@
-__version__ = "3.4.1"
-__version_info__ = (3, 4, 1)
+__version__ = "4.0.0-beta9"
+__version_info__ = (4, 0, 0, 'beta', 9)
diff --git a/app/lib/package_control/ca_certs.py b/app/lib/package_control/ca_certs.py
index 199a83d..0dacbcf 100644
--- a/app/lib/package_control/ca_certs.py
+++ b/app/lib/package_control/ca_certs.py
@@ -1,18 +1,38 @@
import os
-import time
import sys
+from . import sys_path
from .console_write import console_write
-from .open_compat import open_compat, read_compat
-from .sys_path import pc_cache_dir, user_config_dir
+from .downloaders.downloader_exception import DownloaderException
-from .deps.oscrypto import use_ctypes
-use_ctypes()
-from .deps.oscrypto import trust_list  # noqa
+try:
+    import certifi
+except ImportError:
+    certifi = None
+try:
+    from .deps.oscrypto import trust_list  # noqa
+    from .deps.oscrypto.errors import CACertsError
+except Exception as e:
+    trust_list = None
+    console_write('oscrypto trust lists unavailable - %s', e)
-ca_bundle_dir = None
-user_ca_bundle_dir = None
+
+MIN_BUNDLE_SIZE = 100
+"""
+The minimum file size a CA bundle must have to be considered valid.
+
+The size is calculated from the PEM certificate boundaries
+plus a minimal amount of encoded key data.
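+
+The BEGIN/END boundary lines and their trailing newlines account for
+54 of those bytes, leaving at least 46 bytes assumed for the key data: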
+ +``MIN_BUNDLE_SIZE = begin (27) + end (25) + newlines (2) + key (?)`` + +``` +-----BEGIN CERTIFICATE----- + +-----END CERTIFICATE----- +``` +""" def get_ca_bundle_path(settings): @@ -22,73 +42,84 @@ def get_ca_bundle_path(settings): :param settings: A dict to look in for the `debug` key + :raises: + OSError or IOError if CA bundle creation fails + :return: The filesystem path to the merged ca bundle path """ - ensure_ca_bundle_dir() + ca_bundle_dir = sys_path.pc_cache_dir() + if not ca_bundle_dir: + raise ValueError("Unknown Package Control cache directory") + + os.makedirs(ca_bundle_dir, exist_ok=True) - system_ca_bundle_path = get_system_ca_bundle_path(settings) + system_ca_bundle_path = get_system_ca_bundle_path(settings, ca_bundle_dir) user_ca_bundle_path = get_user_ca_bundle_path(settings) merged_ca_bundle_path = os.path.join(ca_bundle_dir, 'merged-ca-bundle.crt') - - merged_missing = not os.path.exists(merged_ca_bundle_path) - merged_empty = (not merged_missing) and os.stat(merged_ca_bundle_path).st_size == 0 - - regenerate = merged_missing or merged_empty - if system_ca_bundle_path and not merged_missing: - regenerate = regenerate or os.path.getmtime(system_ca_bundle_path) > os.path.getmtime(merged_ca_bundle_path) - if os.path.exists(user_ca_bundle_path) and not merged_missing: - regenerate = regenerate or os.path.getmtime(user_ca_bundle_path) > os.path.getmtime(merged_ca_bundle_path) + merged_ca_bundle_size = 0 + + try: + # file exists and is not empty + system_ca_bundle_exists = system_ca_bundle_path \ + and os.path.getsize(system_ca_bundle_path) > MIN_BUNDLE_SIZE + except FileNotFoundError: + system_ca_bundle_exists = False + + try: + # file exists and is not empty + user_ca_bundle_exists = user_ca_bundle_path \ + and os.path.getsize(user_ca_bundle_path) > MIN_BUNDLE_SIZE + except FileNotFoundError: + user_ca_bundle_exists = False + + regenerate = system_ca_bundle_exists or user_ca_bundle_exists + if regenerate: + try: + stats = os.stat(merged_ca_bundle_path) + except FileNotFoundError: + pass + else: + merged_ca_bundle_size = stats.st_size + # regenerate if merged file is empty + regenerate = merged_ca_bundle_size < MIN_BUNDLE_SIZE + # regenerate if system CA file is newer + if system_ca_bundle_exists and not regenerate: + regenerate = os.path.getmtime(system_ca_bundle_path) > stats.st_mtime + # regenerate if user CA file is newer + if user_ca_bundle_exists and not regenerate: + regenerate = os.path.getmtime(user_ca_bundle_path) > stats.st_mtime if regenerate: - with open(merged_ca_bundle_path, 'wb') as merged: - if system_ca_bundle_path: - with open_compat(system_ca_bundle_path, 'r') as system: - system_certs = read_compat(system).strip() - merged.write(system_certs.encode('utf-8')) + with open(merged_ca_bundle_path, 'w', encoding='utf-8') as merged: + if system_ca_bundle_exists: + with open(system_ca_bundle_path, 'r', encoding='utf-8') as system: + system_certs = system.read().strip() + merged.write(system_certs) if len(system_certs) > 0: - merged.write(b'\n') - if os.path.exists(user_ca_bundle_path): - with open_compat(user_ca_bundle_path, 'r') as user: - user_certs = read_compat(user).strip() - merged.write(user_certs.encode('utf-8')) + merged.write('\n') + if user_ca_bundle_exists: + with open(user_ca_bundle_path, 'r', encoding='utf-8') as user: + user_certs = user.read().strip() + merged.write(user_certs) if len(user_certs) > 0: - merged.write(b'\n') - if settings.get('debug'): - console_write( - u''' - Regenerated the merged CA bundle from the system and user CA bundles 
- ''' - ) + merged.write('\n') - return merged_ca_bundle_path - - -def get_user_ca_bundle_path(settings): - """ - Return the path to the user CA bundle, ensuring the file exists + merged_ca_bundle_size = merged.tell() - :param settings: - A dict to look in for `debug` - - :return: - The filesystem path to the user ca bundle - """ - - ensure_ca_bundle_dir() - - user_ca_bundle_path = os.path.join(user_ca_bundle_dir, 'Package Control.user-ca-bundle') - if not os.path.exists(user_ca_bundle_path): - if settings.get('debug'): + if merged_ca_bundle_size >= MIN_BUNDLE_SIZE and settings.get('debug'): console_write( - u''' - Created blank user CA bundle ''' + Regenerated the merged CA bundle from the system and user CA bundles (%d kB) + ''', + merged_ca_bundle_size / 1024 ) - open(user_ca_bundle_path, 'a').close() - return user_ca_bundle_path + if merged_ca_bundle_size < MIN_BUNDLE_SIZE: + raise DownloaderException("No CA bundle available for HTTPS!") + + return merged_ca_bundle_path def print_cert_subject(cert, reason): @@ -103,24 +134,24 @@ def print_cert_subject(cert, reason): if reason is None: console_write( - u''' + ''' Exported certificate: %s ''', cert.subject.human_friendly ) else: console_write( - u''' + ''' Skipped certificate: %s - reason %s ''', (cert.subject.human_friendly, reason) ) -def get_system_ca_bundle_path(settings): +def get_system_ca_bundle_path(settings, ca_bundle_dir): """ Get the filesystem path to the system CA bundle. On Linux it looks in a - number of predefined places, however on OS X it has to be programatically + number of predefined places, however on OS X it has to be programmatically exported from the SystemRootCertificates.keychain. Windows does not ship with a CA bundle, but also we use WinINet on Windows, so we don't need to worry about CA certs. @@ -128,54 +159,66 @@ def get_system_ca_bundle_path(settings): :param settings: A dict to look in for the `debug` key + :param ca_bundle_dir: + The filesystem path to the directory to store exported CA bundle in + :return: The full filesystem path to the .ca-bundle file, or False on error """ hours_to_cache = 7 * 24 - platform = sys.platform debug = settings.get('debug') ca_path = False - if platform == 'win32' or platform == 'darwin': - ensure_ca_bundle_dir() - ca_path, _ = trust_list._ca_path(ca_bundle_dir) - - exists = os.path.exists(ca_path) - is_empty = False - is_old = False - if exists: - stats = os.stat(ca_path) - is_empty = stats.st_size == 0 - # The bundle is old if it is a week or more out of date - is_old = stats.st_mtime < time.time() - (hours_to_cache * 60 * 60) - - if not exists or is_empty or is_old: - cert_callback = None - if debug: + if sys.platform == 'win32' or sys.platform == 'darwin': + if trust_list is not None: + ca_path, _ = trust_list._ca_path(ca_bundle_dir) + + if trust_list._cached_path_needs_update(ca_path, hours_to_cache): + cert_callback = None + if debug: + console_write( + ''' + Generating new CA bundle from system keychain + ''' + ) + cert_callback = print_cert_subject + + try: + trust_list.get_path(ca_bundle_dir, hours_to_cache, cert_callback) + if debug: + console_write( + ''' + Finished generating new CA bundle at %s (%d bytes) + ''', + (ca_path, os.stat(ca_path).st_size) + ) + + except (CACertsError, OSError) as e: + ca_path = False + if debug: + console_write( + ''' + Failed to generate new CA bundle. 
%s
+                    ''',
+                    e
+                )
+
+        elif debug:
            console_write(
-                u'''
-                Generating new CA bundle from system keychain
                '''
-            )
-            cert_callback = print_cert_subject
-            trust_list.get_path(ca_bundle_dir, hours_to_cache, cert_callback=cert_callback)
-            if debug:
-                console_write(
-                    u'''
-                    Finished generating new CA bundle at %s (%d bytes)
+                Found previously exported CA bundle at %s (%d bytes)
                ''',
                (ca_path, os.stat(ca_path).st_size)
            )
        elif debug:
            console_write(
-                u'''
-                Found previously exported CA bundle at %s (%d bytes)
+                '''
+                Unable to generate system CA bundle - oscrypto not available!
                ''',
-                (ca_path, os.stat(ca_path).st_size)
            )
    # Linux
@@ -190,43 +233,67 @@
            '/usr/local/share/certs/ca-root-nss.crt',
            '/etc/ssl/cert.pem'
        ]
-        # First try SSL_CERT_FILE
-        if 'SSL_CERT_FILE' in os.environ:
-            paths.insert(0, os.environ['SSL_CERT_FILE'])
+
+        # Prepend SSL_CERT_FILE only if it doesn't match ST4's certifi CA bundle.
+        # Otherwise we'd never pick up any OS level CA bundle.
+        # Check os.path.isfile() first, as os.path.samefile() raises if a path is missing.
+        ssl_cert_file = os.environ.get('SSL_CERT_FILE')
+        if ssl_cert_file and os.path.isfile(ssl_cert_file) \
+                and not (certifi and os.path.samefile(ssl_cert_file, certifi.where())):
+            paths.insert(0, ssl_cert_file)
+
        for path in paths:
-            if os.path.exists(path) and os.path.getsize(path) > 0:
+            if os.path.isfile(path) and os.path.getsize(path) > MIN_BUNDLE_SIZE:
                ca_path = path
                break
-        if debug and ca_path:
+        if debug:
+            if ca_path:
+                console_write(
+                    '''
+                    Found system CA bundle at %s (%d bytes)
+                    ''',
+                    (ca_path, os.stat(ca_path).st_size)
+                )
+            else:
+                console_write(
+                    '''
+                    Failed to find system CA bundle.
+                    '''
+                )
+
+    if ca_path is False and certifi is not None:
+        ca_path = certifi.where()
+        if debug:
            console_write(
-                u'''
-                Found system CA bundle at %s (%d bytes)
+                '''
+                Using CA bundle from "certifi %s" instead.
''', - (ca_path, os.stat(ca_path).st_size) + certifi.__version__ ) return ca_path -def ensure_ca_bundle_dir(): +def get_user_ca_bundle_path(settings): """ - Make sure we have a placed to save the merged-ca-bundle and system-ca-bundle + Return the path to the user CA bundle, ensuring the file exists + + :param settings: + A dict to look in for `debug` + + :return: + The full filesystem path to the .user-ca-bundle file, or False on error """ - # If the sublime module is available, we bind this value at run time - # since the sublime.packages_path() is not available at import time - global ca_bundle_dir - global user_ca_bundle_dir + user_ca_bundle = os.path.join(sys_path.user_config_dir(), 'Package Control.user-ca-bundle') + try: + open(user_ca_bundle, 'xb').close() + if settings.get('debug'): + console_write('Created blank user CA bundle') + except FileExistsError: + pass + except OSError as e: + user_ca_bundle = False + if settings.get('debug'): + console_write('Unable to create blank user CA bundle - %s', e) - if not ca_bundle_dir: - ca_bundle_dir = pc_cache_dir() - if not user_ca_bundle_dir: - user_ca_bundle_dir = user_config_dir() - if not os.path.exists(ca_bundle_dir): - try: - os.mkdir(ca_bundle_dir) - except EnvironmentError: - ca_bundle_dir = '/var/tmp/package_control' - if not os.path.exists(ca_bundle_dir): - os.mkdir(ca_bundle_dir) + return user_ca_bundle diff --git a/app/lib/package_control/cache.py b/app/lib/package_control/cache.py index 5ea1b9b..1d84540 100644 --- a/app/lib/package_control/cache.py +++ b/app/lib/package_control/cache.py @@ -28,7 +28,7 @@ def get_cache(key, default=None): struct = _channel_repository_cache.get(key, {}) expires = struct.get('expires') if expires and expires > time.time(): - return struct.get('data') + return struct.get('data', default) return default diff --git a/app/lib/package_control/clients/bitbucket_client.py b/app/lib/package_control/clients/bitbucket_client.py index 0fb7ed5..4d61f30 100644 --- a/app/lib/package_control/clients/bitbucket_client.py +++ b/app/lib/package_control/clients/bitbucket_client.py @@ -1,13 +1,10 @@ import re +from urllib.parse import urlencode, quote -from ..versions import version_sort, version_process +from ..downloaders.downloader_exception import DownloaderException +from ..package_version import version_match_prefix from .json_api_client import JSONApiClient -try: - from urllib import quote -except (ImportError): - from urllib.parse import quote - # A predefined list of readme filenames to look for _readme_filenames = [ @@ -25,44 +22,52 @@ class BitBucketClient(JSONApiClient): - def make_tags_url(self, repo): + @staticmethod + def user_repo_branch(url): """ - Generate the tags URL for a BitBucket repo if the value passed is a BitBucket - repository URL + Extract the username, repo and branch name from the URL - :param repo: - The repository URL + :param url: + The URL to extract the info from, in one of the forms: + https://bitbucket.org/{user} + https://bitbucket.org/{user}/{repo} + https://bitbucket.org/{user}/{repo}.git + https://bitbucket.org/{user}/{repo}/src/{branch} :return: - The tags URL if repo was a BitBucket repo, otherwise False + A tuple of + (user name, repo name, branch name) or + (user name, repo name, None) or + (user name, None, None) or + (None, None, None) if no match. 
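+
+        For example (illustrative values):
+        ``https://bitbucket.org/user/repo/src/main`` => ``('user', 'repo', 'main')``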
""" - match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', repo) - if not match: - return False + match = re.match( + r'^https?://bitbucket\.org/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/src/([^#?]*[^/#?])/?|/?)|/?)$', + url + ) + if match: + return match.groups() - return 'https://bitbucket.org/%s#tags' % match.group(1) + return (None, None, None) - def make_branch_url(self, repo, branch): + @staticmethod + def repo_url(user_name, repo_name): """ - Generate the branch URL for a BitBucket repo if the value passed is a BitBucket + Generate the tags URL for a GitHub repo if the value passed is a GitHub repository URL - :param repo: - The repository URL + :param owener_name: + The repository owner name - :param branch: - The branch name + :param repo_name: + The repository name :return: - The branch URL if repo was a BitBucket repo, otherwise False + The repository URL of given owner and repo name """ - match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', repo) - if not match: - return False - - return 'https://bitbucket.org/%s/src/%s' % (match.group(1), quote(branch)) + return 'https://bitbucket.com/%s/%s' % (quote(user_name), quote(repo_name)) def download_info(self, url, tag_prefix=None): """ @@ -77,7 +82,8 @@ def download_info(self, url, tag_prefix=None): tag that is a valid semver version. :param tag_prefix: - If the URL is a tags URL, only match tags that have this prefix + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. :raises: DownloaderException: when there is an error downloading @@ -91,56 +97,129 @@ def download_info(self, url, tag_prefix=None): `date` - the ISO-8601 timestamp string when the version was published """ - tags_match = re.match('https?://bitbucket.org/([^/]+/[^#/]+)/?#tags$', url) + output = self.download_info_from_branch(url) + if output is None: + output = self.download_info_from_tags(url, tag_prefix) + return output - version = None - url_pattern = 'https://bitbucket.org/%s/get/%s.zip' + def download_info_from_branch(self, url, default_branch=None): + """ + Retrieve information about downloading a package - output = [] - if tags_match: - user_repo = tags_match.group(1) + :param url: + The URL of the repository, in one of the forms: + https://bitbucket.org/{user}/{repo} + https://bitbucket.org/{user}/{repo}/src/{branch} + + :param default_branch: + The branch to use, in case url is a repo url + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published + """ + + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: + return None + + user_repo = "%s/%s" % (user_name, repo_name) + + if branch is None: + branch = default_branch + if branch is None: + repo_info = self.fetch_json(self._api_url(user_repo)) + branch = repo_info['mainbranch'].get('name', 'master') + + branch_url = self._api_url(user_repo, '/refs/branches/%s' % branch) + branch_info = self.fetch_json(branch_url) + + timestamp = branch_info['target']['date'][0:19].replace('T', ' ') + version = re.sub(r'[\-: ]', '.', timestamp) + + return [self._make_download_info(user_repo, branch, version, timestamp)] + + def download_info_from_releases(self, url, asset_templates, 
+        """
+        BitBucket doesn't support releases the way GitHub and GitLab do.
+
+        It supports download assets, but those are not bound to tags or releases.
+
+        Version information could be extracted from file names,
+        but that's not how Package Control currently evaluates download assets.
+        """
+
+        return None
+
+    def download_info_from_tags(self, url, tag_prefix=None):
+        """
+        Retrieve information about downloading a package
+
+        :param url:
+            The URL of the repository, in one of the forms:
+              https://bitbucket.org/{user}/{repo}
+              https://bitbucket.org/{user}/{repo}/#tags
+            Grabs the info from the newest tag(s) that is a valid semver version.
+
+        :param tag_prefix:
+            If the URL is a tags URL, only match tags that have this prefix.
+            If tag_prefix is None, match only tags without prefix.
+
+        :raises:
+            DownloaderException: when there is an error downloading
+            ClientException: when there is an error parsing the response
-            tags_list = {}
-            tags_url = self._make_api_url(user_repo, '/refs/tags?pagelen=100')
+        :return:
+            None if no match, False if no commit, or a list of dicts with the
+            following keys:
+              `version` - the version number of the download
+              `url` - the download URL of a zip file of the package
+              `date` - the ISO-8601 timestamp string when the version was published
+        """
+
+        tags_match = re.match(r'https?://bitbucket\.org/([^/#?]+/[^/#?]+)/?(?:#tags)?$', url)
+        if not tags_match:
+            return None
+
+        def _get_releases(user_repo, tag_prefix, page_size=100):
+            used_versions = set()
+            query_string = urlencode({'pagelen': page_size})
+            tags_url = self._api_url(user_repo, '/refs/tags?%s' % query_string)
            while tags_url:
                tags_json = self.fetch_json(tags_url)
                for tag in tags_json['values']:
-                    tags_list[tag['name']] = tag['target']['date'][0:19].replace('T', ' ')
-                tags_url = tags_json['next'] if 'next' in tags_json else None
-
-            tag_info = version_process(tags_list.keys(), tag_prefix)
-            tag_info = version_sort(tag_info, reverse=True)
-            if not tag_info:
-                return False
-
-            used_versions = {}
-            for info in tag_info:
-                version = info['version']
-                if version in used_versions:
-                    continue
-                tag = info['prefix'] + version
-                output.append({
-                    'url': url_pattern % (user_repo, tag),
-                    'version': version,
-                    'date': tags_list[tag]
-                })
-                used_versions[version] = True
-
-        else:
-            user_repo, branch = self._user_repo_branch(url)
-            if not user_repo:
-                return user_repo
-
-            branch_url = self._make_api_url(user_repo, '/refs/branches/%s' % branch)
-            branch_info = self.fetch_json(branch_url)
-
-            timestamp = branch_info['target']['date'][0:19].replace('T', ' ')
-
-            output.append({
-                'url': url_pattern % (user_repo, branch),
-                'version': re.sub(r'[\-: ]', '.', timestamp),
-                'date': timestamp
-            })
+                    version = version_match_prefix(tag['name'], tag_prefix)
+                    if version and version not in used_versions:
+                        used_versions.add(version)
+                        yield (
+                            version,
+                            tag['name'],
+                            tag['target']['date'][0:19].replace('T', ' ')
+                        )
+
+                tags_url = tags_json.get('next')
+
+        user_repo = tags_match.group(1)
+
+        max_releases = self.settings.get('max_releases', 0)
+        num_releases = 0
+
+        output = []
+        for release in sorted(_get_releases(user_repo, tag_prefix), reverse=True):
+            version, tag, timestamp = release
+
+            output.append(self._make_download_info(user_repo, tag, str(version), timestamp))
+
+            num_releases += version.is_final
+            if max_releases > 0 and num_releases >= max_releases:
+                break
        return output
@@ -166,52 +245,87 @@ def repo_info(self, url):
            `readme` - URL of the readme
            `issues` - URL of bug tracker
            `donate` - URL of a donate page
+
`default_branch` """ - user_repo, branch = self._user_repo_branch(url) - if not user_repo: - return user_repo + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: + return None - api_url = self._make_api_url(user_repo) + user_repo = "%s/%s" % (user_name, repo_name) + api_url = self._api_url(user_repo) + repo_info = self.fetch_json(api_url) - info = self.fetch_json(api_url) + if branch is None: + branch = repo_info['mainbranch'].get('name', 'master') - issues_url = u'https://bitbucket.org/%s/issues' % user_repo + issues_url = 'https://bitbucket.org/%s/issues' % user_repo - author = info['owner'].get('nickname') + author = repo_info['owner'].get('nickname') if author is None: - author = info['owner'].get('username') + author = repo_info['owner'].get('username') + + is_client = self.settings.get('min_api_calls', False) + readme_url = None if is_client else self._readme_url(user_repo, branch) return { - 'name': info['name'], - 'description': info['description'] or 'No description provided', - 'homepage': info['website'] or url, + 'name': repo_info['name'], + 'description': repo_info['description'] or 'No description provided', + 'homepage': repo_info['website'] or url, 'author': author, 'donate': None, - 'readme': self._readme_url(user_repo, branch), - 'issues': issues_url if info['has_issues'] else None + 'readme': readme_url, + 'issues': issues_url if repo_info['has_issues'] else None, + 'default_branch': branch } - def _main_branch_name(self, user_repo): + def user_info(self, url): + """ + For API compatibility with other clients. + + :param url: + The URL to the repository, in one of the forms: + https://bitbucket.org/{user} + + :return: + None """ - Fetch the name of the default branch + return None + + def _make_download_info(self, user_repo, ref_name, version, timestamp): + """ + Generate a download_info record :param user_repo: - The user/repo name to get the main branch for + The user/repo of the repository + + :param ref_name: + The git reference (branch, commit, tag) + + :param version: + The prefixed version to add to the record + + :param timestamp: + The timestamp the revision was created :raises: DownloaderException: when there is an error downloading ClientException: when there is an error parsing the response :return: - The name of the main branch - `master` or `default` + A dictionary with following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - main_branch_url = self._make_api_url(user_repo) - main_branch_info = self.fetch_json(main_branch_url, True) - return main_branch_info['mainbranch']['name'] + return { + 'url': 'https://bitbucket.org/%s/get/%s.zip' % (user_repo, ref_name), + 'version': version, + 'date': timestamp + } - def _make_api_url(self, user_repo, suffix=''): + def _api_url(self, user_repo, suffix=''): """ Generate a URL for the BitBucket API @@ -249,48 +363,20 @@ def _readme_url(self, user_repo, branch, prefer_cached=False): The URL to the readme file, or None """ - listing_url = self._make_api_url(user_repo, '/src/%s/?pagelen=100' % branch) + listing_url = self._api_url(user_repo, '/src/%s/?pagelen=100' % branch) - while listing_url: - root_dir_info = self.fetch_json(listing_url, prefer_cached) + try: + while listing_url: + root_dir_info = self.fetch_json(listing_url, prefer_cached) - for entry in root_dir_info['values']: - if entry['path'].lower() in _readme_filenames: - return 
'https://bitbucket.org/%s/raw/%s/%s' % (user_repo, branch, entry['path']) + for entry in root_dir_info['values']: + if entry['path'].lower() in _readme_filenames: + return 'https://bitbucket.org/%s/raw/%s/%s' % (user_repo, branch, entry['path']) - listing_url = root_dir_info['next'] if 'next' in root_dir_info else None + listing_url = root_dir_info['next'] if 'next' in root_dir_info else None - return None - - def _user_repo_branch(self, url): - """ - Extract the username/repo and branch name from the URL + except (DownloaderException) as e: + if 'HTTP error 404' not in str(e): + raise - :param url: - The URL to extract the info from, in one of the forms: - https://bitbucket.org/{user}/{repo} - https://bitbucket.org/{user}/{repo}/src/{branch} - - :raises: - DownloaderException: when there is an error downloading - ClientException: when there is an error parsing the response - - :return: - A tuple of (user/repo, branch name) or (None, None) if not matching - """ - - repo_match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/?$', url) - branch_match = re.match('https?://bitbucket.org/([^/]+/[^/]+)/src/([^/]+)/?$', url) - - if repo_match: - user_repo = repo_match.group(1) - branch = self._main_branch_name(user_repo) - - elif branch_match: - user_repo = branch_match.group(1) - branch = branch_match.group(2) - - else: - return (None, None) - - return (user_repo, branch) + return None diff --git a/app/lib/package_control/clients/client_exception.py b/app/lib/package_control/clients/client_exception.py index da90c1c..5fcbb76 100644 --- a/app/lib/package_control/clients/client_exception.py +++ b/app/lib/package_control/clients/client_exception.py @@ -1,17 +1,3 @@ -import sys - - class ClientException(Exception): """If a client could not fetch information""" - - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') diff --git a/app/lib/package_control/clients/github_client.py b/app/lib/package_control/clients/github_client.py index cf5b57a..9f66bb7 100644 --- a/app/lib/package_control/clients/github_client.py +++ b/app/lib/package_control/clients/github_client.py @@ -1,59 +1,58 @@ import re +from urllib.parse import urlencode, quote -try: - # Python 3 - from urllib.parse import urlencode, quote - str_cls = str -except (ImportError): - # Python 2 - from urllib import urlencode, quote - str_cls = unicode # noqa - -from ..versions import version_sort, version_process -from .json_api_client import JSONApiClient from ..downloaders.downloader_exception import DownloaderException +from ..package_version import version_match_prefix +from .json_api_client import JSONApiClient class GitHubClient(JSONApiClient): - def make_tags_url(self, repo): + @staticmethod + def user_repo_branch(url): """ - Generate the tags URL for a GitHub repo if the value passed is a GitHub - repository URL + Extract the username, repo and branch name from the URL - :param repo: - The repository URL + :param url: + The URL to extract the info from, in one of the forms: + https://github.com/{user} + https://github.com/{user}/{repo} + https://github.com/{user}/{repo}.git + https://github.com/{user}/{repo}/tree/{branch} :return: - The tags URL if repo was a GitHub repo, otherwise False + A tuple of + (user name, repo name, branch name) or + (user name, repo name, None) or + (user name, None, None) or + (None, None, None) if no match. 
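+
+        For example (illustrative values):
+        ``https://github.com/user/repo/tree/main`` => ``('user', 'repo', 'main')``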
""" + match = re.match( + r'^https?://github\.com/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/tree/([^#?]*[^/#?])/?|/?)|/?)$', + url + ) + if match: + return match.groups() - match = re.match('https?://github.com/([^/]+/[^/]+)/?$', repo) - if not match: - return False + return (None, None, None) - return 'https://github.com/%s/tags' % match.group(1) - - def make_branch_url(self, repo, branch): + @staticmethod + def repo_url(user_name, repo_name): """ - Generate the branch URL for a GitHub repo if the value passed is a GitHub + Generate the tags URL for a GitHub repo if the value passed is a GitHub repository URL - :param repo: - The repository URL + :param owener_name: + The repository owner name - :param branch: - The branch name + :param repo_name: + The repository name :return: - The branch URL if repo was a GitHub repo, otherwise False + The repository URL of given owner and repo name """ - match = re.match('https?://github.com/([^/]+/[^/]+)/?$', repo) - if not match: - return False - - return 'https://github.com/%s/tree/%s' % (match.group(1), quote(branch)) + return 'https://github.com/%s/%s' % (quote(user_name), quote(repo_name)) def download_info(self, url, tag_prefix=None): """ @@ -68,7 +67,8 @@ def download_info(self, url, tag_prefix=None): tag that is a valid semver version. :param tag_prefix: - If the URL is a tags URL, only match tags that have this prefix + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. :raises: DownloaderException: when there is an error downloading @@ -82,61 +82,281 @@ def download_info(self, url, tag_prefix=None): `date` - the ISO-8601 timestamp string when the version was published """ - tags_match = re.match('https?://github.com/([^/]+/[^/]+)/tags/?$', url) + output = self.download_info_from_branch(url) + if output is None: + output = self.download_info_from_tags(url, tag_prefix) + return output - version = None - url_pattern = 'https://codeload.github.com/%s/zip/%s' + def download_info_from_branch(self, url, default_branch=None): + """ + Retrieve information about downloading a package - output = [] - if tags_match: - user_repo = tags_match.group(1) - tags_url = self._make_api_url(user_repo, '/tags?per_page=100') - tags_list = self.fetch_json(tags_url) - tags = [tag['name'] for tag in tags_list] - tag_info = version_process(tags, tag_prefix) - tag_info = version_sort(tag_info, reverse=True) - if not tag_info: - return False - - used_versions = {} - for info in tag_info: - version = info['version'] - if version in used_versions: - continue - tag = info['prefix'] + version - output.append({ - 'url': url_pattern % (user_repo, tag), - 'commit': tag, - 'version': version - }) - used_versions[version] = True - - else: - user_repo, branch = self._user_repo_branch(url) - if not user_repo: - return user_repo + :param url: + The URL of the repository, in one of the forms: + https://github.com/{user}/{repo} + https://github.com/{user}/{repo}/tree/{branch} + + :param default_branch: + The branch to use, in case url is a repo url + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published + """ + + user_name, repo_name, branch = self.user_repo_branch(url) + if 
not repo_name: + return None + user_repo = "%s/%s" % (user_name, repo_name) + + if branch is None: + branch = default_branch if branch is None: - repo_info = self.fetch_json(self._make_api_url(user_repo)) + repo_info = self.fetch_json(self._api_url(user_repo)) branch = repo_info.get('default_branch', 'master') - output.append({ - 'url': url_pattern % (user_repo, branch), - 'commit': branch - }) + branch_url = self._api_url(user_repo, '/branches/%s' % branch) + branch_info = self.fetch_json(branch_url) + + timestamp = branch_info['commit']['commit']['committer']['date'][0:19].replace('T', ' ') + version = re.sub(r'[\-: ]', '.', timestamp) + + return [self._make_download_info(user_repo, branch, version, timestamp)] + + def download_info_from_releases(self, url, asset_templates, tag_prefix=None): + """ + Retrieve information about downloading a package + + :param url: + The URL of the repository, in one of the forms: + https://github.com/{user}/{repo} + https://github.com/{user}/{repo}/releases + Grabs the info from the newest tag(s) that is a valid semver version. + + :param tag_prefix: + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. + + :param asset_templates: + A list of tuples of asset template and download_info. + + [ + ( + "Name-${version}-st${st_build}-*-x??.sublime", + { + "platforms": ["windows-x64"], + "python_versions": ["3.3", "3.8"], + "sublime_text": ">=4107" + } + ) + ] + + Supported globs: + + * : any number of characters + ? : single character placeholder + + Supported variables are: + + ${platform} + A platform-arch string as given in "platforms" list. + A separate explicit release is evaluated for each platform. + If "platforms": ['*'] is specified, variable is set to "any". + + ${py_version} + Major and minor part of required python version without period. + One of "33", "38" or any other valid python version supported by ST. + + ${st_build} + Value of "st_specifier" stripped by leading operator + "*" => "any" + ">=4107" => "4107" + "<4107" => "4107" + "4107 - 4126" => "4107" + + ${version} + Resolved semver without tag prefix + (e.g.: tag st4107-1.0.5 => version 1.0.5) + + Note: is not replaced by this method, but by the ``ClientProvider``. + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + ``None`` if no match, ``False`` if no commit, or a list of dicts with the + following keys: + + - `version` - the version number of the download + - `url` - the download URL of a zip file of the package + - `date` - the ISO-8601 timestamp string when the version was published + - `platforms` - list of unicode strings with compatible platforms + - `python_versions` - list of compatible python versions + - `sublime_text` - sublime text version specifier + + Example: + + ```py + [ + { + "url": "https://server.com/file.zip", + "version": "1.0.0", + "date": "2023-10-21 12:00:00", + "platforms": ["windows-x64"], + "python_versions": ["3.8"], + "sublime_text": ">=4107" + }, + ... 
+ ] + ``` + """ + + match = re.match(r'https?://github\.com/([^/#?]+/[^/#?]+)(?:/releases)?/?$', url) + if not match: + return None + + def _get_releases(user_repo, tag_prefix=None, page_size=1000): + used_versions = set() + for page in range(10): + query_string = urlencode({'page': page * page_size, 'per_page': page_size}) + api_url = self._api_url(user_repo, '/releases?%s' % query_string) + releases = self.fetch_json(api_url) + + for release in releases: + if release['draft']: + continue + version = version_match_prefix(release['tag_name'], tag_prefix) + if not version or version in used_versions: + continue + + used_versions.add(version) + + yield ( + version, + release['published_at'][0:19].replace('T', ' '), + [ + ((a['label'], a['browser_download_url'])) + for a in release['assets'] + if a['state'] == 'uploaded' + ] + ) + + if len(releases) < page_size: + return + + asset_templates = self._expand_asset_variables(asset_templates) + + user_repo = match.group(1) + max_releases = self.settings.get('max_releases', 0) + num_releases = [0] * len(asset_templates) + + output = [] + + for release in _get_releases(user_repo, tag_prefix): + version, timestamp, assets = release + + version_string = str(version) + + for idx, (pattern, selectors) in enumerate(asset_templates): + if max_releases > 0 and num_releases[idx] >= max_releases: + continue + + pattern = pattern.replace('${version}', version_string) + pattern = pattern.replace('.', r'\.') + pattern = pattern.replace('?', r'.') + pattern = pattern.replace('*', r'.*?') + regex = re.compile(pattern) + + for asset_name, asset_url in assets: + if not regex.match(asset_name): + continue - for release in output: - query_string = urlencode({'sha': release['commit'], 'per_page': 1}) - commit_url = self._make_api_url(user_repo, '/commits?%s' % query_string) - commit_info = self.fetch_json(commit_url) + info = {'url': asset_url, 'version': version_string, 'date': timestamp} + info.update(selectors) + output.append(info) + num_releases[idx] += version.is_final + break - timestamp = commit_info[0]['commit']['committer']['date'][0:19].replace('T', ' ') + if max_releases > 0 and min(num_releases) >= max_releases: + break - if 'version' not in release: - release['version'] = re.sub(r'[\-: ]', '.', timestamp) - release['date'] = timestamp + return output - del release['commit'] + def download_info_from_tags(self, url, tag_prefix=None): + """ + Retrieve information about downloading a package + + :param url: + The URL of the repository, in one of the forms: + https://github.com/{user}/{repo} + https://github.com/{user}/{repo}/tags + Grabs the info from the newest tag(s) that is a valid semver version. + + :param tag_prefix: + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. 
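+            For example (illustrative): with ``tag_prefix='st4107-'``, tag
+            ``st4107-1.0.5`` resolves to version ``1.0.5``, while tag ``1.0.5`` is skipped.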
+ + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published + """ + + tags_match = re.match(r'https?://github\.com/([^/#?]+/[^/#?]+)(?:/tags)?/?$', url) + if not tags_match: + return None + + def _get_releases(user_repo, tag_prefix=None, page_size=1000): + used_versions = set() + for page in range(10): + query_string = urlencode({'page': page * page_size, 'per_page': page_size}) + tags_url = self._api_url(user_repo, '/tags?%s' % query_string) + tags_json = self.fetch_json(tags_url) + + for tag in tags_json: + version = version_match_prefix(tag['name'], tag_prefix) + if version and version not in used_versions: + used_versions.add(version) + yield (version, tag['name'], tag['commit']['url']) + + if len(tags_json) < page_size: + return + + user_repo = tags_match.group(1) + is_client = self.settings.get('min_api_calls', False) + max_releases = self.settings.get('max_releases', 0) + num_releases = 0 + + output = [] + for release in sorted(_get_releases(user_repo, tag_prefix), reverse=True): + version, tag, tag_url = release + + if is_client: + timestamp = '1970-01-01 00:00:00' + else: + tag_info = self.fetch_json(tag_url) + timestamp = tag_info['commit']['committer']['date'][0:19].replace('T', ' ') + + output.append(self._make_download_info(user_repo, tag, str(version), timestamp)) + + num_releases += version.is_final + if max_releases > 0 and num_releases >= max_releases: + break return output @@ -162,28 +382,21 @@ def repo_info(self, url): `readme` - URL of the readme `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_repo, branch = self._user_repo_branch(url) - if not user_repo: - return user_repo + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: + return None - api_url = self._make_api_url(user_repo) + user_repo = "%s/%s" % (user_name, repo_name) + api_url = self._api_url(user_repo) + repo_info = self.fetch_json(api_url) - info = self.fetch_json(api_url) if branch is None: - branch = info.get('default_branch', 'master') - - output = self._extract_repo_info(info) - output['readme'] = None + branch = repo_info.get('default_branch', 'master') - readme_info = self._readme_info(user_repo, branch) - if not readme_info: - return output - - output['readme'] = 'https://raw.githubusercontent.com/%s/%s/%s' % ( - user_repo, branch, readme_info['path']) - return output + return self._extract_repo_info(branch, repo_info) def user_info(self, url): """ @@ -207,9 +420,10 @@ def user_info(self, url): `readme` - URL of the readme `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_match = re.match('https?://github.com/([^/]+)/?$', url) + user_match = re.match(r'https?://github\.com/([^/#?]+)/?$', url) if user_match is None: return None @@ -218,26 +432,18 @@ def user_info(self, url): repos_info = self.fetch_json(api_url) - output = [] - for info in repos_info: - user_repo = '%s/%s' % (user, info['name']) - branch = info.get('default_branch', 'master') - - repo_output = self._extract_repo_info(info) - repo_output['readme'] = None - - readme_info = self._readme_info(user_repo, branch) - if readme_info: - repo_output['readme'] = 
'https://raw.githubusercontent.com/%s/%s/%s' % (
-                    user_repo, branch, readme_info['path'])
+        return [
+            self._extract_repo_info(info.get('default_branch', 'master'), info)
+            for info in repos_info
+        ]
-            output.append(repo_output)
-        return output
-
-    def _extract_repo_info(self, result):
+    def _extract_repo_info(self, branch, result):
        """
        Extracts information about a repository from the API result
+        :param branch:
+            The branch to return data from
+
        :param result:
            A dict representing the data returned from the GitHub API
@@ -247,22 +453,65 @@
            `description`
            `homepage` - URL of the homepage
            `author`
+            `readme` - URL of the readme
            `issues` - URL of bug tracker
            `donate` - URL of a donate page
+            `default_branch`
        """
-        issues_url = u'https://github.com/%s/%s/issues' % (result['owner']['login'], result['name'])
+        user_name = result['owner']['login']
+        repo_name = result['name']
+        user_repo = '%s/%s' % (user_name, repo_name)
+
+        issues_url = None
+        if result['has_issues']:
+            issues_url = 'https://github.com/%s/issues' % user_repo
        return {
-            'name': result['name'],
+            'name': repo_name,
            'description': result['description'] or 'No description provided',
            'homepage': result['homepage'] or result['html_url'],
-            'author': result['owner']['login'],
-            'issues': issues_url if result['has_issues'] else None,
-            'donate': None
+            'author': user_name,
+            'readme': self._readme_url(user_repo, branch),
+            'issues': issues_url,
+            'donate': None,
+            'default_branch': branch
+        }
+
+    def _make_download_info(self, user_repo, ref_name, version, timestamp):
+        """
+        Generate a download_info record
+
+        :param user_repo:
+            The user/repo of the repository
+
+        :param ref_name:
+            The git reference (branch, commit, tag)
+
+        :param version:
+            The prefixed version to add to the record
+
+        :param timestamp:
+            The timestamp the revision was created
+
+        :raises:
+            DownloaderException: when there is an error downloading
+            ClientException: when there is an error parsing the response
+
+        :return:
+            A dictionary with following keys:
+              `version` - the version number of the download
+              `url` - the download URL of a zip file of the package
+              `date` - the ISO-8601 timestamp string when the version was published
+        """
+
+        return {
+            'url': 'https://codeload.github.com/%s/zip/%s' % (user_repo, ref_name),
+            'version': version,
+            'date': timestamp
        }
-    def _make_api_url(self, user_repo, suffix=''):
+    def _api_url(self, user_repo, suffix=''):
        """
        Generate a URL for the GitHub API
@@ -278,7 +527,7 @@
        return 'https://api.github.com/repos/%s%s' % (user_repo, suffix)
-    def _readme_info(self, user_repo, branch, prefer_cached=False):
+    def _readme_url(self, user_repo, branch, prefer_cached=False):
        """
        Fetches the raw GitHub API information about a readme
@@ -300,35 +549,15 @@
        """
        query_string = urlencode({'ref': branch})
-        readme_url = self._make_api_url(user_repo, '/readme?%s' % query_string)
-        try:
-            return self.fetch_json(readme_url, prefer_cached)
-        except (DownloaderException) as e:
-            if str_cls(e).find('HTTP error 404') != -1:
-                return None
-            raise
-
-    def _user_repo_branch(self, url):
-        """
-        Extract the username/repo and branch name from the URL
+        readme_url = self._api_url(user_repo, '/readme?%s' % query_string)
-        :param url:
-            The URL to extract the info from, in one of the forms:
-              https://github.com/{user}/{repo}
-              https://github.com/{user}/{repo}/tree/{branch}
-
-        :return:
-            A tuple of (user/repo, branch name) or (None, None) if no match
-        """
-
-        branch = None
-        branch_match = re.match('https?://github.com/[^/]+/[^/]+/tree/([^/]+)/?$', url)
-        if branch_match is not None:
-            branch = branch_match.group(1)
+        try:
+            readme_file = self.fetch_json(readme_url, prefer_cached).get('path')
+            if readme_file:
+                return 'https://raw.githubusercontent.com/%s/%s/%s' % (user_repo, branch, readme_file)
-        repo_match = re.match('https?://github.com/([^/]+/[^/]+)($|/.*$)', url)
-        if repo_match is None:
-            return (None, None)
+        except (DownloaderException) as e:
+            if 'HTTP error 404' not in str(e):
+                raise
-        user_repo = repo_match.group(1)
-        return (user_repo, branch)
+        return None
diff --git a/app/lib/package_control/clients/gitlab_client.py b/app/lib/package_control/clients/gitlab_client.py
index dc6ddfa..5f289e7 100644
--- a/app/lib/package_control/clients/gitlab_client.py
+++ b/app/lib/package_control/clients/gitlab_client.py
@@ -1,61 +1,61 @@
import re
+from urllib.parse import urlencode, quote
from ..downloaders.downloader_exception import DownloaderException
-from ..versions import version_process, version_sort
+from ..package_version import version_match_prefix
from .json_api_client import JSONApiClient
-try:
-    # Python 3
-    from urllib.parse import urlencode, quote
-
-    str_cls = str
-except (ImportError):
-    # Python 2
-    from urllib import urlencode, quote
-
-    str_cls = unicode  # noqa
-
class GitLabClient(JSONApiClient):
-    def make_tags_url(self, repo):
+
+    @staticmethod
+    def user_repo_branch(url):
        """
-        Generate the tags URL for a GitLab repo if the value passed is a GitLab
-        repository URL
+        Extract the username, repo and branch name from the URL
-        :param repo:
-            The repository URL
+        :param url:
+            The URL to extract the info from, in one of the forms:
+              https://gitlab.com/{user}
+              https://gitlab.com/{user}/{repo}
+              https://gitlab.com/{user}/{repo}.git
+              https://gitlab.com/{user}/{repo}/-/tree/{branch}
        :return:
-            The tags URL if repo was a GitLab repo, otherwise False
+            A tuple of
+              (user name, repo name, branch name) or
+              (user name, repo name, None) or
+              (user name, None, None) or
+              (None, None, None) if no match.
+
+            The branch value may be either a branch name or a commit.
        """
-        match = re.match('https?://gitlab.com/([^/]+/[^/]+)/?$', repo)
-        if not match:
-            return False
+        match = re.match(
+            r'^https?://gitlab\.com/([^/#?]+)(?:/([^/#?]+?)(?:\.git|/-/tree/([^#?]*[^/#?])/?|/?)|/?)$',
+            url
+        )
+        if match:
+            return match.groups()
-        return 'https://gitlab.com/%s/-/tags' % match.group(1)
+        return (None, None, None)
-    def make_branch_url(self, repo, branch):
+    @staticmethod
+    def repo_url(user_name, repo_name):
        """
-        Generate the branch URL for a GitLab repo if the value passed is a GitLab
-        repository URL
+        Generate the repository URL for a GitLab repo
+        from the given user and repo name
-        :param repo:
-            The repository URL
+        :param user_name:
+            The repository owner name
-        :param branch:
-            The branch name
+        :param repo_name:
+            The repository name
        :return:
-            The branch URL if repo was a GitLab repo, otherwise False
+            The repository URL for the given owner and repo name
        """
-        match = re.match('https?://gitlab.com/([^/]+/[^/]+)/?$', repo)
-        if not match:
-            return False
-
-        return 'https://gitlab.com/%s/-/tree/%s' % (match.group(1),
-                                                    quote(branch))
+        return 'https://gitlab.com/%s/%s' % (quote(user_name), quote(repo_name))
    def download_info(self, url, tag_prefix=None):
        """
@@ -70,7 +70,8 @@ def download_info(self, url, tag_prefix=None):
            tag that is a valid semver version.
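+            (Tries ``download_info_from_branch()`` first and falls back to
+            ``download_info_from_tags()``.)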
:param tag_prefix: - If the URL is a tags URL, only match tags that have this prefix + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. :raises: DownloaderException: when there is an error downloading @@ -84,94 +85,279 @@ def download_info(self, url, tag_prefix=None): `date` - the ISO-8601 timestamp string when the version was published """ - tags_match = re.match('https?://gitlab.com/([^/]+)/([^/]+)/-/tags/?$', - url) + output = self.download_info_from_branch(url) + if output is None: + output = self.download_info_from_tags(url, tag_prefix) + return output - version = None - url_pattern = 'https://gitlab.com/%s/-/archive/%s/%s-%s.zip' + def download_info_from_branch(self, url, default_branch=None): + """ + Retrieve information about downloading a package + + :param url: + The URL of the repository, in one of the forms: + https://gitlab.com/{user}/{repo} + https://gitlab.com/{user}/{repo}/-/tree/{branch} + + :param default_branch: + The branch to use, in case url is a repo url + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published + """ + + user_name, repo_name, branch = self.user_repo_branch(url) + if not repo_name: + return None + + repo_id = '%s%%2F%s' % (user_name, repo_name) + + if branch is None: + branch = default_branch + if branch is None: + repo_info = self.fetch_json(self._api_url(repo_id)) + branch = repo_info.get('default_branch', 'master') + + branch_url = self._api_url(repo_id, '/repository/branches/%s' % branch) + branch_info = self.fetch_json(branch_url) + + timestamp = branch_info['commit']['committed_date'][0:19].replace('T', ' ') + version = re.sub(r'[\-: ]', '.', timestamp) + + return [self._make_download_info(user_name, repo_name, branch, version, timestamp)] + + def download_info_from_releases(self, url, asset_templates, tag_prefix=None): + """ + Retrieve information about downloading a package + + :param url: + The URL of the repository, in one of the forms: + https://gitlab.com/{user}/{repo} + https://gitlab.com/{user}/{repo}/-/releases + Grabs the info from the newest tag(s) that is a valid semver version. + + :param tag_prefix: + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. + + :param asset_templates: + A list of tuples of asset template and download_info. + + [ + ( + "Name-${version}-st${st_build}-*-x??.sublime", + { + "platforms": ["windows-x64"], + "python_versions": ["3.3", "3.8"], + "sublime_text": ">=4107" + } + ) + ] + + Supported globs: + + * : any number of characters + ? : single character placeholder + + Supported variables are: + + ${platform} + A platform-arch string as given in "platforms" list. + A separate explicit release is evaluated for each platform. + If "platforms": ['*'] is specified, variable is set to "any". + + ${py_version} + Major and minor part of required python version without period. + One of "33", "38" or any other valid python version supported by ST. 
+ + ${st_build} + Value of "st_specifier" stripped by leading operator + "*" => "any" + ">=4107" => "4107" + "<4107" => "4107" + "4107 - 4126" => "4107" + + ${version} + Resolved semver without tag prefix + (e.g.: tag st4107-1.0.5 => version 1.0.5) + + Note: is not replaced by this method, but by the ``ClientProvider``. + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + ``None`` if no match, ``False`` if no commit, or a list of dicts with the + following keys: + + - `version` - the version number of the download + - `url` - the download URL of a zip file of the package + - `date` - the ISO-8601 timestamp string when the version was published + - `platforms` - list of unicode strings with compatible platforms + - `python_versions` - list of compatible python versions + - `sublime_text` - sublime text version specifier + + Example: + + ```py + [ + { + "url": "https://server.com/file.zip", + "version": "1.0.0", + "date": "2023-10-21 12:00:00", + "platforms": ["windows-x64"], + "python_versions": ["3.8"], + "sublime_text": ">=4107" + }, + ... + ] + ``` + """ + + match = re.match(r'https?://gitlab\.com/([^/#?]+)/([^/#?]+)(?:/-/releases)?/?$', url) + if not match: + return None + + def _get_releases(user_repo, tag_prefix=None, page_size=1000): + used_versions = set() + for page in range(10): + query_string = urlencode({'page': page * page_size, 'per_page': page_size}) + api_url = self._api_url(user_repo, '/releases?%s' % query_string) + releases = self.fetch_json(api_url) + + for release in releases: + version = version_match_prefix(release['tag_name'], tag_prefix) + if not version or version in used_versions: + continue + + used_versions.add(version) + + yield ( + version, + release['released_at'][0:19].replace('T', ' '), + [ + ((a['name'], a['direct_asset_url'])) + for a in release['assets']['links'] + ] + ) + + if len(releases) < page_size: + return + + user_name, repo_name = match.groups() + repo_id = '%s%%2F%s' % (user_name, repo_name) + + asset_templates = self._expand_asset_variables(asset_templates) + + max_releases = self.settings.get('max_releases', 0) + num_releases = [0] * len(asset_templates) output = [] - if tags_match: - (user_id, user_repo_type) = self._extract_user_id(tags_match.group(1)) - repo_id, _ = self._extract_repo_id_default_branch( - user_id, - tags_match.group(2), - 'users' if user_repo_type else 'groups' - ) - if repo_id is None: - return None + for release in _get_releases(repo_id, tag_prefix): + version, timestamp, assets = release - user_repo = '%s/%s' % (tags_match.group(1), tags_match.group(2)) - tags_url = self._make_api_url( - repo_id, - '/repository/tags?per_page=100' - ) - tags_list = self.fetch_json(tags_url) - tags = [tag['name'] for tag in tags_list] - tag_info = version_process(tags, tag_prefix) - tag_info = version_sort(tag_info, reverse=True) - if not tag_info: - return False - - used_versions = {} - for info in tag_info: - version = info['version'] - if version in used_versions: + version_string = str(version) + + for idx, (pattern, selectors) in enumerate(asset_templates): + if max_releases > 0 and num_releases[idx] >= max_releases: continue - tag = info['prefix'] + version - repo_name = user_repo.split('/')[1] - output.append({ - 'url': url_pattern % (user_repo, tag, repo_name, tag), - 'commit': tag, - 'version': version, - }) - used_versions[version] = True - - else: - user_repo, commit = self._user_repo_ref(url) - if not user_repo: - return user_repo - 
user, repo = user_repo.split('/') - (user_id, user_repo_type) = self._extract_user_id(user) - - repo_id, default_branch = self._extract_repo_id_default_branch( - user_id, - repo, - 'users' if user_repo_type else 'groups' - ) - if repo_id is None: - return None - - if commit is None: - commit = default_branch - - repo_name = user_repo.split('/')[1] - output.append({ - 'url': url_pattern % (user_repo, commit, repo_name, commit), - 'commit': commit - }) - - for release in output: - query_string = urlencode({ - 'ref_name': release['commit'], - 'per_page': 1 - }) - commit_url = self._make_api_url( - repo_id, - '/repository/commits?%s' % query_string - ) - commit_info = self.fetch_json(commit_url) - if not commit_info[0].get('commit'): - timestamp = commit_info[0]['committed_date'][0:19].replace('T', ' ') - else: - timestamp = commit_info[0]['commit']['committed_date'][0:19].replace('T', ' ') - if 'version' not in release: - release['version'] = re.sub(r'[\-: ]', '.', timestamp) - release['date'] = timestamp + pattern = pattern.replace('${version}', version_string) + pattern = pattern.replace('.', r'\.') + pattern = pattern.replace('?', r'.') + pattern = pattern.replace('*', r'.*?') + regex = re.compile(pattern) - del release['commit'] + for asset_name, asset_url in assets: + if not regex.match(asset_name): + continue + + info = {'url': asset_url, 'version': version_string, 'date': timestamp} + info.update(selectors) + output.append(info) + num_releases[idx] += version.is_final + break + + if max_releases > 0 and min(num_releases) >= max_releases: + break + + return output + + def download_info_from_tags(self, url, tag_prefix=None): + """ + Retrieve information about downloading a package + + :param url: + The URL of the repository, in one of the forms: + https://gitlab.com/{user}/{repo} + https://gitlab.com/{user}/{repo}/-/tags + Grabs the info from the newest tag(s) that is a valid semver version. + + :param tag_prefix: + If the URL is a tags URL, only match tags that have this prefix. + If tag_prefix is None, match only tags without prefix. 
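+
+            At most ``max_releases`` final (non-prerelease) versions are returned,
+            if the ``max_releases`` setting is greater than zero.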
+ + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + None if no match, False if no commit, or a list of dicts with the + following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published + """ + + tags_match = re.match(r'https?://gitlab\.com/([^/#?]+)/([^/#?]+)(?:/-/tags)?/?$', url) + if not tags_match: + return None + + def _get_releases(repo_id, tag_prefix=None, page_size=1000): + used_versions = set() + for page in range(10): + query_string = urlencode({'page': page * page_size, 'per_page': page_size}) + tags_url = self._api_url(repo_id, '/repository/tags?%s' % query_string) + tags_json = self.fetch_json(tags_url) + + for tag in tags_json: + version = version_match_prefix(tag['name'], tag_prefix) + if version and version not in used_versions: + used_versions.add(version) + yield ( + version, + tag['name'], + tag['commit']['committed_date'][0:19].replace('T', ' ') + ) + + if len(tags_json) < page_size: + return + + user_name, repo_name = tags_match.groups() + repo_id = '%s%%2F%s' % (user_name, repo_name) + + max_releases = self.settings.get('max_releases', 0) + num_releases = 0 + + output = [] + for release in sorted(_get_releases(repo_id, tag_prefix), reverse=True): + version, tag, timestamp = release + + output.append(self._make_download_info(user_name, repo_name, tag, str(version), timestamp)) + + num_releases += version.is_final + if max_releases > 0 and num_releases >= max_releases: + break return output @@ -182,9 +368,11 @@ def repo_info(self, url): The URL to the repository, in one of the forms: https://gitlab.com/{user}/{repo} https://gitlab.com/{user}/{repo}/-/tree/{branch} + :raises: DownloaderException: when there is an error downloading ClientException: when there is an error parsing the response + :return: None if no match, or a dict with the following keys: `name` @@ -194,41 +382,21 @@ def repo_info(self, url): `readme` - URL of the readme `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_repo, branch = self._user_repo_ref(url) - if not user_repo: - return user_repo - - user, repo = user_repo.split('/') - - (user_id, user_repo_type) = self._extract_user_id(user) - - repo_id, default_branch = self._extract_repo_id_default_branch( - user_id, - repo, - 'users' if user_repo_type else 'groups' - ) - if repo_id is None: + user_name, repo_name, branch = self.user_repo_branch(url) + if not user_name or not repo_name: return None - if branch is None: - branch = default_branch - - api_url = self._make_api_url(repo_id) - info = self.fetch_json(api_url) + repo_id = '%s%%2F%s' % (user_name, repo_name) + repo_url = self._api_url(repo_id) + repo_info = self.fetch_json(repo_url) - output = self._extract_repo_info(info) + if not branch: + branch = repo_info.get('default_branch', 'master') - if not output['readme']: - return output - - output['readme'] = 'https://gitlab.com/%s/-/%s/%s' % ( - user_repo, - branch, - output['readme'].split('/')[-1], - ) - return output + return self._extract_repo_info(branch, repo_info) def user_info(self, url): """ @@ -252,40 +420,33 @@ def user_info(self, url): `readme` - URL of the readme `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ - user_match = re.match('https?://gitlab.com/([^/]+)/?$', url) + user_match = re.match(r'https?://gitlab\.com/([^/#?]+)/?$', 
url) if user_match is None: return None user = user_match.group(1) - (user_id, user_repo_type) = self._extract_user_id(user) + user_id, user_repo_type = self._extract_user_id(user) api_url = 'https://gitlab.com/api/v4/%s/%s/projects' % ( 'users' if user_repo_type else 'groups', user_id) repos_info = self.fetch_json(api_url) - output = [] - for info in repos_info: - user_repo = '%s/%s' % (user, info['name']) - branch = info['default_branch'] - - repo_output = self._extract_repo_info(info) - - if repo_output['readme']: - repo_output['readme'] = 'https://gitlab.com/%s/-/raw/%s/%s' % ( - user_repo, - branch, - repo_output['readme'].split('/')[-1], - ) - output.append(repo_output) - return output + return [ + self._extract_repo_info(info.get('default_branch', 'master'), info) + for info in repos_info + ] - def _extract_repo_info(self, result): + def _extract_repo_info(self, branch, result): """ Extracts information about a repository from the API result + :param branch: + The branch to return data from + :param result: A dict representing the data returned from the GitLab API @@ -295,67 +456,85 @@ def _extract_repo_info(self, result): `description` `homepage` - URL of the homepage `author` + `readme` - URL of the homepage `issues` - URL of bug tracker `donate` - URL of a donate page + `default_branch` """ + user_name = result['owner']['username'] if result.get('owner') else result['namespace']['name'] + repo_name = result['name'] + user_repo = '%s/%s' % (user_name, repo_name) + + readme_url = None + if result['readme_url']: + readme_url = 'https://gitlab.com/%s/-/raw/%s/%s' % ( + user_repo, branch, result['readme_url'].split('/')[-1] + ) + return { - 'name': result['name'], + 'name': repo_name, 'description': result['description'] or 'No description provided', 'homepage': result['web_url'] or None, - 'readme': result['readme_url'] if result['readme_url'] else None, - 'author': result['owner']['username'] if result.get('owner') else result['namespace']['name'], + 'author': user_name, + 'readme': readme_url, 'issues': result.get('issues', None) if result.get('_links') else None, 'donate': None, + 'default_branch': branch } - def _make_api_url(self, project_id, suffix=''): + def _make_download_info(self, user_name, repo_name, ref_name, version, timestamp): """ - Generate a URL for the GitLab API + Generate a download_info record - :param user_repo: - The user/repo of the repository + :param user_name: + The owner of the repository - :param suffix: - The extra API path info to add to the URL + :param repo_name: + The name of the repository + + :param ref_name: + The git reference (branch, commit, tag) + + :param version: + The prefixed version to add to the record + + :param timestamp: + The timestamp the revision was created + + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response :return: - The API URL + A dictionary with following keys: + `version` - the version number of the download + `url` - the download URL of a zip file of the package + `date` - the ISO-8601 timestamp string when the version was published """ - return 'https://gitlab.com/api/v4/projects/%s%s' % (project_id, suffix) + return { + 'url': 'https://gitlab.com/%s/%s/-/archive/%s/%s-%s.zip' % ( + user_name, repo_name, ref_name, repo_name, ref_name), + 'version': version, + 'date': timestamp + } - def _user_repo_ref(self, url): + def _api_url(self, project_id, suffix=''): """ - Extract the username/repo and ref name from the URL + Generate a URL for the 
GitLab API - :param url: - The URL to extract the info from, in one of the forms: - https://gitlab.com/{user}/{repo} - https://gitlab.com/{user}/{repo}/-/tree/{ref} + :param user_repo: + The user/repo of the repository + + :param suffix: + The extra API path info to add to the URL :return: - A tuple of (user/repo, ref name) or (None, None) if no match. - The ref name may be a branch name or a commit + The API URL """ - branch = None - branch_match = re.match( - r'https?://gitlab.com/[^/]+/[^/]+/-/tree/([^/]+)/?$', - url - ) - if branch_match is not None: - branch = branch_match.group(1) - - repo_match = re.match( - r'https?://gitlab.com/([^/]+/[^/]+)($|/.*$)', - url - ) - if repo_match is None: - return (None, None) - - user_repo = repo_match.group(1) - return (user_repo, branch) + return 'https://gitlab.com/api/v4/projects/%s%s' % (project_id, suffix) def _extract_user_id(self, username): """ @@ -372,7 +551,7 @@ def _extract_user_id(self, username): try: repos_info = self.fetch_json(user_url) except (DownloaderException) as e: - if str_cls(e).find('HTTP error 404') != -1: + if str(e).find('HTTP error 404') != -1: return self._extract_group_id(username) raise @@ -396,7 +575,7 @@ def _extract_group_id(self, group_name): try: repos_info = self.fetch_json(group_url) except (DownloaderException) as e: - if str_cls(e).find('HTTP error 404') != -1: + if str(e).find('HTTP error 404') != -1: return (None, None) raise @@ -404,37 +583,3 @@ def _extract_group_id(self, group_name): return (None, None) return (repos_info[0]['id'], False) - - def _extract_repo_id_default_branch(self, user_id, repo_name, repo_type): - """ - Extract the repo id from the repo results - - :param user_id: - The user_id of the user who owns the repo - - :param repo_name: - The name of the repository - - :param repo_type: - A string "users" or "groups", based on the user_id being from a - user or a group - - :return: - A 2-element tuple, (repo_id, default_branch) or (None, None) if no match - """ - - user_url = 'https://gitlab.com/api/v4/%s/%s/projects' % (repo_type, user_id) - try: - repos_info = self.fetch_json(user_url) - except (DownloaderException) as e: - if str_cls(e).find('HTTP error 404') != -1: - return (None, None) - raise - - repo_info = next( - (repo for repo in repos_info if repo['name'].lower() == repo_name.lower()), None) - - if not repo_info: - return (None, None) - - return (repo_info['id'], repo_info['default_branch']) diff --git a/app/lib/package_control/clients/json_api_client.py b/app/lib/package_control/clients/json_api_client.py index 38fa88b..0dd9cfd 100644 --- a/app/lib/package_control/clients/json_api_client.py +++ b/app/lib/package_control/clients/json_api_client.py @@ -1,18 +1,11 @@ import json - -try: - # Python 3 - from urllib.parse import urlencode, urlparse -except (ImportError): - # Python 2 - from urllib import urlencode - from urlparse import urlparse +from urllib.parse import urlencode, urlparse from .client_exception import ClientException -from ..download_manager import downloader +from ..download_manager import http_get -class JSONApiClient(): +class JSONApiClient: def __init__(self, settings): self.settings = settings @@ -27,7 +20,11 @@ def fetch(self, url, prefer_cached=False): :param prefer_cached: If a cached copy of the content is preferred - :return: The bytes/string + :raises: + DownloaderException: when there is an error downloading + + :return: + The bytes/string """ # If there are extra params for the domain name, add them @@ -38,9 +35,7 @@ def fetch(self, url, 
prefer_cached=False): joiner = '?%s' if url.find('?') == -1 else '&%s' url += joiner % params - with downloader(url, self.settings) as manager: - content = manager.fetch(url, 'Error downloading repository.', prefer_cached) - return content + return http_get(url, self.settings, 'Error downloading repository.', prefer_cached) def fetch_json(self, url, prefer_cached=False): """ @@ -52,7 +47,11 @@ def fetch_json(self, url, prefer_cached=False): :param prefer_cached: If a cached copy of the JSON is preferred - :return: A dict or list from the JSON + :raises: + ClientException: when there is an error parsing the response + + :return: + A dict or list from the JSON """ repository_json = self.fetch(url, prefer_cached) @@ -60,5 +59,112 @@ def fetch_json(self, url, prefer_cached=False): try: return json.loads(repository_json.decode('utf-8')) except (ValueError): - error_string = u'Error parsing JSON from URL %s.' % url + error_string = 'Error parsing JSON from URL %s.' % url raise ClientException(error_string) + + @staticmethod + def _expand_asset_variables(asset_templates): + """ + Expands the asset variables. + + Note: ``${version}`` is not replaced. + + :param asset_templates: + A list of tuples of asset template and download_info. + + ```py + [ + ( + "Name-${version}-py${py_version}-*-x??.whl", + { + "platforms": ["windows-x64"], + "python_versions": ["3.3", "3.8"], + "sublime_text": ">=4107" + } + ) + ] + ``` + + Supported variables are: + + ``${platform}`` + A platform-arch string as given in "platforms" list. + A separate explicit release is evaluated for each platform. + If "platforms": ['*'] is specified, variable is set to "any". + + ``${py_version}`` + Major and minor part of required python version without period. + One of "33", "38" or any other valid python version supported by ST. + + ``${st_build}`` + Value of "st_specifier" stripped by leading operator + "*" => "any" + ">=4107" => "4107" + "<4107" => "4107" + "4107 - 4126" => "4107" + + :returns: + A list of asset templates with all variables (except ``${version}``) resolved. + + ```py + [ + ( + "Name-${version}-py33-*-x??.whl", + { + "platforms": ["windows-x64"], + "python_versions": ["3.3"], + "sublime_text": ">=4107" + } + ), + ( + "Name-${version}-py33-*-x??.whl", + { + "platforms": ["windows-x64"], + "python_versions": ["3.8"], + "sublime_text": ">=4107" + } + ) + ] + ``` + """ + + output = [] + var = '${st_build}' + for pattern, selectors in asset_templates: + # resolve ${st_build} + if var in pattern: + # convert st_specifier version specifier to build number + st_specifier = selectors['sublime_text'] + if st_specifier == '*': + st_build = 'any' + elif st_specifier[0].isdigit(): + # 4107, 4107 - 4126 + st_build = st_specifier[:4] + elif st_specifier[1].isdigit(): + # <4107, >4107 + st_build = st_specifier[1:] + else: + # ==4107, <=4107, >=4107 + st_build = st_specifier[2:] + + pattern = pattern.replace(var, st_build) + + output.append((pattern, selectors)) + + def resolve(templates, var, key): + for pattern, selectors in templates: + if var not in pattern: + yield (pattern, selectors) + continue + + for value in selectors[key]: + new_selectors = selectors.copy() + new_selectors[key] = [value] + # remove `.` from python versions; n.r. 
for platforms + yield (pattern.replace(var, value.replace('.', '')), new_selectors) + + return None + + output = resolve(output, '${platform}', 'platforms') + output = resolve(output, '${py_version}', 'python_versions') + return list(output) diff --git a/app/lib/package_control/clients/pypi_client.py b/app/lib/package_control/clients/pypi_client.py new file mode 100644 index 0000000..493c993 --- /dev/null +++ b/app/lib/package_control/clients/pypi_client.py @@ -0,0 +1,290 @@ +import re + +from ..pep440 import PEP440InvalidVersionError +from ..pep440 import PEP440Version +from ..pep440 import PEP440VersionSpecifier + +from .json_api_client import JSONApiClient + + +class PyPiClient(JSONApiClient): + @staticmethod + def name_and_version(url): + match = re.match( + r"^https?://pypi\.org/project/([^/#?]+)(?:/([^/#?]+?)|/?)$", url + ) + if match: + return match.groups() + + return (None, None) + + def repo_info(self, url): + name, _ = self.name_and_version(url) + if not name: + return None + + pypi_url = "https://pypi.org/pypi/{}/json".format(name) + info = self.fetch_json(pypi_url) + + return { + "name": name, + "description": info["summary"], + "homepage": info["home_page"] + or info.get("project_urls", {}).get("Homepage"), + "author": info["author"], + "issues": info["bugtrack_url"] + or info.get("project_urls", {}).get("Issues"), + } + + def download_info(self, url, tag_prefix=None): + """Branch or tag based releases are not supported.""" + return None + + def download_info_from_branch(self, url, default_branch=None): + """Branch or tag based releases are not supported.""" + return None + + def download_info_from_tags(self, url, tag_prefix=None): + """Branch or tag based releases are not supported.""" + return None + + def download_info_from_releases(self, url, asset_templates, tag_prefix=None): + """ + Retrieve information about package + + :param url: + The URL of the repository, in one of the forms: + https://pypi.org/projects/{library_name} + https://pypi.org/projects/{library_name}/{version} + Grabs the info from the newest compatible release(s). + + :param tag_prefix: + unused, present for API compatibility. + + :param asset_templates: + A list of tuples of asset template and download_info. + + ```py + [ + ( + "coverage-${version}-cp33-*-win_amd64*.whl", + { + "platforms": ["windows-x64"], + "python_versions": ["3.3"] + } + ) + ] + ``` + + Supported globs: + + * : any number of characters + ? : single character placeholder + + Supported variables are: + + ${platform} + A platform-arch string as given in "platforms" list. + A separate explicit release is evaluated for each platform. + If "platforms": ["*"] is specified, variable is set to "any". + + ${py_version} + Major and minor part of required python version without period. + One of "33", "38" or any other valid python version supported by ST. + + ${st_build} + Value of "st_specifier" stripped by leading operator + "*" => "any" + ">=4107" => "4107" + "<4107" => "4107" + "4107 - 4126" => "4107" + + ${version} + Resolved semver without tag prefix + (e.g.: tag st4107-1.0.5 => version 1.0.5) + + Note: is not replaced by this method, but by the ``ClientProvider``. 
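Before matching asset filenames, each template is compiled into a regular expression. A hedged sketch of that translation, mirroring the replace chain used in `_make_download_info` below:

```py
import re

def glob_to_regex(pattern, version):
    # Same order as _make_download_info: fill in ${version}, escape
    # literal dots, then map the two supported globs to regex forms.
    pattern = pattern.replace("${version}", version)
    pattern = pattern.replace(".", r"\.")
    pattern = pattern.replace("?", r".")
    pattern = pattern.replace("*", r".*?")
    return re.compile(pattern)

regex = glob_to_regex("coverage-${version}-cp33-*-win_amd64*.whl", "4.2")
assert regex.match("coverage-4.2-cp33-cp33m-win_amd64.whl")
```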
+ + :raises: + DownloaderException: when there is an error downloading + ClientException: when there is an error parsing the response + + :return: + ``None`` if no match, ``False`` if no commit, or a list of dicts with the + following keys: + + - `version` - the version number of the download + - `url` - the download URL of a zip file of the package + - `date` - the ISO-8601 timestamp string when the version was published + - `platforms` - list of unicode strings with compatible platforms + - `python_versions` - list of compatible python versions + - `sublime_text` - sublime text version specifier + + Example: + + ```py + [ + { + "url": "https://files.pythonhosted.org/packages/.../coverage-4.2-cp33-cp33m-win_amd64.whl", + "version": "4.2", + "date": "2016-07-26 21:09:17", + "sha256": "bd4eba631f07cae8cdb9c55c144f165649e6701b962f9d604b4e00cf8802406c", + "platforms": ["windows-x64"], + "python_versions": ["3.3"] + }, + ... + ] + ``` + """ + + name, version = self.name_and_version(url) + if not name: + return None + + if version: + return self._download_info_from_fixed_version( + name, version, asset_templates + ) + + return self._download_info_from_latest_version(name, asset_templates) + + def _download_info_from_fixed_version(self, name, version, asset_templates): + """ + Build download information from fixed version. + + :param name: + The package name + :param version: + The package version + :param asset_templates: + A list of tuples of asset template and download_info. + + :return: + ``None`` if no match, ``False`` if no commit, or a list of dicts with the + following keys: + """ + + pypi_url = "https://pypi.org/pypi/{}/{}/json".format(name, version) + assets = self.fetch_json(pypi_url)["urls"] + + asset_templates = self._expand_asset_variables(asset_templates) + + output = [] + for pattern, selectors in asset_templates: + info = self._make_download_info(pattern, selectors, version, assets) + if info: + output.append(info) + + return output + + def _download_info_from_latest_version(self, name, asset_templates): + """ + Build download information from latest compatible versions of each asset template. + + :param name: + The package name + :param version: + The package version + :param asset_templates: + A list of tuples of asset template and download_info. + + :return: + ``None`` if no match, ``False`` if no commit, or a list of dicts with the + following keys: + """ + + pypi_url = "https://pypi.org/pypi/{}/json".format(name) + + # fetch dictionary of form `version: [asset, asset]` + releases = self.fetch_json(pypi_url)["releases"] + + # create a list of valid pep440 versions + versions = [] + for version in releases: + try: + versions.append(PEP440Version(version)) + except PEP440InvalidVersionError: + continue + + asset_templates = self._expand_asset_variables(asset_templates) + + max_releases = self.settings.get("max_releases", 0) + num_releases = [0] * len(asset_templates) + + # get latest compatible release for each asset template + output = [] + for version in sorted(versions, reverse=True): + # we don"t want beta releases! 
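A hedged illustration of the `is_final` filter applied on the next line; the attribute comes from this diff's `PEP440Version`, and the exact truth table shown is an assumption based on PEP 440 pre-release rules:

```py
from ..pep440 import PEP440Version

# Assumed semantics: only plain (final) releases are yielded, so
# alphas, betas and release candidates never count toward max_releases.
assert PEP440Version("4.2").is_final
assert not PEP440Version("4.3b1").is_final   # beta pre-release
assert not PEP440Version("1.0rc1").is_final  # release candidate
```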
+ if not version.is_final: + continue + + version_string = str(version) + assets = releases[version_string] + for idx, (pattern, selectors) in enumerate(asset_templates): + if max_releases > 0 and num_releases[idx] >= max_releases: + continue + info = self._make_download_info(pattern, selectors, version_string, assets) + if not info: + continue + output.append(info) + num_releases[idx] += 1 + + if max_releases > 0 and min(num_releases) >= max_releases: + break + + return output + + @staticmethod + def _make_download_info(pattern, selectors, version, assets): + """ + Build download information for given asset template. + + :param pattern: + The glob pattern of a given asset template + :param selectors: + The dictionary of release specification of given asset template from repository.json + :param version: + The package version + :param assets: + A list of dictionaries of asset information downloaded from PyPI. + + :return: + ``None`` if no match, ``False`` if no commit, or a list of dicts with the + following keys: + """ + + pattern = pattern.replace("${version}", version) + pattern = pattern.replace(".", r"\.") + pattern = pattern.replace("?", r".") + pattern = pattern.replace("*", r".*?") + regex = re.compile(pattern) + + python_versions = (PEP440Version(ver) for ver in selectors["python_versions"]) + + for asset in assets: + if asset["packagetype"] != "bdist_wheel": + continue + if asset["yanked"]: + continue + if not regex.match(asset["filename"]): + continue + + specs = asset["requires_python"] + if specs: + specs = ( + PEP440VersionSpecifier(spec) + for spec in asset["requires_python"].split(",") + ) + if not all(ver in spec for spec in specs for ver in python_versions): + continue + + info = { + "url": asset["url"], + "version": version, + "date": asset["upload_time"][0:19].replace("T", " "), + "sha256": asset["digests"]["sha256"], + } + info.update(selectors) + return info + + return None diff --git a/app/lib/package_control/clients/readme_client.py b/app/lib/package_control/clients/readme_client.py index 5edf416..d3f09dc 100644 --- a/app/lib/package_control/clients/readme_client.py +++ b/app/lib/package_control/clients/readme_client.py @@ -1,13 +1,7 @@ import re import os import base64 - -try: - # Python 3 - from urllib.parse import urlencode -except (ImportError): - # Python 2 - from urllib import urlencode +from urllib.parse import urlencode from .json_api_client import JSONApiClient @@ -49,7 +43,7 @@ def readme_info(self, url): # Try to grab the contents of a GitHub-based readme by grabbing the cached # content of the readme API call github_match = re.match( - r'https://raw\.github(?:usercontent)?\.com/([^/]+/[^/]+)/([^/]+)/' + r'https://raw\.github(?:usercontent)?\.com/([^/#?]+/[^/#?]+)/([^/#?]+)/' r'readme(\.(md|mkd|mdown|markdown|textile|creole|rst|txt))?$', url, re.I @@ -69,7 +63,7 @@ def readme_info(self, url): if not contents: contents = self.fetch(url) - basename, ext = os.path.splitext(url) + _, ext = os.path.splitext(url) format = 'txt' ext = ext.lower() if ext in _readme_formats: diff --git a/app/lib/package_control/cmd.py b/app/lib/package_control/cmd.py index 09f7464..ae576fe 100644 --- a/app/lib/package_control/cmd.py +++ b/app/lib/package_control/cmd.py @@ -1,10 +1,8 @@ import os import subprocess import re -import sys from .console_write import console_write -from .unicode import unicode_from_os from .show_error import show_error from . 
import text @@ -18,13 +16,6 @@ except (ImportError): sublime = None -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - def create_cmd(args, basename_binary=False): """ @@ -50,12 +41,12 @@ def create_cmd(args, basename_binary=False): escaped_args = [] for arg in args: if re.search('^[a-zA-Z0-9/_^\\-\\.:=]+$', arg) is None: - arg = u"'" + arg.replace(u"'", u"'\\''") + u"'" + arg = "'" + arg.replace("'", "'\\''") + "'" escaped_args.append(arg) - return u' '.join(escaped_args) + return ' '.join(escaped_args) -class Cli(object): +class Cli: """ Base class for running command line apps @@ -119,15 +110,13 @@ def execute(self, args, cwd, input=None, encoding='utf-8', meaningful_output=Fal if self.debug: console_write( - u''' + ''' Executing %s [%s] ''', (create_cmd(args), cwd) ) try: - if sys.platform == 'win32' and sys.version_info < (3,): - cwd = cwd.encode('mbcs') proc = subprocess.Popen( args, stdin=subprocess.PIPE, @@ -138,7 +127,7 @@ def execute(self, args, cwd, input=None, encoding='utf-8', meaningful_output=Fal env=os.environ ) - if input and isinstance(input, str_cls): + if input and isinstance(input, str): input = input.encode(encoding) stuck = True @@ -148,6 +137,8 @@ def execute(self, args, cwd, input=None, encoding='utf-8', meaningful_output=Fal is_vcs = True elif re.search('hg', binary_name): is_vcs = True + else: + is_vcs = False if sublime: def kill_proc(): @@ -157,7 +148,7 @@ def kill_proc(): proc.kill() message = text.format( - u''' + ''' The process %s seems to have gotten stuck. Command: %s @@ -168,22 +159,19 @@ def kill_proc(): ) if is_vcs: message += text.format( - u''' + ''' This is likely due to a password or passphrase prompt. Please ensure %s works without a prompt, or change the "ignore_vcs_packages" Package Control setting to true. - - Sublime Text will need to be restarted once these - changes are made. ''', binary_name ) show_error(message) sublime.set_timeout(kill_proc, 60000) - output, _ = proc.communicate(input) + output, error = proc.communicate(input) stuck = False @@ -191,29 +179,29 @@ def kill_proc(): output = output.replace('\r\n', '\n').rstrip(' \n\r') if proc.returncode not in self.ok_returncodes: - if not ignore_errors or re.search(ignore_errors, output) is None: + if error: + error = error.decode(encoding) + error = error.replace('\r\n', '\n').rstrip(' \n\r') + if not ignore_errors or re.search(ignore_errors, error or output) is None: message = text.format( - u''' + ''' Error executing: %s Working directory: %s %s ''', - (create_cmd(args), orig_cwd, output) - ) + (create_cmd(args), orig_cwd, error or output) + ).rstrip() if is_vcs: message += text.format( ''' VCS-based packages can be ignored by changing the "ignore_vcs_packages" setting to true. - - Sublime Text will need to be restarted once the - setting is changed. ''' ) - show_error(message) + console_write(message) return False if meaningful_output and self.debug and len(output) > 0: @@ -223,14 +211,14 @@ def kill_proc(): except (OSError) as e: show_error( - u''' + ''' Error executing: %s %s Try checking your "%s_binary" setting? 
''', - (create_cmd(args), unicode_from_os(e), self.cli_name) + (create_cmd(args), str(e), self.cli_name) ) return False @@ -283,7 +271,7 @@ def find_binary(self, name): if self.debug: console_write( - u''' + ''' Looking for %s at: "%s" ''', (self.cli_name, '", "'.join(check_binaries)) @@ -293,7 +281,7 @@ def find_binary(self, name): if os.path.exists(path) and not os.path.isdir(path) and os.access(path, os.X_OK): if self.debug: console_write( - u''' + ''' Found %s at "%s" ''', (self.cli_name, path) @@ -303,7 +291,7 @@ def find_binary(self, name): if self.debug: console_write( - u''' + ''' Could not find %s on your machine ''', self.cli_name diff --git a/app/lib/package_control/console_write.py b/app/lib/package_control/console_write.py index 72822e6..0f29e0b 100644 --- a/app/lib/package_control/console_write.py +++ b/app/lib/package_control/console_write.py @@ -1,12 +1,5 @@ import sys -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - from . import text @@ -31,11 +24,7 @@ def console_write(string, params=None, strip=True, indent=None, prefix=True): If the string "Package Control: " should be prefixed to the string """ - string = text.format(str_cls(string), params, strip=strip, indent=indent) - - if sys.version_info < (3,): - if isinstance(string, str_cls): - string = string.encode('UTF-8') + string = text.format(str(string), params, strip=strip, indent=indent) if prefix: sys.stdout.write('Package Control: ') diff --git a/app/lib/package_control/deps/__init__.py b/app/lib/package_control/deps/__init__.py index e69de29..e313053 100644 --- a/app/lib/package_control/deps/__init__.py +++ b/app/lib/package_control/deps/__init__.py @@ -0,0 +1,28 @@ +import os.path +import sys + +try: + from .oscrypto import use_ctypes, use_openssl + + use_ctypes() + + # On Linux we need to use the version of OpenSSL included with Sublime Text + # to prevent conflicts between two different versions of OpenSSL being + # dynamically linked. On ST3, we can't use oscrypto for OpenSSL stuff since + # it has OpenSSL statically linked, and we can't dlopen() that. + # ST 4081 broke sys.executable to return "sublime_text", but other 4xxx builds + # will contain "plugin_host". + if sys.version_info[:2] == (3, 8) and sys.platform == 'linux' and ( + 'sublime_text' in sys.executable or + 'plugin_host' in sys.executable): + install_dir = os.path.dirname(sys.executable) + try: + use_openssl( + os.path.join(install_dir, 'libcrypto.so.1.1'), + os.path.join(install_dir, 'libssl.so.1.1') + ) + except RuntimeError: + pass # runtime error may be raised, when reloading modules. 
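The `RuntimeError` guard above matters because oscrypto only lets its backend be configured once per process; reloading these modules repeats the call, and the error must be swallowed. A minimal standalone sketch, with a hypothetical install directory:

```py
import os.path
from package_control.deps.oscrypto import use_openssl

install_dir = '/opt/sublime_text'  # hypothetical ST install location
try:
    use_openssl(
        os.path.join(install_dir, 'libcrypto.so.1.1'),
        os.path.join(install_dir, 'libssl.so.1.1')
    )
except RuntimeError:
    # oscrypto refuses to reconfigure an already-selected backend,
    # which happens when Package Control's modules are reloaded
    pass
```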
+ +except ImportError: + pass diff --git a/app/lib/package_control/deps/asn1crypto/algos.py b/app/lib/package_control/deps/asn1crypto/algos.py index d49be26..ff05626 100644 --- a/app/lib/package_control/deps/asn1crypto/algos.py +++ b/app/lib/package_control/deps/asn1crypto/algos.py @@ -245,21 +245,46 @@ class SignedDigestAlgorithmId(ObjectIdentifier): '1.2.840.10040.4.3': 'sha1_dsa', '1.3.14.3.2.13': 'sha1_dsa', '1.3.14.3.2.27': 'sha1_dsa', + # Source: NIST CSOR Algorithm Registrations '2.16.840.1.101.3.4.3.1': 'sha224_dsa', '2.16.840.1.101.3.4.3.2': 'sha256_dsa', + '2.16.840.1.101.3.4.3.3': 'sha384_dsa', + '2.16.840.1.101.3.4.3.4': 'sha512_dsa', '1.2.840.10045.4.1': 'sha1_ecdsa', '1.2.840.10045.4.3.1': 'sha224_ecdsa', '1.2.840.10045.4.3.2': 'sha256_ecdsa', '1.2.840.10045.4.3.3': 'sha384_ecdsa', '1.2.840.10045.4.3.4': 'sha512_ecdsa', + # Source: NIST CSOR Algorithm Registrations + '2.16.840.1.101.3.4.3.5': 'sha3_224_dsa', + '2.16.840.1.101.3.4.3.6': 'sha3_256_dsa', + '2.16.840.1.101.3.4.3.7': 'sha3_384_dsa', + '2.16.840.1.101.3.4.3.8': 'sha3_512_dsa', '2.16.840.1.101.3.4.3.9': 'sha3_224_ecdsa', '2.16.840.1.101.3.4.3.10': 'sha3_256_ecdsa', '2.16.840.1.101.3.4.3.11': 'sha3_384_ecdsa', '2.16.840.1.101.3.4.3.12': 'sha3_512_ecdsa', + '2.16.840.1.101.3.4.3.13': 'sha3_224_rsa', + '2.16.840.1.101.3.4.3.14': 'sha3_256_rsa', + '2.16.840.1.101.3.4.3.15': 'sha3_384_rsa', + '2.16.840.1.101.3.4.3.16': 'sha3_512_rsa', # For when the digest is specified elsewhere in a Sequence '1.2.840.113549.1.1.1': 'rsassa_pkcs1v15', '1.2.840.10040.4.1': 'dsa', '1.2.840.10045.4': 'ecdsa', + # RFC 8410 -- https://tools.ietf.org/html/rfc8410 + '1.3.101.112': 'ed25519', + '1.3.101.113': 'ed448', + # Source: BSI TR-03111 V-2 + '0.4.0.127.0.7.1.1.4.1.1': 'sha1_ecdsa_plain', + '0.4.0.127.0.7.1.1.4.1.2': 'sha224_ecdsa_plain', + '0.4.0.127.0.7.1.1.4.1.3': 'sha256_ecdsa_plain', + '0.4.0.127.0.7.1.1.4.1.4': 'sha384_ecdsa_plain', + '0.4.0.127.0.7.1.1.4.1.5': 'sha512_ecdsa_plain', + '0.4.0.127.0.7.1.1.4.1.8': 'sha3_224_ecdsa_plain', + '0.4.0.127.0.7.1.1.4.1.9': 'sha3_256_ecdsa_plain', + '0.4.0.127.0.7.1.1.4.1.10': 'sha3_384_ecdsa_plain', + '0.4.0.127.0.7.1.1.4.1.11': 'sha3_512_ecdsa_plain', } _reverse_map = { @@ -278,14 +303,36 @@ class SignedDigestAlgorithmId(ObjectIdentifier): 'sha256_dsa': '2.16.840.1.101.3.4.3.2', 'sha256_ecdsa': '1.2.840.10045.4.3.2', 'sha256_rsa': '1.2.840.113549.1.1.11', + 'sha384_dsa': '2.16.840.1.101.3.4.3.3', 'sha384_ecdsa': '1.2.840.10045.4.3.3', 'sha384_rsa': '1.2.840.113549.1.1.12', + 'sha512_dsa': '2.16.840.1.101.3.4.3.4', 'sha512_ecdsa': '1.2.840.10045.4.3.4', 'sha512_rsa': '1.2.840.113549.1.1.13', + # Source: NIST CSOR Algorithm Registrations + 'sha3_224_dsa': '2.16.840.1.101.3.4.3.5', + 'sha3_256_dsa': '2.16.840.1.101.3.4.3.6', + 'sha3_384_dsa': '2.16.840.1.101.3.4.3.7', + 'sha3_512_dsa': '2.16.840.1.101.3.4.3.8', 'sha3_224_ecdsa': '2.16.840.1.101.3.4.3.9', 'sha3_256_ecdsa': '2.16.840.1.101.3.4.3.10', 'sha3_384_ecdsa': '2.16.840.1.101.3.4.3.11', 'sha3_512_ecdsa': '2.16.840.1.101.3.4.3.12', + 'sha3_224_rsa': '2.16.840.1.101.3.4.3.13', + 'sha3_256_rsa': '2.16.840.1.101.3.4.3.14', + 'sha3_384_rsa': '2.16.840.1.101.3.4.3.15', + 'sha3_512_rsa': '2.16.840.1.101.3.4.3.16', + 'ed25519': '1.3.101.112', + 'ed448': '1.3.101.113', + 'sha1_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.1', + 'sha224_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.2', + 'sha256_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.3', + 'sha384_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.4', + 'sha512_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.5', + 'sha3_224_ecdsa_plain': 
'0.4.0.127.0.7.1.1.4.1.8', + 'sha3_256_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.9', + 'sha3_384_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.10', + 'sha3_512_ecdsa_plain': '0.4.0.127.0.7.1.1.4.1.11', } @@ -300,12 +347,53 @@ class SignedDigestAlgorithm(_ForceNullParameters, Sequence): 'rsassa_pss': RSASSAPSSParams, } + _algo_map = { + 'md2_rsa': 'md2', + 'md5_rsa': 'md5', + 'sha1_rsa': 'sha1', + 'sha224_rsa': 'sha224', + 'sha256_rsa': 'sha256', + 'sha384_rsa': 'sha384', + 'sha512_rsa': 'sha512', + 'sha1_dsa': 'sha1', + 'sha224_dsa': 'sha224', + 'sha256_dsa': 'sha256', + 'sha384_dsa': 'sha384', + 'sha512_dsa': 'sha512', + 'sha1_ecdsa': 'sha1', + 'sha1_ecdsa_plain': 'sha1', + 'sha224_ecdsa': 'sha224', + 'sha256_ecdsa': 'sha256', + 'sha384_ecdsa': 'sha384', + 'sha512_ecdsa': 'sha512', + 'sha224_ecdsa_plain': 'sha224', + 'sha256_ecdsa_plain': 'sha256', + 'sha384_ecdsa_plain': 'sha384', + 'sha512_ecdsa_plain': 'sha512', + 'sha3_224_dsa': 'sha3_224', + 'sha3_256_dsa': 'sha3_256', + 'sha3_384_dsa': 'sha3_384', + 'sha3_512_dsa': 'sha3_512', + 'sha3_224_ecdsa': 'sha3_224', + 'sha3_256_ecdsa': 'sha3_256', + 'sha3_384_ecdsa': 'sha3_384', + 'sha3_512_ecdsa': 'sha3_512', + 'sha3_224_ecdsa_plain': 'sha3_224', + 'sha3_256_ecdsa_plain': 'sha3_256', + 'sha3_384_ecdsa_plain': 'sha3_384', + 'sha3_512_ecdsa_plain': 'sha3_512', + 'sha3_224_rsa': 'sha3_224', + 'sha3_256_rsa': 'sha3_256', + 'sha3_384_rsa': 'sha3_384', + 'sha3_512_rsa': 'sha3_512', + } + @property def signature_algo(self): """ :return: - A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa" or - "ecdsa" + A unicode string of "rsassa_pkcs1v15", "rsassa_pss", "dsa", + "ecdsa", "ed25519" or "ed448" """ algorithm = self['algorithm'].native @@ -318,11 +406,21 @@ def signature_algo(self): 'sha256_rsa': 'rsassa_pkcs1v15', 'sha384_rsa': 'rsassa_pkcs1v15', 'sha512_rsa': 'rsassa_pkcs1v15', + 'sha3_224_rsa': 'rsassa_pkcs1v15', + 'sha3_256_rsa': 'rsassa_pkcs1v15', + 'sha3_384_rsa': 'rsassa_pkcs1v15', + 'sha3_512_rsa': 'rsassa_pkcs1v15', 'rsassa_pkcs1v15': 'rsassa_pkcs1v15', 'rsassa_pss': 'rsassa_pss', 'sha1_dsa': 'dsa', 'sha224_dsa': 'dsa', 'sha256_dsa': 'dsa', + 'sha384_dsa': 'dsa', + 'sha512_dsa': 'dsa', + 'sha3_224_dsa': 'dsa', + 'sha3_256_dsa': 'dsa', + 'sha3_384_dsa': 'dsa', + 'sha3_512_dsa': 'dsa', 'dsa': 'dsa', 'sha1_ecdsa': 'ecdsa', 'sha224_ecdsa': 'ecdsa', @@ -333,7 +431,18 @@ def signature_algo(self): 'sha3_256_ecdsa': 'ecdsa', 'sha3_384_ecdsa': 'ecdsa', 'sha3_512_ecdsa': 'ecdsa', + 'sha1_ecdsa_plain': 'ecdsa', + 'sha224_ecdsa_plain': 'ecdsa', + 'sha256_ecdsa_plain': 'ecdsa', + 'sha384_ecdsa_plain': 'ecdsa', + 'sha512_ecdsa_plain': 'ecdsa', + 'sha3_224_ecdsa_plain': 'ecdsa', + 'sha3_256_ecdsa_plain': 'ecdsa', + 'sha3_384_ecdsa_plain': 'ecdsa', + 'sha3_512_ecdsa_plain': 'ecdsa', 'ecdsa': 'ecdsa', + 'ed25519': 'ed25519', + 'ed448': 'ed448', } if algorithm in algo_map: return algo_map[algorithm] @@ -350,34 +459,57 @@ def hash_algo(self): """ :return: A unicode string of "md2", "md5", "sha1", "sha224", "sha256", - "sha384", "sha512", "sha512_224", "sha512_256" + "sha384", "sha512", "sha512_224", "sha512_256" or "shake256" """ algorithm = self['algorithm'].native + if algorithm in self._algo_map: + return self._algo_map[algorithm] - algo_map = { - 'md2_rsa': 'md2', - 'md5_rsa': 'md5', - 'sha1_rsa': 'sha1', - 'sha224_rsa': 'sha224', - 'sha256_rsa': 'sha256', - 'sha384_rsa': 'sha384', - 'sha512_rsa': 'sha512', - 'sha1_dsa': 'sha1', - 'sha224_dsa': 'sha224', - 'sha256_dsa': 'sha256', - 'sha1_ecdsa': 'sha1', - 'sha224_ecdsa': 'sha224', - 'sha256_ecdsa': 'sha256', - 
'sha384_ecdsa': 'sha384', - 'sha512_ecdsa': 'sha512', - } - if algorithm in algo_map: - return algo_map[algorithm] + if algorithm == 'rsassa_pss': + return self['parameters']['hash_algorithm']['algorithm'].native + + if algorithm == 'ed25519' or algorithm == 'ed448': + raise ValueError(unwrap( + ''' + Hash algorithm not known for %s - use .cms_hash_algorithm for CMS purposes. + More info at https://github.com/wbond/asn1crypto/pull/230. + ''', + algorithm + )) + + raise ValueError(unwrap( + ''' + Hash algorithm not known for %s + ''', + algorithm + )) + + @property + def cms_hash_algo(self): + """ + The hash algorithm for CMS hashing + + :return: + A unicode string of "md2", "md5", "sha1", "sha224", "sha256", + "sha384", "sha512", "sha512_224", "sha512_256" or "shake256" + """ + + algorithm = self['algorithm'].native + + if algorithm in self._algo_map: + return self._algo_map[algorithm] if algorithm == 'rsassa_pss': return self['parameters']['hash_algorithm']['algorithm'].native + cms_algo_map = { + 'ed25519': 'sha512', + 'ed448': 'shake256', + } + if algorithm in cms_algo_map: + return cms_algo_map[algorithm] + raise ValueError(unwrap( ''' Hash algorithm not known for %s @@ -402,9 +534,21 @@ class Pbkdf2Params(Sequence): ] +class ScryptParams(Sequence): + # https://tools.ietf.org/html/rfc7914#section-7 + _fields = [ + ('salt', OctetString), + ('cost_parameter', Integer), + ('block_size', Integer), + ('parallelization_parameter', Integer), + ('key_length', Integer, {'optional': True}), + ] + + class KdfAlgorithmId(ObjectIdentifier): _map = { - '1.2.840.113549.1.5.12': 'pbkdf2' + '1.2.840.113549.1.5.12': 'pbkdf2', + '1.3.6.1.4.1.11591.4.11': 'scrypt', } @@ -415,7 +559,8 @@ class KdfAlgorithm(Sequence): ] _oid_pair = ('algorithm', 'parameters') _oid_specs = { - 'pbkdf2': Pbkdf2Params + 'pbkdf2': Pbkdf2Params, + 'scrypt': ScryptParams, } @@ -738,6 +883,8 @@ def kdf_hmac(self): encryption_algo = self['algorithm'].native if encryption_algo == 'pbes2': + if self.kdf == 'scrypt': + return None return self['parameters']['key_derivation_func']['parameters']['prf']['algorithm'].native if encryption_algo.find('.') == -1: @@ -818,6 +965,8 @@ def kdf_iterations(self): encryption_algo = self['algorithm'].native if encryption_algo == 'pbes2': + if self.kdf == 'scrypt': + return None return self['parameters']['key_derivation_func']['parameters']['iteration_count'].native if encryption_algo.find('.') == -1: @@ -874,8 +1023,7 @@ def key_length(self): return cipher_lengths[encryption_algo] if encryption_algo == 'rc2': - rc2_params = self['parameters'].parsed['encryption_scheme']['parameters'].parsed - rc2_parameter_version = rc2_params['rc2_parameter_version'].native + rc2_parameter_version = self['parameters']['rc2_parameter_version'].native # See page 24 of # http://www.emc.com/collateral/white-papers/h11302-pkcs5v2-1-password-based-cryptography-standard-wp.pdf @@ -1042,7 +1190,7 @@ def encryption_block_size(self): return cipher_map[encryption_algo] if encryption_algo == 'rc5': - return self['parameters'].parsed['block_size_in_bits'].native / 8 + return self['parameters']['block_size_in_bits'].native // 8 if encryption_algo == 'pbes2': return self['parameters']['encryption_scheme'].encryption_block_size @@ -1084,7 +1232,7 @@ def encryption_iv(self): encryption_algo = self['algorithm'].native if encryption_algo in set(['rc2', 'rc5']): - return self['parameters'].parsed['iv'].native + return self['parameters']['iv'].native # For DES/Triple DES and AES the IV is the entirety of the parameters 
octet_string_iv_oids = set([ diff --git a/app/lib/package_control/deps/asn1crypto/cms.py b/app/lib/package_control/deps/asn1crypto/cms.py index 2115aed..b104c99 100644 --- a/app/lib/package_control/deps/asn1crypto/cms.py +++ b/app/lib/package_control/deps/asn1crypto/cms.py @@ -30,6 +30,7 @@ _ForceNullParameters, DigestAlgorithm, EncryptionAlgorithm, + EncryptionAlgorithmId, HmacAlgorithm, KdfAlgorithm, RSAESOAEPParams, @@ -100,6 +101,8 @@ class CMSAttributeType(ObjectIdentifier): '1.2.840.113549.1.9.4': 'message_digest', '1.2.840.113549.1.9.5': 'signing_time', '1.2.840.113549.1.9.6': 'counter_signature', + # https://datatracker.ietf.org/doc/html/rfc2633#section-2.5.2 + '1.2.840.113549.1.9.15': 'smime_capabilities', # https://tools.ietf.org/html/rfc2633#page-26 '1.2.840.113549.1.9.16.2.11': 'encrypt_key_pref', # https://tools.ietf.org/html/rfc3161#page-20 @@ -273,7 +276,7 @@ class V2Form(Sequence): class AttCertIssuer(Choice): _alternatives = [ ('v1_form', GeneralNames), - ('v2_form', V2Form, {'explicit': 0}), + ('v2_form', V2Form, {'implicit': 0}), ] @@ -315,7 +318,7 @@ class SetOfSvceAuthInfo(SetOf): class RoleSyntax(Sequence): _fields = [ ('role_authority', GeneralNames, {'implicit': 0, 'optional': True}), - ('role_name', GeneralName, {'implicit': 1}), + ('role_name', GeneralName, {'explicit': 1}), ] @@ -337,7 +340,7 @@ class ClassList(BitString): class SecurityCategory(Sequence): _fields = [ ('type', ObjectIdentifier, {'implicit': 0}), - ('value', Any, {'implicit': 1}), + ('value', Any, {'explicit': 1}), ] @@ -347,9 +350,9 @@ class SetOfSecurityCategory(SetOf): class Clearance(Sequence): _fields = [ - ('policy_id', ObjectIdentifier, {'implicit': 0}), - ('class_list', ClassList, {'implicit': 1, 'default': 'unclassified'}), - ('security_categories', SetOfSecurityCategory, {'implicit': 2, 'optional': True}), + ('policy_id', ObjectIdentifier), + ('class_list', ClassList, {'default': set(['unclassified'])}), + ('security_categories', SetOfSecurityCategory, {'optional': True}), ] @@ -726,6 +729,12 @@ class RecipientKeyIdentifier(Sequence): ('other', OtherKeyAttribute, {'optional': True}), ] + def _setup(self): + super(RecipientKeyIdentifier, self)._setup() + # This creates a backwards compatible shim for an + # incorrect format field name that was in old versions + self._field_map['subjectKeyIdentifier'] = self._field_map['subject_key_identifier'] + class KeyAgreementRecipientIdentifier(Choice): _alternatives = [ @@ -926,26 +935,40 @@ def decompressed(self): return self._decompressed -class RecipientKeyIdentifier(Sequence): - _fields = [ - ('subjectKeyIdentifier', OctetString), - ('date', GeneralizedTime, {'optional': True}), - ('other', OtherKeyAttribute, {'optional': True}), - ] - - class SMIMEEncryptionKeyPreference(Choice): _alternatives = [ ('issuer_and_serial_number', IssuerAndSerialNumber, {'implicit': 0}), - ('recipientKeyId', RecipientKeyIdentifier, {'implicit': 1}), - ('subjectAltKeyIdentifier', PublicKeyInfo, {'implicit': 2}), + ('recipient_key_id', RecipientKeyIdentifier, {'implicit': 1}), + ('subject_alt_key_identifier', PublicKeyInfo, {'implicit': 2}), ] + def _setup(self): + super(SMIMEEncryptionKeyPreference, self)._setup() + # This creates backwards compatible shims for two + # incorrect format alternative names that were in old versions + self._name_map['recipientKeyId'] = self._name_map['recipient_key_id'] + self._name_map['subjectAltKeyIdentifier'] = self._name_map['subject_alt_key_identifier'] + class SMIMEEncryptionKeyPreferences(SetOf): _child_spec = 
SMIMEEncryptionKeyPreference +class SMIMECapabilityIdentifier(Sequence): + _fields = [ + ('capability_id', EncryptionAlgorithmId), + ('parameters', Any, {'optional': True}), + ] + + +class SMIMECapabilites(SequenceOf): + _child_spec = SMIMECapabilityIdentifier + + +class SetOfSMIMECapabilites(SetOf): + _child_spec = SMIMECapabilites + + ContentInfo._oid_specs = { 'data': OctetString, 'signed_data': SignedData, @@ -981,4 +1004,5 @@ class SMIMEEncryptionKeyPreferences(SetOf): 'microsoft_nested_signature': SetOfContentInfo, 'microsoft_time_stamp_token': SetOfContentInfo, 'encrypt_key_pref': SMIMEEncryptionKeyPreferences, + 'smime_capabilities': SetOfSMIMECapabilites, } diff --git a/app/lib/package_control/deps/asn1crypto/core.py b/app/lib/package_control/deps/asn1crypto/core.py index 7133367..428ef0e 100644 --- a/app/lib/package_control/deps/asn1crypto/core.py +++ b/app/lib/package_control/deps/asn1crypto/core.py @@ -166,6 +166,15 @@ def load(encoded_data, strict=False): return Asn1Value.load(encoded_data, strict=strict) +def unpickle_helper(asn1crypto_cls, der_bytes): + """ + Helper function to integrate with pickle. + + Note that this must be an importable top-level function. + """ + return asn1crypto_cls.load(der_bytes) + + class Asn1Value(object): """ The basis of all ASN.1 values @@ -481,6 +490,12 @@ def __unicode__(self): return self.__repr__() + def __reduce__(self): + """ + Permits pickling Asn1Value objects using their DER representation. + """ + return unpickle_helper, (self.__class__, self.dump()) + def _new_instance(self): """ Constructs a new copy of the current object, preserving any tagging @@ -3414,6 +3429,17 @@ def __init__(self, value=None, default=None, **kwargs): self.__setitem__(key, value[key]) unused_keys.remove(key) + # This handles the situation where there is field name + # mapping going on due to a field be renamed. Normally + # the keys are checked against the primary field list. + # If there are still keys left over, check to see if they + # are mapped via checking the _field_map. + if len(unused_keys): + for key in list(unused_keys): + if key in self._field_map: + self.__setitem__(key, value[key]) + unused_keys.remove(key) + if len(unused_keys): raise ValueError(unwrap( ''' @@ -4113,6 +4139,10 @@ def dump(self, force=False): if self._header is not None and self._header[-1:] == b'\x80': force = True + # We can't force encoding if we don't have a spec + if force and self._fields == [] and self.__class__ is Sequence: + force = False + if force: self._set_contents(force=force) diff --git a/app/lib/package_control/deps/asn1crypto/crl.py b/app/lib/package_control/deps/asn1crypto/crl.py index 84cb168..bf30196 100644 --- a/app/lib/package_control/deps/asn1crypto/crl.py +++ b/app/lib/package_control/deps/asn1crypto/crl.py @@ -44,7 +44,6 @@ class Version(Integer): _map = { 0: 'v1', 1: 'v2', - 2: 'v3', } diff --git a/app/lib/package_control/deps/asn1crypto/csr.py b/app/lib/package_control/deps/asn1crypto/csr.py index 7ea2848..7d5ba44 100644 --- a/app/lib/package_control/deps/asn1crypto/csr.py +++ b/app/lib/package_control/deps/asn1crypto/csr.py @@ -4,7 +4,7 @@ ASN.1 type classes for certificate signing requests (CSR). Exports the following items: - - CertificatationRequest() + - CertificationRequest() Other type classes are defined that help compose the types listed above. 
""" @@ -14,11 +14,14 @@ from .algos import SignedDigestAlgorithm from .core import ( Any, + BitString, + BMPString, Integer, ObjectIdentifier, OctetBitString, Sequence, SetOf, + UTF8String ) from .keys import PublicKeyInfo from .x509 import DirectoryString, Extensions, Name @@ -39,6 +42,12 @@ class CSRAttributeType(ObjectIdentifier): '1.2.840.113549.1.9.7': 'challenge_password', '1.2.840.113549.1.9.9': 'extended_certificate_attributes', '1.2.840.113549.1.9.14': 'extension_request', + # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/a5eaae36-e9f3-4dc5-a687-bfa7115954f1 + '1.3.6.1.4.1.311.13.2.2': 'microsoft_enrollment_csp_provider', + # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/7c677cba-030d-48be-ba2b-01e407705f34 + '1.3.6.1.4.1.311.13.2.3': 'microsoft_os_version', + # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/64e5ff6d-c6dd-4578-92f7-b3d895f9b9c7 + '1.3.6.1.4.1.311.21.20': 'microsoft_request_client_info', } @@ -61,6 +70,31 @@ class SetOfExtensions(SetOf): _child_spec = Extensions +class MicrosoftEnrollmentCSProvider(Sequence): + _fields = [ + ('keyspec', Integer), + ('cspname', BMPString), # cryptographic service provider name + ('signature', BitString), + ] + + +class SetOfMicrosoftEnrollmentCSProvider(SetOf): + _child_spec = MicrosoftEnrollmentCSProvider + + +class MicrosoftRequestClientInfo(Sequence): + _fields = [ + ('clientid', Integer), + ('machinename', UTF8String), + ('username', UTF8String), + ('processname', UTF8String), + ] + + +class SetOfMicrosoftRequestClientInfo(SetOf): + _child_spec = MicrosoftRequestClientInfo + + class CRIAttribute(Sequence): _fields = [ ('type', CSRAttributeType), @@ -72,6 +106,9 @@ class CRIAttribute(Sequence): 'challenge_password': SetOfDirectoryString, 'extended_certificate_attributes': SetOfAttributes, 'extension_request': SetOfExtensions, + 'microsoft_enrollment_csp_provider': SetOfMicrosoftEnrollmentCSProvider, + 'microsoft_os_version': SetOfDirectoryString, + 'microsoft_request_client_info': SetOfMicrosoftRequestClientInfo, } diff --git a/app/lib/package_control/deps/asn1crypto/keys.py b/app/lib/package_control/deps/asn1crypto/keys.py index 96b763e..b4a87ae 100644 --- a/app/lib/package_control/deps/asn1crypto/keys.py +++ b/app/lib/package_control/deps/asn1crypto/keys.py @@ -666,6 +666,11 @@ class PrivateKeyAlgorithmId(ObjectIdentifier): '1.2.840.10040.4.1': 'dsa', # https://tools.ietf.org/html/rfc3279#page-13 '1.2.840.10045.2.1': 'ec', + # https://tools.ietf.org/html/rfc8410#section-9 + '1.3.101.110': 'x25519', + '1.3.101.111': 'x448', + '1.3.101.112': 'ed25519', + '1.3.101.113': 'ed448', } @@ -707,6 +712,12 @@ def _private_key_spec(self): 'rsassa_pss': RSAPrivateKey, 'dsa': Integer, 'ec': ECPrivateKey, + # These should be treated as opaque octet strings according + # to RFC 8410 + 'x25519': OctetString, + 'x448': OctetString, + 'ed25519': OctetString, + 'ed448': OctetString, }[algorithm] _spec_callbacks = { @@ -741,7 +752,7 @@ def wrap(cls, private_key, algorithm): type_name(private_key) )) - if algorithm == 'rsa': + if algorithm == 'rsa' or algorithm == 'rsassa_pss': if not isinstance(private_key, RSAPrivateKey): private_key = RSAPrivateKey.load(private_key) params = Null() @@ -882,7 +893,7 @@ def hash_algo(self): def algorithm(self): """ :return: - A unicode string of "rsa", "dsa" or "ec" + A unicode string of "rsa", "rsassa_pss", "dsa" or "ec" """ if self._algorithm is None: @@ -897,7 +908,7 @@ def bit_size(self): """ if self._bit_size is None: - if self.algorithm == 
'rsa': + if self.algorithm == 'rsa' or self.algorithm == 'rsassa_pss': prime = self['private_key'].parsed['modulus'].native elif self.algorithm == 'dsa': prime = self['private_key_algorithm']['parameters']['p'].native @@ -1017,6 +1028,11 @@ class PublicKeyAlgorithmId(ObjectIdentifier): '1.2.840.10045.2.1': 'ec', # https://tools.ietf.org/html/rfc3279#page-10 '1.2.840.10046.2.1': 'dh', + # https://tools.ietf.org/html/rfc8410#section-9 + '1.3.101.110': 'x25519', + '1.3.101.111': 'x448', + '1.3.101.112': 'ed25519', + '1.3.101.113': 'ed448', } @@ -1063,6 +1079,12 @@ def _public_key_spec(self): # decompose the byte string into the constituent X and Y coords 'ec': (ECPointBitString, None), 'dh': Integer, + # These should be treated as opaque bit strings according + # to RFC 8410, and need not even be valid ASN.1 + 'x25519': (OctetBitString, None), + 'x448': (OctetBitString, None), + 'ed25519': (OctetBitString, None), + 'ed448': (OctetBitString, None), }[algorithm] _spec_callbacks = { @@ -1098,7 +1120,7 @@ def wrap(cls, public_key, algorithm): type_name(public_key) )) - if algorithm != 'rsa': + if algorithm != 'rsa' and algorithm != 'rsassa_pss': raise ValueError(unwrap( ''' algorithm must "rsa", not %s @@ -1200,7 +1222,7 @@ def hash_algo(self): def algorithm(self): """ :return: - A unicode string of "rsa", "dsa" or "ec" + A unicode string of "rsa", "rsassa_pss", "dsa" or "ec" """ if self._algorithm is None: @@ -1218,7 +1240,7 @@ def bit_size(self): if self.algorithm == 'ec': self._bit_size = int(((len(self['public_key'].native) - 1) / 2) * 8) else: - if self.algorithm == 'rsa': + if self.algorithm == 'rsa' or self.algorithm == 'rsassa_pss': prime = self['public_key'].parsed['modulus'].native elif self.algorithm == 'dsa': prime = self['algorithm']['parameters']['p'].native diff --git a/app/lib/package_control/deps/asn1crypto/parser.py b/app/lib/package_control/deps/asn1crypto/parser.py index c4f91f6..2f5a63e 100644 --- a/app/lib/package_control/deps/asn1crypto/parser.py +++ b/app/lib/package_control/deps/asn1crypto/parser.py @@ -20,6 +20,7 @@ _PY2 = sys.version_info <= (3,) _INSUFFICIENT_DATA_MESSAGE = 'Insufficient data - %s bytes requested but only %s available' +_MAX_DEPTH = 10 def emit(class_, method, tag, contents): @@ -136,7 +137,7 @@ def peek(contents): return consumed -def _parse(encoded_data, data_len, pointer=0, lengths_only=False): +def _parse(encoded_data, data_len, pointer=0, lengths_only=False, depth=0): """ Parses a byte string into component parts @@ -154,83 +155,89 @@ def _parse(encoded_data, data_len, pointer=0, lengths_only=False): number of bytes in the header and the integer number of bytes in the contents. Internal use only. + :param depth: + The recursion depth when evaluating indefinite-length encoding. 
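For reference, a concrete indefinite-length input exercising this path, via the public `asn1crypto.parser.parse()` wrapper:

```py
from asn1crypto.parser import parse

# Constructed SEQUENCE (0x30) with indefinite length (0x80) wrapping an
# OCTET STRING (0x04, length 3), closed by the 0x00 0x00 end-of-contents.
data = b'\x30\x80\x04\x03abc\x00\x00'
class_, method, tag, header, contents, trailer = parse(data)

assert (class_, method, tag) == (0, 1, 16)  # universal, constructed, SEQUENCE
assert header == b'\x30\x80'
assert contents == b'\x04\x03abc'
assert trailer == b'\x00\x00'
```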
+ :return: A 2-element tuple: - 0: A tuple of (class_, method, tag, header, content, trailer) - 1: An integer indicating how many bytes were consumed """ - if data_len < pointer + 2: - raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (2, data_len - pointer)) + if depth > _MAX_DEPTH: + raise ValueError('Indefinite-length recursion limit exceeded') start = pointer + + if data_len < pointer + 1: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer)) first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] + pointer += 1 tag = first_octet & 31 + constructed = (first_octet >> 5) & 1 # Base 128 length using 8th bit as continuation indicator if tag == 31: tag = 0 while True: + if data_len < pointer + 1: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer)) num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] pointer += 1 + if num == 0x80 and tag == 0: + raise ValueError('Non-minimal tag encoding') tag *= 128 tag += num & 127 if num >> 7 == 0: break + if tag < 31: + raise ValueError('Non-minimal tag encoding') + if data_len < pointer + 1: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (1, data_len - pointer)) length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer] pointer += 1 + trailer = b'' if length_octet >> 7 == 0: - if lengths_only: - return (pointer, pointer + (length_octet & 127)) contents_end = pointer + (length_octet & 127) else: length_octets = length_octet & 127 if length_octets: + if data_len < pointer + length_octets: + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (length_octets, data_len - pointer)) pointer += length_octets contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False) - if lengths_only: - return (pointer, contents_end) else: # To properly parse indefinite length values, we need to scan forward # parsing headers until we find a value with a length of zero. If we # just scanned looking for \x00\x00, nested indefinite length values # would not work. 
+ if not constructed: + raise ValueError('Indefinite-length element must be constructed') contents_end = pointer - while contents_end < data_len: - sub_header_end, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True) - if contents_end == sub_header_end and encoded_data[contents_end - 2:contents_end] == b'\x00\x00': - break - if lengths_only: - return (pointer, contents_end) - if contents_end > data_len: - raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len)) - return ( - ( - first_octet >> 6, - (first_octet >> 5) & 1, - tag, - encoded_data[start:pointer], - encoded_data[pointer:contents_end - 2], - b'\x00\x00' - ), - contents_end - ) + while data_len < contents_end + 2 or encoded_data[contents_end:contents_end+2] != b'\x00\x00': + _, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True, depth=depth+1) + contents_end += 2 + trailer = b'\x00\x00' if contents_end > data_len: - raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len)) + raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end - pointer, data_len - pointer)) + + if lengths_only: + return (pointer, contents_end) + return ( ( first_octet >> 6, - (first_octet >> 5) & 1, + constructed, tag, encoded_data[start:pointer], - encoded_data[pointer:contents_end], - b'' + encoded_data[pointer:contents_end-len(trailer)], + trailer ), contents_end ) diff --git a/app/lib/package_control/deps/asn1crypto/tsp.py b/app/lib/package_control/deps/asn1crypto/tsp.py index bd40810..f006da9 100644 --- a/app/lib/package_control/deps/asn1crypto/tsp.py +++ b/app/lib/package_control/deps/asn1crypto/tsp.py @@ -169,7 +169,7 @@ class MetaData(Sequence): ] -class TimeStampAndCRL(SequenceOf): +class TimeStampAndCRL(Sequence): _fields = [ ('time_stamp', EncapsulatedContentInfo), ('crl', CertificateList, {'optional': True}), diff --git a/app/lib/package_control/deps/asn1crypto/version.py b/app/lib/package_control/deps/asn1crypto/version.py index 3cf4892..966b57a 100644 --- a/app/lib/package_control/deps/asn1crypto/version.py +++ b/app/lib/package_control/deps/asn1crypto/version.py @@ -2,5 +2,5 @@ from __future__ import unicode_literals, division, absolute_import, print_function -__version__ = '1.4.0' -__version_info__ = (1, 4, 0) +__version__ = '1.5.1' +__version_info__ = (1, 5, 1) diff --git a/app/lib/package_control/deps/asn1crypto/x509.py b/app/lib/package_control/deps/asn1crypto/x509.py index 16f7deb..38aa770 100644 --- a/app/lib/package_control/deps/asn1crypto/x509.py +++ b/app/lib/package_control/deps/asn1crypto/x509.py @@ -27,7 +27,7 @@ from ._errors import unwrap from ._iri import iri_to_uri, uri_to_iri from ._ordereddict import OrderedDict -from ._types import type_name, str_cls, bytes_to_list +from ._types import type_name, str_cls, byte_cls, bytes_to_list from .algos import AlgorithmIdentifier, AnyAlgorithmIdentifier, DigestAlgorithm, SignedDigestAlgorithm from .core import ( Any, @@ -708,7 +708,13 @@ def prepped_value(self): """ if self._prepped is None: - self._prepped = self._ldap_string_prep(self['value'].native) + native = self['value'].native + if isinstance(native, str_cls): + self._prepped = self._ldap_string_prep(native) + else: + if isinstance(native, byte_cls): + native = ' ' + native.decode('cp1252') + ' ' + self._prepped = native return self._prepped def __ne__(self, other): @@ -987,7 +993,7 @@ def build(cls, name_dict, use_printable=False): :param name_dict: A dict of name information, e.g. 
{"common_name": "Will Bond",
-            "country_name": "US", "organization": "Codex Non Sufficit LC"}
+            "country_name": "US", "organization_name": "Codex Non Sufficit LC"}

        :param use_printable:
            A bool - if PrintableString should be used for encoding instead of
@@ -1015,15 +1021,27 @@
         for attribute_name, attribute_value in name_dict.items():
             attribute_name = NameType.map(attribute_name)
-            if attribute_name == 'email_address':
-                value = EmailAddress(attribute_value)
-            elif attribute_name == 'domain_component':
-                value = DNSName(attribute_value)
+            attribute_class = NameTypeAndValue._oid_specs.get(attribute_name)
+            if not attribute_class:
+                raise ValueError(unwrap(
+                    '''
+                    No encoding specification found for %s
+                    ''',
+                    attribute_name
+                ))
+
+            if isinstance(attribute_value, attribute_class):
+                value = attribute_value
+
+            elif attribute_class is not DirectoryString:
+                value = attribute_class(attribute_value)
+
             elif attribute_name in set(['dn_qualifier', 'country_name', 'serial_number']):
                 value = DirectoryString(
                     name='printable_string',
                     value=PrintableString(attribute_value)
                 )
+
             else:
                 value = DirectoryString(
                     name=encoding_name,
@@ -2079,6 +2097,8 @@ class ExtensionId(ObjectIdentifier):
         '2.16.840.1.113730.1.1': 'netscape_certificate_type',
         # https://tools.ietf.org/html/rfc6962.html#page-14
         '1.3.6.1.4.1.11129.2.4.2': 'signed_certificate_timestamp_list',
+        # https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-wcce/3aec3e50-511a-42f9-a5d5-240af503e470
+        '1.3.6.1.4.1.311.20.2': 'microsoft_enroll_certtype',
     }


@@ -2114,6 +2134,9 @@ class Extension(Sequence):
         'entrust_version_extension': EntrustVersionInfo,
         'netscape_certificate_type': NetscapeCertificateType,
         'signed_certificate_timestamp_list': OctetString,
+        # Not UTF8String as Microsoft's docs claim, see:
+        # https://www.alvestrand.no/objectid/1.3.6.1.4.1.311.20.2.html
+        'microsoft_enroll_certtype': BMPString,
     }


diff --git a/app/lib/package_control/deps/oscrypto/__init__.py b/app/lib/package_control/deps/oscrypto/__init__.py
index eb27313..6e4d487 100644
--- a/app/lib/package_control/deps/oscrypto/__init__.py
+++ b/app/lib/package_control/deps/oscrypto/__init__.py
@@ -119,11 +119,17 @@ def use_openssl(libcrypto_path, libssl_path, trust_list_path=None):
     if not isinstance(libssl_path, str_cls):
         raise ValueError('libssl_path must be a unicode string, not %s' % type_name(libssl_path))

-    if not os.path.exists(libcrypto_path):
-        raise LibraryNotFoundError('libcrypto does not exist at %s' % libcrypto_path)
+    do_path_checks = True
+    if sys.platform == 'darwin':
+        mac_version_info = tuple(map(int, platform.mac_ver()[0].split('.')[:2]))
+        do_path_checks = mac_version_info < (10, 16)

-    if not os.path.exists(libssl_path):
-        raise LibraryNotFoundError('libssl does not exist at %s' % libssl_path)
+    if do_path_checks:
+        if not os.path.exists(libcrypto_path):
+            raise LibraryNotFoundError('libcrypto does not exist at %s' % libcrypto_path)
+
+        if not os.path.exists(libssl_path):
+            raise LibraryNotFoundError('libssl does not exist at %s' % libssl_path)

     if trust_list_path is not None:
         if not isinstance(trust_list_path, str_cls):
diff --git a/app/lib/package_control/deps/oscrypto/_asymmetric.py b/app/lib/package_control/deps/oscrypto/_asymmetric.py
index 1e7cca4..aec0385 100644
--- a/app/lib/package_control/deps/oscrypto/_asymmetric.py
+++ b/app/lib/package_control/deps/oscrypto/_asymmetric.py
@@ -241,10 +241,12 @@ def _unwrap_private_key_info(key_info):
      - asn1crypto.keys.ECPrivateKey
     """

-    if key_info.algorithm ==
'rsa': + key_alg = key_info.algorithm + + if key_alg == 'rsa' or key_alg == 'rsassa_pss': return key_info['private_key'].parsed - if key_info.algorithm == 'dsa': + if key_alg == 'dsa': params = key_info['private_key_algorithm']['parameters'] parsed = key_info['private_key'].parsed return DSAPrivateKey({ @@ -260,7 +262,7 @@ def _unwrap_private_key_info(key_info): 'private_key': parsed, }) - if key_info.algorithm == 'ec': + if key_alg == 'ec': parsed = key_info['private_key'].parsed parsed['parameters'] = key_info['private_key_algorithm']['parameters'] return parsed @@ -660,7 +662,7 @@ def _unarmor_pem(data, password=None): data = data.strip() # RSA private keys are encrypted after being DER-encoded, but before base64 - # encoding, so they need to be hanlded specially + # encoding, so they need to be handled specially if pem_header in set(['RSA PRIVATE KEY', 'DSA PRIVATE KEY', 'EC PRIVATE KEY']): algo = armor_type.group(2).lower() return ('private key', algo, _unarmor_pem_openssl_private(headers, der_bytes, password)) diff --git a/app/lib/package_control/deps/oscrypto/_ffi.py b/app/lib/package_control/deps/oscrypto/_ffi.py index fe6bdee..7b7ab87 100644 --- a/app/lib/package_control/deps/oscrypto/_ffi.py +++ b/app/lib/package_control/deps/oscrypto/_ffi.py @@ -111,8 +111,6 @@ def is_null(point): return True if ffi.getctype(ffi.typeof(point)) == 'void *': return False - if point[0] == ffi.NULL: - return True return False def errno(): @@ -212,6 +210,7 @@ def callback(library, signature_name, func): 'int': c_int, 'unsigned int': c_uint, 'size_t': ctypes.c_size_t, + 'uint16_t': ctypes.c_uint16, 'uint32_t': ctypes.c_uint32, } if sys.platform == 'win32': @@ -294,6 +293,8 @@ def cast(library, type_, value): return ctypes.cast(value, type_) def sizeof(library, value): + if isinstance(value, str_cls): + return ctypes.sizeof(getattr(library, value)) return ctypes.sizeof(value) def bytes_from_buffer(buffer, maxlen=None): diff --git a/app/lib/package_control/deps/oscrypto/_mac/_common_crypto_cffi.py b/app/lib/package_control/deps/oscrypto/_mac/_common_crypto_cffi.py new file mode 100644 index 0000000..30c768b --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_mac/_common_crypto_cffi.py @@ -0,0 +1,29 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .._ffi import register_ffi + +from cffi import FFI + + +__all__ = [ + 'CommonCrypto', +] + + +ffi = FFI() +ffi.cdef(""" + typedef uint32_t CCPBKDFAlgorithm; + + typedef uint32_t CCPseudoRandomAlgorithm; + typedef unsigned int uint; + + int CCKeyDerivationPBKDF(CCPBKDFAlgorithm algorithm, const char *password, size_t passwordLen, + const char *salt, size_t saltLen, CCPseudoRandomAlgorithm prf, uint rounds, + char *derivedKey, size_t derivedKeyLen); +""") + +common_crypto_path = '/usr/lib/system/libcommonCrypto.dylib' + +CommonCrypto = ffi.dlopen(common_crypto_path) +register_ffi(CommonCrypto, ffi) diff --git a/app/lib/package_control/deps/oscrypto/_mac/_core_foundation_cffi.py b/app/lib/package_control/deps/oscrypto/_mac/_core_foundation_cffi.py new file mode 100644 index 0000000..d0c7951 --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_mac/_core_foundation_cffi.py @@ -0,0 +1,375 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .._ffi import ( + buffer_from_bytes, + byte_string_from_buffer, + deref, + is_null, + new, + register_ffi, +) + +from cffi import FFI + + +__all__ = [ + 'CFHelpers', + 'CoreFoundation', +] + + +ffi 
= FFI() +ffi.cdef(""" + typedef bool Boolean; + typedef long CFIndex; + typedef unsigned long CFStringEncoding; + typedef unsigned long CFNumberType; + typedef unsigned long CFTypeID; + + typedef void *CFTypeRef; + typedef CFTypeRef CFArrayRef; + typedef CFTypeRef CFDataRef; + typedef CFTypeRef CFStringRef; + typedef CFTypeRef CFNumberRef; + typedef CFTypeRef CFBooleanRef; + typedef CFTypeRef CFDictionaryRef; + typedef CFTypeRef CFErrorRef; + typedef CFTypeRef CFAllocatorRef; + + typedef struct { + CFIndex version; + void *retain; + void *release; + void *copyDescription; + void *equal; + void *hash; + } CFDictionaryKeyCallBacks; + + typedef struct { + CFIndex version; + void *retain; + void *release; + void *copyDescription; + void *equal; + } CFDictionaryValueCallBacks; + + typedef struct { + CFIndex version; + void *retain; + void *release; + void *copyDescription; + void *equal; + } CFArrayCallBacks; + + CFIndex CFDataGetLength(CFDataRef theData); + const char *CFDataGetBytePtr(CFDataRef theData); + CFDataRef CFDataCreate(CFAllocatorRef allocator, const char *bytes, CFIndex length); + + CFDictionaryRef CFDictionaryCreate(CFAllocatorRef allocator, const void **keys, const void **values, + CFIndex numValues, const CFDictionaryKeyCallBacks *keyCallBacks, + const CFDictionaryValueCallBacks *valueCallBacks); + CFIndex CFDictionaryGetCount(CFDictionaryRef theDict); + + const char *CFStringGetCStringPtr(CFStringRef theString, CFStringEncoding encoding); + Boolean CFStringGetCString(CFStringRef theString, char *buffer, CFIndex bufferSize, CFStringEncoding encoding); + CFStringRef CFStringCreateWithCString(CFAllocatorRef alloc, const char *cStr, CFStringEncoding encoding); + + CFNumberRef CFNumberCreate(CFAllocatorRef allocator, CFNumberType theType, const void *valuePtr); + + CFStringRef CFCopyTypeIDDescription(CFTypeID type_id); + + void CFRelease(CFTypeRef cf); + void CFRetain(CFTypeRef cf); + + CFStringRef CFErrorCopyDescription(CFErrorRef err); + CFStringRef CFErrorGetDomain(CFErrorRef err); + CFIndex CFErrorGetCode(CFErrorRef err); + + Boolean CFBooleanGetValue(CFBooleanRef boolean); + + CFTypeID CFDictionaryGetTypeID(void); + CFTypeID CFNumberGetTypeID(void); + CFTypeID CFStringGetTypeID(void); + CFTypeID CFDataGetTypeID(void); + + CFArrayRef CFArrayCreate(CFAllocatorRef allocator, const void **values, CFIndex numValues, + const CFArrayCallBacks *callBacks); + CFIndex CFArrayGetCount(CFArrayRef theArray); + CFTypeRef CFArrayGetValueAtIndex(CFArrayRef theArray, CFIndex idx); + CFNumberType CFNumberGetType(CFNumberRef number); + Boolean CFNumberGetValue(CFNumberRef number, CFNumberType theType, void *valuePtr); + CFIndex CFDictionaryGetKeysAndValues(CFDictionaryRef theDict, const void **keys, const void **values); + CFTypeID CFGetTypeID(CFTypeRef cf); + + extern CFAllocatorRef kCFAllocatorDefault; + extern CFArrayCallBacks kCFTypeArrayCallBacks; + extern CFBooleanRef kCFBooleanTrue; + extern CFDictionaryKeyCallBacks kCFTypeDictionaryKeyCallBacks; + extern CFDictionaryValueCallBacks kCFTypeDictionaryValueCallBacks; +""") + +core_foundation_path = '/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation' + +CoreFoundation = ffi.dlopen(core_foundation_path) +register_ffi(CoreFoundation, ffi) + +kCFNumberCFIndexType = 14 +kCFStringEncodingUTF8 = 0x08000100 + + +class CFHelpers(): + """ + Namespace for core foundation helpers + """ + + _native_map = {} + + @classmethod + def register_native_mapping(cls, type_id, callback): + """ + Register a function to convert a core foundation data 
type into its
+        equivalent in python
+
+        :param type_id:
+            The CFTypeID for the type
+
+        :param callback:
+            A callback to pass the CFType object to
+        """
+
+        cls._native_map[int(type_id)] = callback
+
+    @staticmethod
+    def cf_number_to_number(value):
+        """
+        Converts a CFNumber object to a python float or integer
+
+        :param value:
+            The CFNumber object
+
+        :return:
+            A python number (float or integer)
+        """
+
+        type_ = CoreFoundation.CFNumberGetType(value)
+        type_name_ = {
+            1: 'int8_t',      # kCFNumberSInt8Type
+            2: 'int16_t',     # kCFNumberSInt16Type
+            3: 'int32_t',     # kCFNumberSInt32Type
+            4: 'int64_t',     # kCFNumberSInt64Type
+            5: 'float',       # kCFNumberFloat32Type
+            6: 'double',      # kCFNumberFloat64Type
+            7: 'char',        # kCFNumberCharType
+            8: 'short',       # kCFNumberShortType
+            9: 'int',         # kCFNumberIntType
+            10: 'long',       # kCFNumberLongType
+            11: 'long long',  # kCFNumberLongLongType
+            12: 'float',      # kCFNumberFloatType
+            13: 'double',     # kCFNumberDoubleType
+            14: 'long',       # kCFNumberCFIndexType
+            15: 'int',        # kCFNumberNSIntegerType
+            16: 'double',     # kCFNumberCGFloatType
+        }[type_]
+        output = new(CoreFoundation, type_name_ + ' *')
+        CoreFoundation.CFNumberGetValue(value, type_, output)
+        return deref(output)
+
+    @staticmethod
+    def cf_dictionary_to_dict(dictionary):
+        """
+        Converts a CFDictionary object into a python dictionary
+
+        :param dictionary:
+            The CFDictionary to convert
+
+        :return:
+            A python dict
+        """
+
+        dict_length = CoreFoundation.CFDictionaryGetCount(dictionary)
+
+        keys = new(CoreFoundation, 'CFTypeRef[%s]' % dict_length)
+        values = new(CoreFoundation, 'CFTypeRef[%s]' % dict_length)
+        CoreFoundation.CFDictionaryGetKeysAndValues(
+            dictionary,
+            keys,
+            values
+        )
+
+        output = {}
+        for index in range(0, dict_length):
+            output[CFHelpers.native(keys[index])] = CFHelpers.native(values[index])
+
+        return output
+
+    @classmethod
+    def native(cls, value):
+        """
+        Converts a CF* object into its python equivalent
+
+        :param value:
+            The CF* object to convert
+
+        :return:
+            The native python object
+        """
+
+        type_id = CoreFoundation.CFGetTypeID(value)
+        if type_id in cls._native_map:
+            return cls._native_map[type_id](value)
+        else:
+            return value
+
+    @staticmethod
+    def cf_string_to_unicode(value):
+        """
+        Creates a python unicode string from a CFString object
+
+        :param value:
+            The CFString to convert
+
+        :return:
+            A python unicode string
+        """
+
+        string_ptr = CoreFoundation.CFStringGetCStringPtr(
+            value,
+            kCFStringEncodingUTF8
+        )
+        string = None if is_null(string_ptr) else ffi.string(string_ptr)
+        if string is None:
+            buffer = buffer_from_bytes(1024)
+            result = CoreFoundation.CFStringGetCString(
+                value,
+                buffer,
+                1024,
+                kCFStringEncodingUTF8
+            )
+            if not result:
+                raise OSError('Error copying C string from CFStringRef')
+            string = byte_string_from_buffer(buffer)
+        if string is not None:
+            string = string.decode('utf-8')
+        return string
+
+    @staticmethod
+    def cf_string_from_unicode(string):
+        """
+        Creates a CFStringRef object from a unicode string
+
+        :param string:
+            The unicode string to create the CFString object from
+
+        :return:
+            A CFStringRef
+        """
+
+        return CoreFoundation.CFStringCreateWithCString(
+            CoreFoundation.kCFAllocatorDefault,
+            string.encode('utf-8'),
+            kCFStringEncodingUTF8
+        )
+
+    @staticmethod
+    def cf_data_to_bytes(value):
+        """
+        Extracts a bytestring from a CFData object
+
+        :param value:
+            A CFData object
+
+        :return:
+            A byte string
+        """
+
+        start = CoreFoundation.CFDataGetBytePtr(value)
+        num_bytes = CoreFoundation.CFDataGetLength(value)
+        return ffi.buffer(start, num_bytes)[:]
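+
+    # A usage sketch (illustration only, not part of the original module),
+    # assuming the CoreFoundation library loaded above:
+    #
+    #   data_ref = CFHelpers.cf_data_from_bytes(b'abc')
+    #   assert CFHelpers.cf_data_to_bytes(data_ref) == b'abc'
+    #   CoreFoundation.CFRelease(data_ref)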
+
+    @staticmethod
+    def cf_data_from_bytes(bytes_):
+        """
+        Creates a CFDataRef object from a byte string
+
+        :param bytes_:
+            The data to create the CFData object from
+
+        :return:
+            A CFDataRef
+        """
+
+        return CoreFoundation.CFDataCreate(
+            CoreFoundation.kCFAllocatorDefault,
+            bytes_,
+            len(bytes_)
+        )
+
+    @staticmethod
+    def cf_dictionary_from_pairs(pairs):
+        """
+        Creates a CFDictionaryRef object from a list of 2-element tuples
+        representing the key and value. Each key should be a CFStringRef and
+        each value some sort of CF* type.
+
+        :param pairs:
+            A list of 2-element tuples
+
+        :return:
+            A CFDictionaryRef
+        """
+
+        length = len(pairs)
+        keys = []
+        values = []
+        for pair in pairs:
+            key, value = pair
+            keys.append(key)
+            values.append(value)
+        return CoreFoundation.CFDictionaryCreate(
+            CoreFoundation.kCFAllocatorDefault,
+            keys,
+            values,
+            length,
+            ffi.addressof(CoreFoundation.kCFTypeDictionaryKeyCallBacks),
+            ffi.addressof(CoreFoundation.kCFTypeDictionaryValueCallBacks)
+        )
+
+    @staticmethod
+    def cf_array_from_list(values):
+        """
+        Creates a CFArrayRef object from a list of CF* type objects.
+
+        :param values:
+            A list of CF* type objects
+
+        :return:
+            A CFArrayRef
+        """
+
+        length = len(values)
+        return CoreFoundation.CFArrayCreate(
+            CoreFoundation.kCFAllocatorDefault,
+            values,
+            length,
+            ffi.addressof(CoreFoundation.kCFTypeArrayCallBacks)
+        )
+
+    @staticmethod
+    def cf_number_from_integer(integer):
+        """
+        Creates a CFNumber object from an integer
+
+        :param integer:
+            The integer to create the CFNumber for
+
+        :return:
+            A CFNumber
+        """
+
+        integer_as_long = ffi.new('long *', integer)
+        return CoreFoundation.CFNumberCreate(
+            CoreFoundation.kCFAllocatorDefault,
+            kCFNumberCFIndexType,
+            integer_as_long
+        )
diff --git a/app/lib/package_control/deps/oscrypto/_mac/_security.py b/app/lib/package_control/deps/oscrypto/_mac/_security.py
index 03ef22a..0d2e08d 100644
--- a/app/lib/package_control/deps/oscrypto/_mac/_security.py
+++ b/app/lib/package_control/deps/oscrypto/_mac/_security.py
@@ -119,6 +119,7 @@ class SecurityConst():
     CSSMERR_TP_CERT_NOT_VALID_YET = -2147409653
     CSSMERR_TP_CERT_REVOKED = -2147409652
     CSSMERR_TP_NOT_TRUSTED = -2147409622
+    CSSMERR_TP_CERT_SUSPENDED = -2147409651

     CSSM_CERT_X_509v3 = 0x00000004

diff --git a/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py b/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py
new file mode 100644
index 0000000..4cce32b
--- /dev/null
+++ b/app/lib/package_control/deps/oscrypto/_mac/_security_cffi.py
@@ -0,0 +1,249 @@
+# coding: utf-8
+from __future__ import unicode_literals, division, absolute_import, print_function
+
+import platform
+
+from .._ffi import register_ffi
+
+from cffi import FFI
+
+
+__all__ = [
+    'Security',
+    'version',
+    'version_info',
+]
+
+
+version = platform.mac_ver()[0]
+version_info = tuple(map(int, version.split('.')))
+
+if version_info < (10, 7):
+    raise OSError('Only OS X 10.7 and newer are supported, not %s.%s' % (version_info[0], version_info[1]))
+
+ffi = FFI()
+
+# It appears SSLCipherSuite is uint16_t on ARM64, but uint32_t on X86_64
+if platform.machine() == 'arm64':
+    ffi.cdef("""
+        typedef uint16_t SSLCipherSuite;
+    """)
+else:
+    ffi.cdef("""
+        typedef uint32_t SSLCipherSuite;
+    """)
+
+ffi.cdef("""
+    typedef bool Boolean;
+    typedef long CFIndex;
+    typedef int32_t OSStatus;
+    typedef unsigned long CFTypeID;
+    typedef uint32_t SecTrustSettingsDomain;
+    typedef uint32_t SecPadding;
+    typedef uint32_t SecItemImportExportFlags;
+
typedef uint32_t SecKeyImportExportFlags; + typedef uint32_t SecExternalFormat; + typedef uint32_t SecExternalItemType; + typedef uint32_t CSSM_ALGORITHMS; + typedef uint64_t CSSM_CC_HANDLE; + typedef uint32_t CSSM_KEYUSE; + typedef uint32_t CSSM_CERT_TYPE; + typedef uint32_t SSLProtocol; + typedef uint32_t SecTrustResultType; + + typedef void *CFTypeRef; + typedef CFTypeRef CFArrayRef; + typedef CFTypeRef CFDataRef; + typedef CFTypeRef CFStringRef; + typedef CFTypeRef CFDictionaryRef; + typedef CFTypeRef CFErrorRef; + typedef CFTypeRef CFAllocatorRef; + + typedef ... *SecKeyRef; + typedef ... *SecCertificateRef; + typedef ... *SecTransformRef; + typedef ... *SecRandomRef; + typedef ... *SecPolicyRef; + typedef ... *SecPolicySearchRef; + typedef ... *SecAccessRef; + typedef struct + { + uint32_t version; + SecKeyImportExportFlags flags; + CFTypeRef passphrase; + CFStringRef alertTitle; + CFStringRef alertPrompt; + SecAccessRef accessRef; + CFArrayRef keyUsage; + CFArrayRef keyAttributes; + } SecItemImportExportKeyParameters; + typedef ... *SecKeychainRef; + typedef ... *SSLContextRef; + typedef ... *SecTrustRef; + typedef uint32_t SSLConnectionRef; + + typedef struct { + uint32_t Length; + char *Data; + } CSSM_DATA, CSSM_OID; + + typedef struct { + uint32_t Version; + uint32_t Flags; + CSSM_DATA *LocalResponder; + CSSM_DATA *LocalResponderCert; + } CSSM_APPLE_TP_OCSP_OPTIONS; + + typedef struct { + uint32_t Version; + uint32_t CrlFlags; + void *crlStore; + } CSSM_APPLE_TP_CRL_OPTIONS; + + OSStatus SecKeychainCreate(char *path, uint32_t pass_len, void *pass, + Boolean prompt, SecAccessRef initialAccess, SecKeychainRef *keychain); + OSStatus SecKeychainDelete(SecKeychainRef keychain); + int SecRandomCopyBytes(SecRandomRef rnd, size_t count, char *bytes); + SecKeyRef SecKeyCreateFromData(CFDictionaryRef parameters, CFDataRef keyData, CFErrorRef *error); + SecTransformRef SecEncryptTransformCreate(SecKeyRef keyRef, CFErrorRef *error); + SecTransformRef SecDecryptTransformCreate(SecKeyRef keyRef, CFErrorRef *error); + Boolean SecTransformSetAttribute(SecTransformRef transformRef, CFStringRef key, CFTypeRef value, CFErrorRef *error); + CFTypeRef SecTransformExecute(SecTransformRef transformRef, CFErrorRef *errorRef); + SecTransformRef SecVerifyTransformCreate(SecKeyRef key, CFDataRef signature, CFErrorRef *error); + SecTransformRef SecSignTransformCreate(SecKeyRef key, CFErrorRef *error); + SecCertificateRef SecCertificateCreateWithData(CFAllocatorRef allocator, CFDataRef data); + OSStatus SecCertificateCopyPublicKey(SecCertificateRef certificate, SecKeyRef *key); + SecKeyRef SecCertificateCopyKey(SecCertificateRef certificate); + CFStringRef SecCopyErrorMessageString(OSStatus status, void *reserved); + OSStatus SecTrustCopyAnchorCertificates(CFArrayRef *anchors); + CFDataRef SecCertificateCopyData(SecCertificateRef certificate); + OSStatus SecTrustSettingsCopyCertificates(SecTrustSettingsDomain domain, CFArrayRef *certArray); + OSStatus SecTrustSettingsCopyTrustSettings(SecCertificateRef certRef, SecTrustSettingsDomain domain, + CFArrayRef *trustSettings); + CFDictionaryRef SecPolicyCopyProperties(SecPolicyRef policyRef); + CFTypeID SecPolicyGetTypeID(void); + OSStatus SecKeyEncrypt(SecKeyRef key, SecPadding padding, const char *plainText, size_t plainTextLen, + char *cipherText, size_t *cipherTextLen); + OSStatus SecKeyDecrypt(SecKeyRef key, SecPadding padding, const char *cipherText, size_t cipherTextLen, + char *plainText, size_t *plainTextLen); + OSStatus SecKeyRawSign(SecKeyRef key, 
SecPadding padding, const char *dataToSign, size_t dataToSignLen, + char *sig, size_t * sigLen); + OSStatus SecKeyRawVerify(SecKeyRef key, SecPadding padding, const char *signedData, size_t signedDataLen, + const char *sig, size_t sigLen); + OSStatus SecItemImport(CFDataRef importedData, CFStringRef fileNameOrExtension, + SecExternalFormat *inputFormat, SecExternalItemType *itemType, + SecItemImportExportFlags flags, const SecItemImportExportKeyParameters *keyParams, + SecKeychainRef importKeychain, CFArrayRef *outItems); + OSStatus SecItemExport(CFTypeRef secItemOrArray, SecExternalFormat outputFormat, SecItemImportExportFlags flags, + const SecItemImportExportKeyParameters *keyParams, CFDataRef *exportedData); + OSStatus SecAccessCreate(CFStringRef descriptor, CFArrayRef trustedlist, SecAccessRef *accessRef); + OSStatus SecKeyCreatePair(SecKeychainRef keychainRef, CSSM_ALGORITHMS algorithm, uint32_t keySizeInBits, + CSSM_CC_HANDLE contextHandle, CSSM_KEYUSE publicKeyUsage, uint32_t publicKeyAttr, + CSSM_KEYUSE privateKeyUsage, uint32_t privateKeyAttr, SecAccessRef initialAccess, + SecKeyRef* publicKeyRef, SecKeyRef* privateKeyRef); + OSStatus SecKeychainItemDelete(SecKeyRef itemRef); + + typedef OSStatus (*SSLReadFunc)(SSLConnectionRef connection, char *data, size_t *dataLength); + typedef OSStatus (*SSLWriteFunc)(SSLConnectionRef connection, const char *data, size_t *dataLength); + OSStatus SSLSetIOFuncs(SSLContextRef context, SSLReadFunc readFunc, SSLWriteFunc writeFunc); + + OSStatus SSLSetPeerID(SSLContextRef context, const char *peerID, size_t peerIDLen); + + OSStatus SSLSetConnection(SSLContextRef context, SSLConnectionRef connection); + OSStatus SSLSetPeerDomainName(SSLContextRef context, const char *peerName, size_t peerNameLen); + OSStatus SSLHandshake(SSLContextRef context); + OSStatus SSLGetBufferedReadSize(SSLContextRef context, size_t *bufSize); + OSStatus SSLRead(SSLContextRef context, char *data, size_t dataLength, size_t *processed); + OSStatus SSLWrite(SSLContextRef context, const char *data, size_t dataLength, size_t *processed); + OSStatus SSLClose(SSLContextRef context); + + OSStatus SSLGetNumberSupportedCiphers(SSLContextRef context, size_t *numCiphers); + OSStatus SSLGetSupportedCiphers(SSLContextRef context, SSLCipherSuite *ciphers, size_t *numCiphers); + OSStatus SSLSetEnabledCiphers(SSLContextRef context, const SSLCipherSuite *ciphers, size_t numCiphers); + OSStatus SSLGetNumberEnabledCiphers(SSLContextRef context, size_t *numCiphers); + OSStatus SSLGetEnabledCiphers(SSLContextRef context, SSLCipherSuite *ciphers, size_t *numCiphers); + + OSStatus SSLGetNegotiatedCipher(SSLContextRef context, SSLCipherSuite *cipherSuite); + OSStatus SSLGetNegotiatedProtocolVersion(SSLContextRef context, SSLProtocol *protocol); + + OSStatus SSLCopyPeerTrust(SSLContextRef context, SecTrustRef *trust); + OSStatus SecTrustGetCssmResultCode(SecTrustRef trust, OSStatus *resultCode); + CFIndex SecTrustGetCertificateCount(SecTrustRef trust); + SecCertificateRef SecTrustGetCertificateAtIndex(SecTrustRef trust, CFIndex ix); + OSStatus SecTrustSetAnchorCertificates(SecTrustRef trust, CFArrayRef anchorCertificates); + OSStatus SecTrustSetAnchorCertificatesOnly(SecTrustRef trust, Boolean anchorCertificatesOnly); + OSStatus SecTrustSetPolicies(SecTrustRef trust, CFArrayRef policies); + SecPolicyRef SecPolicyCreateSSL(Boolean server, CFStringRef hostname); + OSStatus SecPolicySearchCreate(CSSM_CERT_TYPE certType, const CSSM_OID *policyOID, const CSSM_DATA *value, + SecPolicySearchRef 
*searchRef); + OSStatus SecPolicySearchCopyNext(SecPolicySearchRef searchRef, SecPolicyRef *policyRef); + OSStatus SecPolicySetValue(SecPolicyRef policyRef, const CSSM_DATA *value); + OSStatus SecTrustEvaluate(SecTrustRef trust, SecTrustResultType *result); + + extern SecRandomRef kSecRandomDefault; + + extern CFStringRef kSecPaddingKey; + extern CFStringRef kSecPaddingPKCS7Key; + extern CFStringRef kSecPaddingPKCS5Key; + extern CFStringRef kSecPaddingPKCS1Key; + extern CFStringRef kSecPaddingOAEPKey; + extern CFStringRef kSecPaddingNoneKey; + extern CFStringRef kSecModeCBCKey; + extern CFStringRef kSecTransformInputAttributeName; + extern CFStringRef kSecDigestTypeAttribute; + extern CFStringRef kSecDigestLengthAttribute; + extern CFStringRef kSecIVKey; + + extern CFStringRef kSecAttrIsExtractable; + + extern CFStringRef kSecDigestSHA1; + extern CFStringRef kSecDigestSHA2; + extern CFStringRef kSecDigestMD5; + + extern CFStringRef kSecAttrKeyType; + + extern CFTypeRef kSecAttrKeyTypeRSA; + extern CFTypeRef kSecAttrKeyTypeDSA; + extern CFTypeRef kSecAttrKeyTypeECDSA; + + extern CFStringRef kSecAttrKeySizeInBits; + extern CFStringRef kSecAttrLabel; + + extern CFTypeRef kSecAttrCanSign; + extern CFTypeRef kSecAttrCanVerify; + + extern CFTypeRef kSecAttrKeyTypeAES; + extern CFTypeRef kSecAttrKeyTypeRC4; + extern CFTypeRef kSecAttrKeyTypeRC2; + extern CFTypeRef kSecAttrKeyType3DES; + extern CFTypeRef kSecAttrKeyTypeDES; +""") + +if version_info < (10, 8): + ffi.cdef(""" + OSStatus SSLNewContext(Boolean isServer, SSLContextRef *contextPtr); + OSStatus SSLDisposeContext(SSLContextRef context); + + OSStatus SSLSetEnableCertVerify(SSLContextRef context, Boolean enableVerify); + + OSStatus SSLSetProtocolVersionEnabled(SSLContextRef context, SSLProtocol protocol, Boolean enable); + """) +else: + ffi.cdef(""" + typedef uint32_t SSLProtocolSide; + typedef uint32_t SSLConnectionType; + typedef uint32_t SSLSessionOption; + + SSLContextRef SSLCreateContext(CFAllocatorRef alloc, SSLProtocolSide protocolSide, + SSLConnectionType connectionType); + + OSStatus SSLSetSessionOption(SSLContextRef context, SSLSessionOption option, Boolean value); + + OSStatus SSLSetProtocolVersionMin(SSLContextRef context, SSLProtocol minVersion); + OSStatus SSLSetProtocolVersionMax(SSLContextRef context, SSLProtocol maxVersion); + """) + +security_path = '/System/Library/Frameworks/Security.framework/Security' + +Security = ffi.dlopen(security_path) +register_ffi(Security, ffi) diff --git a/app/lib/package_control/deps/oscrypto/_mac/_security_ctypes.py b/app/lib/package_control/deps/oscrypto/_mac/_security_ctypes.py index ee1be00..3dfdefa 100644 --- a/app/lib/package_control/deps/oscrypto/_mac/_security_ctypes.py +++ b/app/lib/package_control/deps/oscrypto/_mac/_security_ctypes.py @@ -2,7 +2,20 @@ from __future__ import unicode_literals, division, absolute_import, print_function import platform -from ctypes import c_void_p, c_int32, c_char_p, c_size_t, c_byte, c_int, c_uint32, c_uint64, c_ulong, c_long, c_bool +from ctypes import ( + c_bool, + c_byte, + c_char_p, + c_int, + c_int32, + c_long, + c_size_t, + c_uint16, + c_uint32, + c_uint64, + c_ulong, + c_void_p, +) from ctypes import CDLL, POINTER, CFUNCTYPE, Structure from .._ffi import FFIEngineError @@ -57,7 +70,11 @@ SecExternalItemType = c_uint32 SecPadding = c_uint32 SSLProtocol = c_uint32 -SSLCipherSuite = c_uint32 +# It appears SSLCipherSuite is uint16_t on ARM64, but uint32_t on X86_64 +if platform.machine() == 'arm64': + SSLCipherSuite = c_uint16 +else: + 
SSLCipherSuite = c_uint32 SecPolicyRef = POINTER(c_void_p) CSSM_CC_HANDLE = c_uint64 CSSM_ALGORITHMS = c_uint32 @@ -186,6 +203,11 @@ class SecItemImportExportKeyParameters(Structure): ] Security.SecCertificateCreateWithData.restype = SecCertificateRef + Security.SecCertificateCopyKey.argtypes = [ + SecCertificateRef, + ] + Security.SecCertificateCopyKey.restype = SecKeyRef + Security.SecCertificateCopyPublicKey.argtypes = [ SecCertificateRef, POINTER(SecKeyRef) diff --git a/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py b/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py index 1d30aa1..3bb851b 100644 --- a/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py +++ b/app/lib/package_control/deps/oscrypto/_mac/asymmetric.py @@ -250,10 +250,25 @@ def public_key(self): """ if not self._public_key and self.sec_certificate_ref: + if self.asn1.signature_algo == "rsassa_pss": + # macOS doesn't like importing RSA PSS certs, so we treat it like a + # traditional RSA cert + asn1 = self.asn1.copy() + asn1['tbs_certificate']['subject_public_key_info']['algorithm']['algorithm'] = 'rsa' + temp_cert = _load_x509(asn1) + sec_cert_ref = temp_cert.sec_certificate_ref + else: + sec_cert_ref = self.sec_certificate_ref + sec_public_key_ref_pointer = new(Security, 'SecKeyRef *') - res = Security.SecCertificateCopyPublicKey(self.sec_certificate_ref, sec_public_key_ref_pointer) - handle_sec_error(res) - sec_public_key_ref = unwrap(sec_public_key_ref_pointer) + if osx_version_info >= (10, 14): + sec_public_key_ref = Security.SecCertificateCopyKey(sec_cert_ref) + if is_null(sec_public_key_ref): + raise ValueError('Unable to extract public key from certificate') + else: + res = Security.SecCertificateCopyPublicKey(sec_cert_ref, sec_public_key_ref_pointer) + handle_sec_error(res) + sec_public_key_ref = unwrap(sec_public_key_ref_pointer) self._public_key = PublicKey(sec_public_key_ref, self.asn1['tbs_certificate']['subject_public_key_info']) return self._public_key @@ -274,6 +289,8 @@ def self_signed(self): if signature_algo == 'rsassa_pkcs1v15': verify_func = rsa_pkcs1v15_verify + elif signature_algo == 'rsassa_pss': + verify_func = rsa_pss_verify elif signature_algo == 'dsa': verify_func = dsa_verify elif signature_algo == 'ecdsa': @@ -832,7 +849,14 @@ def _load_key(key_object): )) if isinstance(key_object, PublicKeyInfo): - source = key_object.dump() + if key_object.algorithm == 'rsassa_pss': + # We have to masquerade an RSA PSS key as plain RSA or it won't + # import properly + temp_key_object = key_object.copy() + temp_key_object['algorithm']['algorithm'] = 'rsa' + source = temp_key_object.dump() + else: + source = key_object.dump() item_type = SecurityConst.kSecItemTypePublicKey else: @@ -1392,7 +1416,8 @@ def rsa_pss_verify(certificate_or_public_key, signature, data, hash_algorithm): type_name(data) )) - if certificate_or_public_key.algorithm != 'rsa': + cp_algo = certificate_or_public_key.algorithm + if cp_algo != 'rsa' and cp_algo != 'rsassa_pss': raise ValueError('The key specified is not an RSA public key') hash_length = { @@ -1735,7 +1760,8 @@ def rsa_pss_sign(private_key, data, hash_algorithm): type_name(data) )) - if private_key.algorithm != 'rsa': + pk_algo = private_key.algorithm + if pk_algo != 'rsa' and pk_algo != 'rsassa_pss': raise ValueError('The key specified is not an RSA private key') hash_length = { diff --git a/app/lib/package_control/deps/oscrypto/_mac/tls.py b/app/lib/package_control/deps/oscrypto/_mac/tls.py index a0ca540..50dc158 100644 --- 
a/app/lib/package_control/deps/oscrypto/_mac/tls.py +++ b/app/lib/package_control/deps/oscrypto/_mac/tls.py @@ -29,6 +29,7 @@ new, null, pointer_set, + sizeof, struct, struct_bytes, unwrap, @@ -50,6 +51,7 @@ raise_expired_not_yet_valid, raise_handshake, raise_hostname, + raise_lifetime_too_long, raise_no_issuer, raise_protocol_error, raise_protocol_version, @@ -103,7 +105,7 @@ def _read_callback(connection_id, data_buffer, data_length_pointer): Callback called by Secure Transport to actually read the socket :param connection_id: - An integer identifing the connection + An integer identifying the connection :param data_buffer: A char pointer FFI type to write the data to @@ -218,7 +220,7 @@ def _write_callback(connection_id, data_buffer, data_length_pointer): Callback called by Secure Transport to actually write to the socket :param connection_id: - An integer identifing the connection + An integer identifying the connection :param data_buffer: A char pointer FFI type containing the data to write @@ -463,7 +465,7 @@ def wrap(cls, socket, hostname, session=None): def __init__(self, address, port, timeout=10, session=None): """ :param address: - A unicode string of the domain name or IP address to conenct to + A unicode string of the domain name or IP address to connect to :param port: An integer of the port number to connect to @@ -632,8 +634,8 @@ def _handshake(self): supported_ciphers = deref(supported_ciphers_pointer) - cipher_buffer = buffer_from_bytes(supported_ciphers * 4) - supported_cipher_suites_pointer = cast(Security, 'uint32_t *', cipher_buffer) + cipher_buffer = buffer_from_bytes(supported_ciphers * sizeof(Security, 'SSLCipherSuite')) + supported_cipher_suites_pointer = cast(Security, 'SSLCipherSuite *', cipher_buffer) result = Security.SSLGetSupportedCiphers( session_context, supported_cipher_suites_pointer, @@ -644,7 +646,7 @@ def _handshake(self): supported_ciphers = deref(supported_ciphers_pointer) supported_cipher_suites = array_from_pointer( Security, - 'uint32_t', + 'SSLCipherSuite', supported_cipher_suites_pointer, supported_ciphers ) @@ -657,9 +659,9 @@ def _handshake(self): good_ciphers.append(supported_cipher_suite) num_good_ciphers = len(good_ciphers) - good_ciphers_array = new(Security, 'uint32_t[]', num_good_ciphers) + good_ciphers_array = new(Security, 'SSLCipherSuite[]', num_good_ciphers) array_set(good_ciphers_array, good_ciphers) - good_ciphers_pointer = cast(Security, 'uint32_t *', good_ciphers_array) + good_ciphers_pointer = cast(Security, 'SSLCipherSuite *', good_ciphers_array) result = Security.SSLSetEnabledCiphers( session_context, good_ciphers_pointer, @@ -875,6 +877,7 @@ def _handshake(self): expired = result_code == SecurityConst.CSSMERR_TP_CERT_EXPIRED not_yet_valid = result_code == SecurityConst.CSSMERR_TP_CERT_NOT_VALID_YET bad_hostname = result_code == SecurityConst.CSSMERR_APPLETP_HOSTNAME_MISMATCH + validity_too_long = result_code == SecurityConst.CSSMERR_TP_CERT_SUSPENDED # On macOS 10.12, some expired certificates return errSSLInternal if osx_version_info >= (10, 12): @@ -903,6 +906,9 @@ def _handshake(self): elif self_signed: raise_self_signed(cert) + elif validity_too_long: + raise_lifetime_too_long(cert) + if detect_client_auth_request(self._server_hello): raise_client_auth() diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py index 2881689..90768d7 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py +++ 
b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto.py @@ -22,6 +22,7 @@ __all__ = [ 'handle_openssl_error', 'libcrypto', + 'libcrypto_legacy_support', 'libcrypto_version', 'libcrypto_version_info', 'LibcryptoConst', @@ -38,6 +39,19 @@ libcrypto.OPENSSL_config(null()) +# This enables legacy algorithms in OpenSSL 3.0, such as RC2, etc +# which are used by various tests and some old protocols and things +# like PKCS12 +libcrypto_legacy_support = True +if libcrypto_version_info >= (3, ): + + libcrypto.OSSL_PROVIDER_load(null(), "legacy".encode("ascii")) + libcrypto.OSSL_PROVIDER_load(null(), "default".encode("ascii")) + + if libcrypto.OSSL_PROVIDER_available(null(), "legacy".encode("ascii")) == 0: + libcrypto_legacy_support = False + + def _try_decode(value): try: @@ -57,7 +71,7 @@ def _try_decode(value): def handle_openssl_error(result, exception_class=None): """ - Checks if an error occured, and if so throws an OSError containing the + Checks if an error occurred, and if so throws an OSError containing the last OpenSSL error message :param result: @@ -95,9 +109,15 @@ def peek_openssl_error(): """ error = libcrypto.ERR_peek_error() - lib = int((error >> 24) & 0xff) - func = int((error >> 12) & 0xfff) - reason = int(error & 0xfff) + if libcrypto_version_info < (3, 0): + lib = int((error >> 24) & 0xff) + func = int((error >> 12) & 0xfff) + reason = int(error & 0xfff) + else: + lib = int((error >> 23) & 0xff) + # OpenSSL 3.0 removed ERR_GET_FUNC() + func = 0 + reason = int(error & 0x7fffff) return (lib, func, reason) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py new file mode 100644 index 0000000..6f901ea --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_cffi.py @@ -0,0 +1,278 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import re + +from .. 
import _backend_config
+from .._errors import pretty_message
+from .._ffi import get_library, register_ffi
+from ..errors import LibraryNotFoundError
+
+from cffi import FFI
+
+
+__all__ = [
+    'is_libressl',
+    'libcrypto',
+    'libressl_version',
+    'libressl_version_info',
+    'version',
+    'version_info',
+]
+
+libcrypto_path = _backend_config().get('libcrypto_path')
+if libcrypto_path is None:
+    libcrypto_path = get_library('crypto', 'libcrypto.dylib', '42')
+if not libcrypto_path:
+    raise LibraryNotFoundError('The library libcrypto could not be found')
+
+try:
+    vffi = FFI()
+    vffi.cdef("const char *SSLeay_version(int type);")
+    version_string = vffi.string(vffi.dlopen(libcrypto_path).SSLeay_version(0)).decode('utf-8')
+except AttributeError:
+    vffi = FFI()
+    vffi.cdef("const char *OpenSSL_version(int type);")
+    version_string = vffi.string(vffi.dlopen(libcrypto_path).OpenSSL_version(0)).decode('utf-8')
+
+is_libressl = 'LibreSSL' in version_string
+
+version_match = re.search('\\b(\\d+\\.\\d+\\.\\d+[a-z]*)\\b', version_string)
+if not version_match:
+    version_match = re.search('(?<=LibreSSL )(\\d+\\.\\d+(\\.\\d+)?)\\b', version_string)
+if not version_match:
+    raise LibraryNotFoundError('Error detecting the version of libcrypto')
+version = version_match.group(1)
+version_parts = re.sub('(\\d+)([a-z]+)', '\\1.\\2', version).split('.')
+version_info = tuple(int(part) if part.isdigit() else part for part in version_parts)
+
+# LibreSSL is compatible with libcrypto from OpenSSL 1.0.1
+libressl_version = ''
+libressl_version_info = tuple()
+if is_libressl:
+    libressl_version = version
+    libressl_version_info = version_info
+    version = '1.0.1'
+    version_info = (1, 0, 1)
+
+ffi = FFI()
+
+libcrypto = ffi.dlopen(libcrypto_path)
+register_ffi(libcrypto, ffi)
+
+if version_info < (0, 9, 8):
+    raise LibraryNotFoundError(pretty_message(
+        '''
+        OpenSSL versions older than 0.9.8 are not supported - found version %s
+        ''',
+        version
+    ))
+
+if version_info < (1, 1):
+    ffi.cdef("""
+        void ERR_load_crypto_strings(void);
+        void ERR_free_strings(void);
+    """)
+
+
+if version_info >= (3, ):
+    ffi.cdef("""
+        typedef ... OSSL_LIB_CTX;
+        typedef ... OSSL_PROVIDER;
+
+        int OSSL_PROVIDER_available(OSSL_LIB_CTX *libctx, const char *name);
+        OSSL_PROVIDER *OSSL_PROVIDER_load(OSSL_LIB_CTX *libctx, const char *name);
+    """)
+
+# The typedef uintptr_t lines here allow us to check for a NULL pointer,
+# without having to redefine the structs in our code. This is kind of a hack,
+# but it shouldn't cause problems since we treat these as opaque.
+ffi.cdef("""
+    typedef ... EVP_MD;
+    typedef uintptr_t EVP_CIPHER_CTX;
+    typedef ... EVP_CIPHER;
+    typedef ... ENGINE;
+    typedef uintptr_t EVP_PKEY;
+    typedef uintptr_t X509;
+    typedef uintptr_t DH;
+    typedef uintptr_t RSA;
+    typedef uintptr_t DSA;
+    typedef uintptr_t EC_KEY;
+    typedef ... EVP_MD_CTX;
+    typedef ... EVP_PKEY_CTX;
+    typedef ... BN_GENCB;
+    typedef ...
BIGNUM; + + unsigned long ERR_get_error(void); + char *ERR_error_string(unsigned long e, char *buf); + unsigned long ERR_peek_error(void); + + void OPENSSL_config(const char *config_name); + + EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void); + void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *ctx); + + int EVP_CIPHER_CTX_set_key_length(EVP_CIPHER_CTX *x, int keylen); + int EVP_CIPHER_CTX_set_padding(EVP_CIPHER_CTX *x, int padding); + int EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *ctx, int type, int arg, void *ptr); + + const EVP_CIPHER *EVP_aes_128_cbc(void); + const EVP_CIPHER *EVP_aes_192_cbc(void); + const EVP_CIPHER *EVP_aes_256_cbc(void); + const EVP_CIPHER *EVP_des_cbc(void); + const EVP_CIPHER *EVP_des_ede_cbc(void); + const EVP_CIPHER *EVP_des_ede3_cbc(void); + const EVP_CIPHER *EVP_rc4(void); + const EVP_CIPHER *EVP_rc2_cbc(void); + + int EVP_EncryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, + ENGINE *impl, const char *key, + const char *iv); + int EVP_EncryptUpdate(EVP_CIPHER_CTX *ctx, char *out, int *outl, + const char *in, int inl); + int EVP_EncryptFinal_ex(EVP_CIPHER_CTX *ctx, char *out, int *outl); + + int EVP_DecryptInit_ex(EVP_CIPHER_CTX *ctx, const EVP_CIPHER *cipher, + ENGINE *impl, const char *key, + const char *iv); + int EVP_DecryptUpdate(EVP_CIPHER_CTX *ctx, char *out, int *outl, + const char *in, int inl); + int EVP_DecryptFinal_ex(EVP_CIPHER_CTX *ctx, char *out, int *outl); + + EVP_PKEY *d2i_AutoPrivateKey(EVP_PKEY **a, const char **pp, + long length); + EVP_PKEY *d2i_PUBKEY(EVP_PKEY **a, const char **pp, long length); + int i2d_PUBKEY(EVP_PKEY *a, char **pp); + void EVP_PKEY_free(EVP_PKEY *key); + + X509 *d2i_X509(X509 **px, const char **in, int len); + int i2d_X509(X509 *x, char **out); + EVP_PKEY *X509_get_pubkey(X509 *x); + void X509_free(X509 *a); + + RSA *EVP_PKEY_get1_RSA(EVP_PKEY *pkey); + void RSA_free(RSA *r); + + int RSA_public_encrypt(int flen, const char *from, + char *to, RSA *rsa, int padding); + int RSA_private_encrypt(int flen, const char *from, + char *to, RSA *rsa, int padding); + int RSA_public_decrypt(int flen, const char *from, + char *to, RSA *rsa, int padding); + int RSA_private_decrypt(int flen, const char *from, + char *to, RSA *rsa, int padding); + + int EVP_DigestUpdate(EVP_MD_CTX *ctx, const void *d, unsigned int cnt); + + const EVP_MD *EVP_md5(void); + const EVP_MD *EVP_sha1(void); + const EVP_MD *EVP_sha224(void); + const EVP_MD *EVP_sha256(void); + const EVP_MD *EVP_sha384(void); + const EVP_MD *EVP_sha512(void); + + int PKCS12_key_gen_uni(char *pass, int passlen, char *salt, + int saltlen, int id, int iter, int n, + char *out, const EVP_MD *md_type); + + void BN_free(BIGNUM *a); + int BN_dec2bn(BIGNUM **a, const char *str); + + DH *DH_new(void); + int DH_generate_parameters_ex(DH *dh, int prime_len, int generator, BN_GENCB *cb); + int i2d_DHparams(const DH *a, char **pp); + void DH_free(DH *dh); + + RSA *RSA_new(void); + int RSA_generate_key_ex(RSA *rsa, int bits, BIGNUM *e, BN_GENCB *cb); + int i2d_RSAPublicKey(RSA *a, char **pp); + int i2d_RSAPrivateKey(RSA *a, char **pp); + + DSA *DSA_new(void); + int DSA_generate_parameters_ex(DSA *dsa, int bits, + const char *seed, int seed_len, int *counter_ret, + unsigned long *h_ret, BN_GENCB *cb); + int DSA_generate_key(DSA *a); + int i2d_DSA_PUBKEY(const DSA *a, char **pp); + int i2d_DSAPrivateKey(const DSA *a, char **pp); + void DSA_free(DSA *dsa); + + EC_KEY *EC_KEY_new_by_curve_name(int nid); + int EC_KEY_generate_key(EC_KEY *key); + void EC_KEY_set_asn1_flag(EC_KEY *, int); + int 
i2d_ECPrivateKey(EC_KEY *key, char **out); + int i2o_ECPublicKey(EC_KEY *key, char **out); + void EC_KEY_free(EC_KEY *key); +""") + +if version_info < (3, ): + ffi.cdef(""" + int EVP_PKEY_size(EVP_PKEY *pkey); + """) +else: + ffi.cdef(""" + int EVP_PKEY_get_size(EVP_PKEY *pkey); + """) + +if version_info < (1, 1): + ffi.cdef(""" + EVP_MD_CTX *EVP_MD_CTX_create(void); + void EVP_MD_CTX_destroy(EVP_MD_CTX *ctx); + """) +else: + ffi.cdef(""" + EVP_MD_CTX *EVP_MD_CTX_new(void); + void EVP_MD_CTX_free(EVP_MD_CTX *ctx); + """) + +if version_info < (1,): + ffi.cdef(""" + typedef ... *DSA_SIG; + typedef ... *ECDSA_SIG; + + DSA_SIG *DSA_do_sign(const char *dgst, int dlen, DSA *dsa); + ECDSA_SIG *ECDSA_do_sign(const char *dgst, int dgst_len, EC_KEY *eckey); + + DSA_SIG *d2i_DSA_SIG(DSA_SIG **v, const char **pp, long length); + ECDSA_SIG *d2i_ECDSA_SIG(ECDSA_SIG **v, const char **pp, long len); + + int i2d_DSA_SIG(const DSA_SIG *a, char **pp); + int i2d_ECDSA_SIG(const ECDSA_SIG *a, char **pp); + + int DSA_do_verify(const char *dgst, int dgst_len, DSA_SIG *sig, DSA *dsa); + int ECDSA_do_verify(const char *dgst, int dgst_len, const ECDSA_SIG *sig, EC_KEY *eckey); + + void DSA_SIG_free(DSA_SIG *a); + void ECDSA_SIG_free(ECDSA_SIG *a); + + DSA *EVP_PKEY_get1_DSA(EVP_PKEY *pkey); + EC_KEY *EVP_PKEY_get1_EC_KEY(EVP_PKEY *pkey); + + int RSA_verify_PKCS1_PSS(RSA *rsa, const char *mHash, + const EVP_MD *Hash, const char *EM, + int sLen); + int RSA_padding_add_PKCS1_PSS(RSA *rsa, char *EM, + const char *mHash, const EVP_MD *Hash, + int sLen); + + int EVP_DigestInit_ex(EVP_MD_CTX *ctx, const EVP_MD *type, ENGINE *impl); + int EVP_SignFinal(EVP_MD_CTX *ctx, char *sig, unsigned int *s, EVP_PKEY *pkey); + int EVP_VerifyFinal(EVP_MD_CTX *ctx, char *sigbuf, unsigned int siglen, EVP_PKEY *pkey); + + void EVP_MD_CTX_set_flags(EVP_MD_CTX *ctx, int flags); + """) +else: + ffi.cdef(""" + int PKCS5_PBKDF2_HMAC(const char *pass, int passlen, + const char *salt, int saltlen, int iter, + const EVP_MD *digest, + int keylen, char *out); + + int EVP_DigestSignInit(EVP_MD_CTX *ctx, EVP_PKEY_CTX **pctx, const EVP_MD *type, ENGINE *e, EVP_PKEY *pkey); + int EVP_DigestSignFinal(EVP_MD_CTX *ctx, char *sig, size_t *siglen); + + int EVP_DigestVerifyInit(EVP_MD_CTX *ctx, EVP_PKEY_CTX **pctx, const EVP_MD *type, ENGINE *e, EVP_PKEY *pkey); + int EVP_DigestVerifyFinal(EVP_MD_CTX *ctx, const char *sig, size_t siglen); + + int EVP_PKEY_CTX_ctrl(EVP_PKEY_CTX *ctx, int keytype, int optype, int cmd, int p1, void *p2); + """) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py index f783663..4e1b3cf 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/_libcrypto_ctypes.py @@ -40,13 +40,13 @@ is_libressl = 'LibreSSL' in version_string -version_match = re.search('\\b(\\d\\.\\d\\.\\d[a-z]*)\\b', version_string) +version_match = re.search('\\b(\\d+\\.\\d+\\.\\d+[a-z]*)\\b', version_string) if not version_match: - version_match = re.search('(?<=LibreSSL )(\\d\\.\\d(\\.\\d)?)\\b', version_string) + version_match = re.search('(?<=LibreSSL )(\\d+\\.\\d+(\\.\\d+)?)\\b', version_string) if not version_match: raise LibraryNotFoundError('Error detecting the version of libcrypto') version = version_match.group(1) -version_parts = re.sub('(\\d)([a-z]+)', '\\1.\\2', version).split('.') +version_parts = re.sub('(\\d+)([a-z]+)', '\\1.\\2', version).split('.') version_info = 
tuple(int(part) if part.isdigit() else part for part in version_parts) # LibreSSL is compatible with libcrypto from OpenSSL 1.0.1 @@ -73,6 +73,8 @@ P_EVP_MD = c_void_p P_ENGINE = c_void_p +OSSL_PROVIDER = c_void_p +OSSL_LIB_CTX = c_void_p P_EVP_PKEY = c_void_p EVP_PKEY_CTX = c_void_p @@ -97,6 +99,13 @@ libcrypto.ERR_free_strings.argtypes = [] libcrypto.ERR_free_strings.restype = None + if version_info >= (3, ): + libcrypto.OSSL_PROVIDER_available.argtypes = [OSSL_LIB_CTX, c_char_p] + libcrypto.OSSL_PROVIDER_available.restype = c_int + + libcrypto.OSSL_PROVIDER_load.argtypes = [OSSL_LIB_CTX, c_char_p] + libcrypto.OSSL_PROVIDER_load.restype = POINTER(OSSL_PROVIDER) + libcrypto.ERR_get_error.argtypes = [] libcrypto.ERR_get_error.restype = c_ulong @@ -301,10 +310,16 @@ libcrypto.EVP_sha512.argtypes = [] libcrypto.EVP_sha512.restype = P_EVP_MD - libcrypto.EVP_PKEY_size.argtypes = [ - P_EVP_PKEY - ] - libcrypto.EVP_PKEY_size.restype = c_int + if version_info < (3, 0): + libcrypto.EVP_PKEY_size.argtypes = [ + P_EVP_PKEY + ] + libcrypto.EVP_PKEY_size.restype = c_int + else: + libcrypto.EVP_PKEY_get_size.argtypes = [ + P_EVP_PKEY + ] + libcrypto.EVP_PKEY_get_size.restype = c_int libcrypto.EVP_PKEY_get1_RSA.argtypes = [ P_EVP_PKEY diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libssl.py b/app/lib/package_control/deps/oscrypto/_openssl/_libssl.py index 2fa2bce..7717650 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/_libssl.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/_libssl.py @@ -1,7 +1,10 @@ # coding: utf-8 from __future__ import unicode_literals, division, absolute_import, print_function -from .. import ffi +import re +import sys + +from .. import ffi, _backend_config # Initialize OpenSSL from ._libcrypto import libcrypto_version_info @@ -15,6 +18,7 @@ __all__ = [ 'libssl', 'LibsslConst', + 'error_code_version_info', ] @@ -87,3 +91,17 @@ class LibsslConst(): if libcrypto_version_info >= (1, 1, 0): LibsslConst.SSL_R_DH_KEY_TOO_SMALL = 394 + + +error_code_version_info = libcrypto_version_info +# The Apple version of libssl seems to have changed various codes for +# some reason, but the rest of the API is still OpenSSL 1.0.1 +if sys.platform == 'darwin': + libssl_abi_match = re.match(r'/usr/lib/libssl\.(\d+)', _backend_config().get('libssl_path', '')) + if libssl_abi_match and int(libssl_abi_match.group(1)) >= 44: + LibsslConst.SSL_F_TLS_PROCESS_SERVER_CERTIFICATE = 7 + LibsslConst.SSL_F_SSL3_GET_KEY_EXCHANGE = 9 + LibsslConst.SSL_F_SSL3_READ_BYTES = 4 + LibsslConst.SSL_F_SSL3_GET_RECORD = 4 + LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO = 4 + error_code_version_info = (1, 1, 0) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/_libssl_cffi.py b/app/lib/package_control/deps/oscrypto/_openssl/_libssl_cffi.py new file mode 100644 index 0000000..611f50c --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_openssl/_libssl_cffi.py @@ -0,0 +1,99 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .. 
import _backend_config +from .._ffi import get_library, register_ffi +from ..errors import LibraryNotFoundError +from ._libcrypto import libcrypto_version_info + +from cffi import FFI + + +__all__ = [ + 'libssl', +] + + +ffi = FFI() + +libssl_path = _backend_config().get('libssl_path') +if libssl_path is None: + libssl_path = get_library('ssl', 'libssl', '44') +if not libssl_path: + raise LibraryNotFoundError('The library libssl could not be found') + +libssl = ffi.dlopen(libssl_path) +register_ffi(libssl, ffi) + +ffi.cdef(""" + typedef ... SSL_METHOD; + typedef uintptr_t SSL_CTX; + typedef ... SSL_SESSION; + typedef uintptr_t SSL; + typedef ... BIO_METHOD; + typedef uintptr_t BIO; + typedef uintptr_t X509; + typedef ... X509_STORE; + typedef ... X509_STORE_CTX; + typedef uintptr_t _STACK; + + BIO_METHOD *BIO_s_mem(void); + BIO *BIO_new(BIO_METHOD *type); + int BIO_free(BIO *a); + int BIO_read(BIO *b, void *buf, int len); + int BIO_write(BIO *b, const void *buf, int len); + size_t BIO_ctrl_pending(BIO *b); + + SSL_CTX *SSL_CTX_new(const SSL_METHOD *method); + long SSL_CTX_set_timeout(SSL_CTX *ctx, long t); + void SSL_CTX_set_verify(SSL_CTX *ctx, int mode, + int (*verify_callback)(int, X509_STORE_CTX *)); + int SSL_CTX_set_default_verify_paths(SSL_CTX *ctx); + int SSL_CTX_load_verify_locations(SSL_CTX *ctx, const char *CAfile, + const char *CApath); + long SSL_get_verify_result(const SSL *ssl); + X509_STORE *SSL_CTX_get_cert_store(const SSL_CTX *ctx); + int X509_STORE_add_cert(X509_STORE *ctx, X509 *x); + int SSL_CTX_set_cipher_list(SSL_CTX *ctx, const char *str); + long SSL_CTX_ctrl(SSL_CTX *ctx, int cmd, long larg, void *parg); + void SSL_CTX_free(SSL_CTX *a); + + SSL *SSL_new(SSL_CTX *ctx); + void SSL_free(SSL *ssl); + void SSL_set_bio(SSL *ssl, BIO *rbio, BIO *wbio); + long SSL_ctrl(SSL *ssl, int cmd, long larg, void *parg); + _STACK *SSL_get_peer_cert_chain(const SSL *s); + + SSL_SESSION *SSL_get1_session(const SSL *ssl); + int SSL_set_session(SSL *ssl, SSL_SESSION *session); + void SSL_SESSION_free(SSL_SESSION *session); + + void SSL_set_connect_state(SSL *ssl); + int SSL_do_handshake(SSL *ssl); + int SSL_get_error(const SSL *ssl, int ret); + const char *SSL_get_version(const SSL *ssl); + + int SSL_read(SSL *ssl, void *buf, int num); + int SSL_write(SSL *ssl, const void *buf, int num); + int SSL_pending(const SSL *ssl); + + int SSL_shutdown(SSL *ssl); +""") + +if libcrypto_version_info < (1, 1): + ffi.cdef(""" + int sk_num(const _STACK *); + X509 *sk_value(const _STACK *, int); + + int SSL_library_init(void); + void OPENSSL_add_all_algorithms_noconf(void); + + SSL_METHOD *SSLv23_method(void); + """) +else: + ffi.cdef(""" + int OPENSSL_sk_num(const _STACK *); + X509 *OPENSSL_sk_value(const _STACK *, int); + + SSL_METHOD *TLS_method(void); + """) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/asymmetric.py b/app/lib/package_control/deps/oscrypto/_openssl/asymmetric.py index 880bf4d..a823bca 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/asymmetric.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/asymmetric.py @@ -32,6 +32,7 @@ new, null, unwrap, + write_to_buffer, ) from ._libcrypto import libcrypto, LibcryptoConst, libcrypto_version_info, handle_openssl_error from ..errors import AsymmetricKeyError, IncompleteAsymmetricKeyError, SignatureError @@ -105,6 +106,16 @@ def public_key(self): pubkey_data = bytes_from_buffer(pubkey_buffer, pubkey_length) asn1 = PublicKeyInfo.load(pubkey_data) + + # OpenSSL 1.x suffers from issues trying to use RSASSA-PSS keys, 
so we
+        # masquerade them as normal RSA keys so the OID checks work
+        if libcrypto_version_info < (3,) and asn1.algorithm == 'rsassa_pss':
+            temp_asn1 = asn1.copy()
+            temp_asn1['algorithm']['algorithm'] = 'rsa'
+            temp_data = temp_asn1.dump()
+            write_to_buffer(pubkey_buffer, temp_data)
+            pubkey_length = len(temp_data)
+
         pub_evp_pkey = libcrypto.d2i_PUBKEY(null(), buffer_pointer(pubkey_buffer), pubkey_length)
         if is_null(pub_evp_pkey):
             handle_openssl_error(0)
@@ -212,8 +223,13 @@ def public_key(self):
         """

         if not self._public_key and self.x509:
-            evp_pkey = libcrypto.X509_get_pubkey(self.x509)
-            self._public_key = PublicKey(evp_pkey, self.asn1['tbs_certificate']['subject_public_key_info'])
+            # OpenSSL 1.x suffers from issues trying to use RSASSA-PSS keys, so we
+            # masquerade them as normal RSA keys so the OID checks work
+            if libcrypto_version_info < (3,) and self.asn1.public_key.algorithm == 'rsassa_pss':
+                self._public_key = load_public_key(self.asn1.public_key)
+            else:
+                evp_pkey = libcrypto.X509_get_pubkey(self.x509)
+                self._public_key = PublicKey(evp_pkey, self.asn1.public_key)

         return self._public_key

@@ -233,6 +249,8 @@ def self_signed(self):

         if signature_algo == 'rsassa_pkcs1v15':
             verify_func = rsa_pkcs1v15_verify
+        elif signature_algo == 'rsassa_pss':
+            verify_func = rsa_pss_verify
         elif signature_algo == 'dsa':
             verify_func = dsa_verify
         elif signature_algo == 'ecdsa':
@@ -692,7 +710,7 @@ def load_public_key(source):
            source must be a byte string, unicode string or
            asn1crypto.keys.PublicKeyInfo object, not %s
            ''',
-            type_name(public_key)
+            type_name(source)
        ))

    if public_key.algorithm == 'dsa':
@@ -712,7 +730,15 @@
            '''
        ))

-    data = public_key.dump()
+    # OpenSSL 1.x suffers from issues trying to use RSASSA-PSS keys, so we
+    # masquerade them as normal RSA keys so the OID checks work
+    if libcrypto_version_info < (3,) and public_key.algorithm == 'rsassa_pss':
+        temp_key = public_key.copy()
+        temp_key['algorithm']['algorithm'] = 'rsa'
+        data = temp_key.dump()
+    else:
+        data = public_key.dump()
+
     buffer = buffer_from_bytes(data)
     evp_pkey = libcrypto.d2i_PUBKEY(null(), buffer_pointer(buffer), len(data))
     if is_null(evp_pkey):
@@ -928,6 +954,22 @@ def rsa_oaep_decrypt(private_key, ciphertext):
     return _decrypt(private_key, ciphertext, LibcryptoConst.RSA_PKCS1_OAEP_PADDING)


+def _evp_pkey_get_size(evp_pkey):
+    """
+    Handles the function name change from OpenSSL 1.1 -> 3.0
+
+    :param evp_pkey:
+        The EVP_PKEY of the Certificate or PublicKey to get the size of
+
+    :return:
+        An int of the number of bytes necessary for the key
+    """
+
+    if libcrypto_version_info < (3, ):
+        return libcrypto.EVP_PKEY_size(evp_pkey)
+    return libcrypto.EVP_PKEY_get_size(evp_pkey)
+
+
 def _encrypt(certificate_or_public_key, data, padding):
     """
     Encrypts plaintext using an RSA public key or certificate
@@ -970,7 +1012,7 @@
     rsa = None

     try:
-        buffer_size = libcrypto.EVP_PKEY_size(certificate_or_public_key.evp_pkey)
+        buffer_size = _evp_pkey_get_size(certificate_or_public_key.evp_pkey)
         buffer = buffer_from_bytes(buffer_size)

         rsa = libcrypto.EVP_PKEY_get1_RSA(certificate_or_public_key.evp_pkey)
@@ -1025,7 +1067,7 @@
     rsa = None

     try:
-        buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey)
+        buffer_size = _evp_pkey_get_size(private_key.evp_pkey)
         buffer = buffer_from_bytes(buffer_size)

         rsa = libcrypto.EVP_PKEY_get1_RSA(private_key.evp_pkey)
@@ -1105,7 +1147,9 @@ def
rsa_pss_verify(certificate_or_public_key, signature, data, hash_algorithm): OSError - when an error is returned by the OS crypto library """ - if certificate_or_public_key.algorithm != 'rsa': + cp_alg = certificate_or_public_key.algorithm + + if cp_alg != 'rsa' and cp_alg != 'rsassa_pss': raise ValueError(pretty_message( ''' The key specified is not an RSA public key, but %s @@ -1235,13 +1279,16 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ type_name(data) )) + cp_alg = certificate_or_public_key.algorithm + cp_is_rsa = cp_alg == 'rsa' or cp_alg == 'rsassa_pss' + valid_hash_algorithms = set(['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']) - if certificate_or_public_key.algorithm == 'rsa' and not rsa_pss_padding: + if cp_is_rsa and not rsa_pss_padding: valid_hash_algorithms |= set(['raw']) if hash_algorithm not in valid_hash_algorithms: valid_hash_algorithms_error = '"md5", "sha1", "sha224", "sha256", "sha384", "sha512"' - if certificate_or_public_key.algorithm == 'rsa' and not rsa_pss_padding: + if cp_is_rsa and not rsa_pss_padding: valid_hash_algorithms_error += ', "raw"' raise ValueError(pretty_message( ''' @@ -1251,16 +1298,16 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ repr(hash_algorithm) )) - if certificate_or_public_key.algorithm != 'rsa' and rsa_pss_padding: + if not cp_is_rsa and rsa_pss_padding: raise ValueError(pretty_message( ''' PSS padding can only be used with RSA keys - the key provided is a %s key ''', - certificate_or_public_key.algorithm.upper() + cp_alg.upper() )) - if certificate_or_public_key.algorithm == 'rsa' and hash_algorithm == 'raw': + if cp_is_rsa and hash_algorithm == 'raw': if len(data) > certificate_or_public_key.byte_size - 11: raise ValueError(pretty_message( ''' @@ -1279,7 +1326,7 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ if is_null(rsa): handle_openssl_error(0) - buffer_size = libcrypto.EVP_PKEY_size(certificate_or_public_key.evp_pkey) + buffer_size = _evp_pkey_get_size(certificate_or_public_key.evp_pkey) decrypted_buffer = buffer_from_bytes(buffer_size) decrypted_length = libcrypto.RSA_public_decrypt( len(signature), @@ -1323,14 +1370,14 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ }[hash_algorithm]() if libcrypto_version_info < (1,): - if certificate_or_public_key.algorithm == 'rsa' and rsa_pss_padding: + if cp_is_rsa and rsa_pss_padding: digest = getattr(hashlib, hash_algorithm)(data).digest() rsa = libcrypto.EVP_PKEY_get1_RSA(certificate_or_public_key.evp_pkey) if is_null(rsa): handle_openssl_error(0) - buffer_size = libcrypto.EVP_PKEY_size(certificate_or_public_key.evp_pkey) + buffer_size = _evp_pkey_get_size(certificate_or_public_key.evp_pkey) decoded_buffer = buffer_from_bytes(buffer_size) decoded_length = libcrypto.RSA_public_decrypt( len(signature), @@ -1349,7 +1396,7 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ LibcryptoConst.EVP_MD_CTX_FLAG_PSS_MDLEN ) - elif certificate_or_public_key.algorithm == 'rsa': + elif cp_is_rsa: res = libcrypto.EVP_DigestInit_ex(evp_md_ctx, evp_md, null()) handle_openssl_error(res) @@ -1363,7 +1410,7 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ certificate_or_public_key.evp_pkey ) - elif certificate_or_public_key.algorithm == 'dsa': + elif cp_alg == 'dsa': digest = getattr(hashlib, hash_algorithm)(data).digest() signature_buffer = buffer_from_bytes(signature) @@ -1378,7 +1425,7 
@@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ res = libcrypto.DSA_do_verify(digest, len(digest), dsa_sig, dsa) - elif certificate_or_public_key.algorithm == 'ec': + elif cp_alg == 'ec': digest = getattr(hashlib, hash_algorithm)(data).digest() signature_buffer = buffer_from_bytes(signature) @@ -1418,15 +1465,16 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ handle_openssl_error(res) # Use the hash algorithm output length as the salt length - res = libcrypto.EVP_PKEY_CTX_ctrl( - evp_pkey_ctx_pointer, - LibcryptoConst.EVP_PKEY_RSA, - LibcryptoConst.EVP_PKEY_OP_SIGN | LibcryptoConst.EVP_PKEY_OP_VERIFY, - LibcryptoConst.EVP_PKEY_CTRL_RSA_PSS_SALTLEN, - -1, - null() - ) - handle_openssl_error(res) + if libcrypto_version_info < (3, 0): + res = libcrypto.EVP_PKEY_CTX_ctrl( + evp_pkey_ctx_pointer, + LibcryptoConst.EVP_PKEY_RSA, + LibcryptoConst.EVP_PKEY_OP_SIGN | LibcryptoConst.EVP_PKEY_OP_VERIFY, + LibcryptoConst.EVP_PKEY_CTRL_RSA_PSS_SALTLEN, + -1, + null() + ) + handle_openssl_error(res) res = libcrypto.EVP_DigestUpdate(evp_md_ctx, data, len(data)) handle_openssl_error(res) @@ -1519,12 +1567,14 @@ def rsa_pss_sign(private_key, data, hash_algorithm): A byte string of the signature """ - if private_key.algorithm != 'rsa': + pkey_alg = private_key.algorithm + + if pkey_alg != 'rsa' and pkey_alg != 'rsassa_pss': raise ValueError(pretty_message( ''' The key specified is not an RSA private key, but %s ''', - private_key.algorithm.upper() + pkey_alg.upper() )) return _sign(private_key, data, hash_algorithm, rsa_pss_padding=True) @@ -1637,13 +1687,16 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): type_name(data) )) + pkey_alg = private_key.algorithm + pkey_is_rsa = pkey_alg == 'rsa' or pkey_alg == 'rsassa_pss' + valid_hash_algorithms = set(['md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512']) - if private_key.algorithm == 'rsa' and not rsa_pss_padding: + if pkey_alg == 'rsa' and not rsa_pss_padding: valid_hash_algorithms |= set(['raw']) if hash_algorithm not in valid_hash_algorithms: valid_hash_algorithms_error = '"md5", "sha1", "sha224", "sha256", "sha384", "sha512"' - if private_key.algorithm == 'rsa' and not rsa_pss_padding: + if pkey_is_rsa and not rsa_pss_padding: valid_hash_algorithms_error += ', "raw"' raise ValueError(pretty_message( ''' @@ -1653,16 +1706,16 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): repr(hash_algorithm) )) - if private_key.algorithm != 'rsa' and rsa_pss_padding: + if not pkey_is_rsa and rsa_pss_padding: raise ValueError(pretty_message( ''' PSS padding can only be used with RSA keys - the key provided is a %s key ''', - private_key.algorithm.upper() + pkey_alg.upper() )) - if private_key.algorithm == 'rsa' and hash_algorithm == 'raw': + if pkey_is_rsa and hash_algorithm == 'raw': if len(data) > private_key.byte_size - 11: raise ValueError(pretty_message( ''' @@ -1681,7 +1734,7 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): if is_null(rsa): handle_openssl_error(0) - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) signature_buffer = buffer_from_bytes(buffer_size) signature_length = libcrypto.RSA_private_encrypt( @@ -1722,14 +1775,14 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): }[hash_algorithm]() if libcrypto_version_info < (1,): - if private_key.algorithm == 'rsa' and rsa_pss_padding: + if pkey_is_rsa and rsa_pss_padding: digest = 
getattr(hashlib, hash_algorithm)(data).digest() rsa = libcrypto.EVP_PKEY_get1_RSA(private_key.evp_pkey) if is_null(rsa): handle_openssl_error(0) - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) em_buffer = buffer_from_bytes(buffer_size) res = libcrypto.RSA_padding_add_PKCS1_PSS( rsa, @@ -1750,8 +1803,8 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): ) handle_openssl_error(signature_length) - elif private_key.algorithm == 'rsa': - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + elif pkey_is_rsa: + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) signature_buffer = buffer_from_bytes(buffer_size) signature_length = new(libcrypto, 'unsigned int *') @@ -1771,7 +1824,7 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): signature_length = deref(signature_length) - elif private_key.algorithm == 'dsa': + elif pkey_alg == 'dsa': digest = getattr(hashlib, hash_algorithm)(data).digest() dsa = libcrypto.EVP_PKEY_get1_DSA(private_key.evp_pkey) @@ -1788,7 +1841,7 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): signature_length = libcrypto.i2d_DSA_SIG(dsa_sig, signature_pointer) handle_openssl_error(signature_length) - elif private_key.algorithm == 'ec': + elif pkey_alg == 'ec': digest = getattr(hashlib, hash_algorithm)(data).digest() ec_key = libcrypto.EVP_PKEY_get1_EC_KEY(private_key.evp_pkey) @@ -1806,7 +1859,7 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): handle_openssl_error(signature_length) else: - buffer_size = libcrypto.EVP_PKEY_size(private_key.evp_pkey) + buffer_size = _evp_pkey_get_size(private_key.evp_pkey) signature_buffer = buffer_from_bytes(buffer_size) signature_length = new(libcrypto, 'size_t *', buffer_size) @@ -1834,15 +1887,16 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): handle_openssl_error(res) # Use the hash algorithm output length as the salt length - res = libcrypto.EVP_PKEY_CTX_ctrl( - evp_pkey_ctx_pointer, - LibcryptoConst.EVP_PKEY_RSA, - LibcryptoConst.EVP_PKEY_OP_SIGN | LibcryptoConst.EVP_PKEY_OP_VERIFY, - LibcryptoConst.EVP_PKEY_CTRL_RSA_PSS_SALTLEN, - -1, - null() - ) - handle_openssl_error(res) + if libcrypto_version_info < (3, 0): + res = libcrypto.EVP_PKEY_CTX_ctrl( + evp_pkey_ctx_pointer, + LibcryptoConst.EVP_PKEY_RSA, + LibcryptoConst.EVP_PKEY_OP_SIGN | LibcryptoConst.EVP_PKEY_OP_VERIFY, + LibcryptoConst.EVP_PKEY_CTRL_RSA_PSS_SALTLEN, + -1, + null() + ) + handle_openssl_error(res) res = libcrypto.EVP_DigestUpdate(evp_md_ctx, data, len(data)) handle_openssl_error(res) diff --git a/app/lib/package_control/deps/oscrypto/_openssl/symmetric.py b/app/lib/package_control/deps/oscrypto/_openssl/symmetric.py index f41bca5..d390f89 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/symmetric.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/symmetric.py @@ -5,7 +5,7 @@ from .._errors import pretty_message from .._ffi import new, null, is_null, buffer_from_bytes, bytes_from_buffer, deref -from ._libcrypto import libcrypto, LibcryptoConst, handle_openssl_error +from ._libcrypto import libcrypto, libcrypto_legacy_support, LibcryptoConst, handle_openssl_error from ..util import rand_bytes from .._types import type_name, byte_cls @@ -236,6 +236,9 @@ def rc4_encrypt(key, data): A byte string of the ciphertext """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without RC4 support') + if len(key) < 5 or len(key) > 16: raise 
ValueError(pretty_message( ''' @@ -266,6 +269,9 @@ def rc4_decrypt(key, data): A byte string of the plaintext """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without RC4 support') + if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' @@ -301,6 +307,9 @@ def rc2_cbc_pkcs5_encrypt(key, data, iv): A tuple of two byte strings (iv, ciphertext) """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without RC2 support') + if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' @@ -345,6 +354,9 @@ def rc2_cbc_pkcs5_decrypt(key, data, iv): A byte string of the plaintext """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without RC2 support') + if len(key) < 5 or len(key) > 16: raise ValueError(pretty_message( ''' @@ -487,6 +499,9 @@ def des_cbc_pkcs5_encrypt(key, data, iv): A tuple of two byte strings (iv, ciphertext) """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without DES support') + if len(key) != 8: raise ValueError(pretty_message( ''' @@ -530,6 +545,9 @@ def des_cbc_pkcs5_decrypt(key, data, iv): A byte string of the plaintext """ + if not libcrypto_legacy_support: + raise EnvironmentError('OpenSSL has been compiled without DES support') + if len(key) != 8: raise ValueError(pretty_message( ''' @@ -604,23 +622,9 @@ def _encrypt(cipher, key, data, iv, padding): if cipher != 'rc4' and not padding: # AES in CBC mode can be allowed with no padding if - # the data is an exact multiple of the key size - aes128_no_padding = ( - cipher == 'aes128' and - padding is False and - len(data) % 16 == 0 - ) - aes192_no_padding = ( - cipher == 'aes192' and - padding is False and - len(data) % 24 == 0 - ) - aes256_no_padding = ( - cipher == 'aes256' and - padding is False and - len(data) % 32 == 0 - ) - if aes128_no_padding is False and aes192_no_padding is False and aes256_no_padding is False: + # the data is an exact multiple of the block size + is_aes = cipher in set(['aes128', 'aes192', 'aes256']) + if not is_aes or (is_aes and (len(data) % 16) != 0): raise ValueError('padding must be specified') evp_cipher_ctx = None @@ -730,7 +734,7 @@ def _decrypt(cipher, key, data, iv, padding): type_name(iv) )) - if cipher != 'rc4' and padding is None: + if cipher not in set(['rc4', 'aes128', 'aes192', 'aes256']) and not padding: raise ValueError('padding must be specified') evp_cipher_ctx = None diff --git a/app/lib/package_control/deps/oscrypto/_openssl/tls.py b/app/lib/package_control/deps/oscrypto/_openssl/tls.py index 8d64580..a4a4570 100644 --- a/app/lib/package_control/deps/oscrypto/_openssl/tls.py +++ b/app/lib/package_control/deps/oscrypto/_openssl/tls.py @@ -7,7 +7,7 @@ import select import numbers -from ._libssl import libssl, LibsslConst +from ._libssl import error_code_version_info, libssl, LibsslConst from ._libcrypto import libcrypto, libcrypto_version_info, handle_openssl_error, peek_openssl_error from .. import _backend_config from .._asn1 import Certificate as Asn1Certificate @@ -65,6 +65,25 @@ } +def _homogenize_openssl3_error(error_tuple): + """ + Takes a 3-element tuple from peek_openssl_error() and modifies it + to handle the changes in OpenSSL 3.0. That release removed the + concept of an error function, meaning the second item in the tuple + will always be 0. 
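+
+    For example (the numeric codes here are illustrative only), an
+    expected error info tuple is normalized on OpenSSL 3 so it can still
+    be compared against peek_openssl_error() output::
+
+        _homogenize_openssl3_error((20, 372, 394))  # -> (20, 0, 394)
+
+    On earlier OpenSSL versions the tuple is returned unchanged.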
+ + :param error_tuple: + A 3-element tuple of integers + + :return: + A 3-element tuple of integers + """ + + if libcrypto_version_info < (3,): + return error_tuple + return (error_tuple[0], 0, error_tuple[2]) + + class TLSSession(object): """ A TLS session object that multiple TLSSocket objects can share for the @@ -372,7 +391,7 @@ def wrap(cls, socket, hostname, session=None): def __init__(self, address, port, timeout=10, session=None): """ :param address: - A unicode string of the domain name or IP address to conenct to + A unicode string of the domain name or IP address to connect to :param port: An integer of the port number to connect to @@ -516,20 +535,26 @@ def _handshake(self): LibsslConst.SSL_F_SSL3_CHECK_CERT_AND_ALGORITHM, LibsslConst.SSL_R_DH_KEY_TOO_SMALL ) + dh_key_info_1 = _homogenize_openssl3_error(dh_key_info_1) + dh_key_info_2 = ( LibsslConst.ERR_LIB_SSL, LibsslConst.SSL_F_TLS_PROCESS_SKE_DHE, LibsslConst.SSL_R_DH_KEY_TOO_SMALL ) + dh_key_info_2 = _homogenize_openssl3_error(dh_key_info_2) + dh_key_info_3 = ( LibsslConst.ERR_LIB_SSL, LibsslConst.SSL_F_SSL3_GET_KEY_EXCHANGE, LibsslConst.SSL_R_BAD_DH_P_LENGTH ) + dh_key_info_3 = _homogenize_openssl3_error(dh_key_info_3) + if info == dh_key_info_1 or info == dh_key_info_2 or info == dh_key_info_3: raise_dh_params() - if libcrypto_version_info < (1, 1): + if error_code_version_info < (1, 1): unknown_protocol_info = ( LibsslConst.ERR_LIB_SSL, LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO, @@ -541,6 +566,8 @@ def _handshake(self): LibsslConst.SSL_F_SSL3_GET_RECORD, LibsslConst.SSL_R_WRONG_VERSION_NUMBER ) + unknown_protocol_info = _homogenize_openssl3_error(unknown_protocol_info) + if info == unknown_protocol_info: raise_protocol_error(handshake_server_bytes) @@ -549,23 +576,20 @@ def _handshake(self): LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO, LibsslConst.SSL_R_TLSV1_ALERT_PROTOCOL_VERSION ) + tls_version_info_error = _homogenize_openssl3_error(tls_version_info_error) if info == tls_version_info_error: raise_protocol_version() + # There are multiple functions that can result in a handshake failure, + # but our custom handshake parsing code figures out what really happened, + # and what is more, OpenSSL 3 got rid of function codes. Because of this, + # we skip checking the function code. handshake_error_info = ( LibsslConst.ERR_LIB_SSL, - LibsslConst.SSL_F_SSL23_GET_SERVER_HELLO, LibsslConst.SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE ) - if info == handshake_error_info: - raise_handshake() - handshake_failure_info = ( - LibsslConst.ERR_LIB_SSL, - LibsslConst.SSL_F_SSL3_READ_BYTES, - LibsslConst.SSL_R_SSLV3_ALERT_HANDSHAKE_FAILURE - ) - if info == handshake_failure_info: + if (info[0], info[2]) == handshake_error_info: saw_client_auth = False for record_type, _, record_data in parse_tls_records(handshake_server_bytes): if record_type != b'\x16': @@ -578,7 +602,7 @@ def _handshake(self): raise_client_auth() raise_handshake() - if libcrypto_version_info < (1, 1): + if error_code_version_info < (1, 1): cert_verify_failed_info = ( LibsslConst.ERR_LIB_SSL, LibsslConst.SSL_F_SSL3_GET_SERVER_CERTIFICATE, @@ -590,6 +614,7 @@ def _handshake(self): LibsslConst.SSL_F_TLS_PROCESS_SERVER_CERTIFICATE, LibsslConst.SSL_R_CERTIFICATE_VERIFY_FAILED ) + cert_verify_failed_info = _homogenize_openssl3_error(cert_verify_failed_info) # It would appear that some versions of OpenSSL (such as on Fedora 30) # don't even have the MD5 digest algorithm included any longer? 
To @@ -599,6 +624,7 @@ def _handshake(self): LibsslConst.ASN1_F_ASN1_ITEM_VERIFY, LibsslConst.ASN1_R_UNKNOWN_MESSAGE_DIGEST_ALGORITHM ) + unknown_hash_algo_info = _homogenize_openssl3_error(unknown_hash_algo_info) if info == unknown_hash_algo_info: chain = extract_chain(handshake_server_bytes) @@ -747,7 +773,7 @@ def _raw_write(self): sent = self._socket.send(to_write) except (socket_.error) as e: # Handle ECONNRESET and EPIPE - if e.errno == 104 or e.errno == 32: + if e.errno == 104 or e.errno == 54 or e.errno == 32: raise_disconnect = True # Handle EPROTOTYPE. Newer versions of macOS will return this # if we try to call send() while the socket is being torn down diff --git a/app/lib/package_control/deps/oscrypto/_pkcs1.py b/app/lib/package_control/deps/oscrypto/_pkcs1.py index 2044b84..66f5ed3 100644 --- a/app/lib/package_control/deps/oscrypto/_pkcs1.py +++ b/app/lib/package_control/deps/oscrypto/_pkcs1.py @@ -651,7 +651,7 @@ def raw_rsa_private_crypt(private_key, data): )) algo = private_key.asn1['private_key_algorithm']['algorithm'].native - if algo != 'rsa': + if algo != 'rsa' and algo != 'rsassa_pss': raise ValueError(pretty_message( ''' private_key must be an RSA key, not %s @@ -712,7 +712,7 @@ def raw_rsa_public_crypt(certificate_or_public_key, data): )) algo = certificate_or_public_key.asn1['algorithm']['algorithm'].native - if algo != 'rsa': + if algo != 'rsa' and algo != 'rsassa_pss': raise ValueError(pretty_message( ''' certificate_or_public_key must be an RSA key, not %s diff --git a/app/lib/package_control/deps/oscrypto/_pkcs12.py b/app/lib/package_control/deps/oscrypto/_pkcs12.py index b8f584c..b788178 100644 --- a/app/lib/package_control/deps/oscrypto/_pkcs12.py +++ b/app/lib/package_control/deps/oscrypto/_pkcs12.py @@ -190,7 +190,7 @@ def pkcs12_kdf(hash_algorithm, password, salt, iterations, key_length, id_): i = i[0:start] + i_num2 + i[end:] - # Step 7 (one peice at a time) + # Step 7 (one piece at a time) begin = (num - 1) * u to_copy = min(key_length, u) a = a[0:begin] + a2[0:to_copy] + a[begin + to_copy:] diff --git a/app/lib/package_control/deps/oscrypto/_tls.py b/app/lib/package_control/deps/oscrypto/_tls.py index 181d82b..260e9cf 100644 --- a/app/lib/package_control/deps/oscrypto/_tls.py +++ b/app/lib/package_control/deps/oscrypto/_tls.py @@ -465,6 +465,22 @@ def raise_self_signed(certificate): raise TLSVerificationError(message, certificate) +def raise_lifetime_too_long(certificate): + """ + Raises a TLSVerificationError due to a certificate lifetime exceeding + the CAB forum certificate lifetime limit + + :param certificate: + An asn1crypto.x509.Certificate object + + :raises: + TLSVerificationError + """ + + message = 'Server certificate verification failed - certificate lifetime is too long' + raise TLSVerificationError(message, certificate) + + def raise_expired_not_yet_valid(certificate): """ Raises a TLSVerificationError due to certificate being expired, or not yet diff --git a/app/lib/package_control/deps/oscrypto/_win/_advapi32.py b/app/lib/package_control/deps/oscrypto/_win/_advapi32.py index 5066e11..f2250cb 100644 --- a/app/lib/package_control/deps/oscrypto/_win/_advapi32.py +++ b/app/lib/package_control/deps/oscrypto/_win/_advapi32.py @@ -34,7 +34,7 @@ def open_context_handle(provider, verify_only=True): else: raise ValueError('Invalid provider specified: %s' % provider) - # Ths DSS provider needs a container to allow importing and exporting + # The DSS provider needs a container to allow importing and exporting # private keys, but all of the RSA stuff 
works fine with CRYPT_VERIFYCONTEXT if verify_only or provider != Advapi32Const.MS_ENH_DSS_DH_PROV: container_name = null() diff --git a/app/lib/package_control/deps/oscrypto/_win/_advapi32_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_advapi32_cffi.py new file mode 100644 index 0000000..49932ff --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_advapi32_cffi.py @@ -0,0 +1,145 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +import cffi + + +__all__ = [ + 'advapi32', + 'get_error', +] + + +ffi = cffi.FFI() +if cffi.__version_info__ >= (0, 9): + ffi.set_unicode(True) +ffi.cdef(""" + typedef HANDLE HCRYPTPROV; + typedef HANDLE HCRYPTKEY; + typedef HANDLE HCRYPTHASH; + typedef unsigned int ALG_ID; + + typedef struct _CRYPTOAPI_BLOB { + DWORD cbData; + BYTE *pbData; + } CRYPT_INTEGER_BLOB, CRYPT_OBJID_BLOB, CRYPT_DER_BLOB, CRYPT_ATTR_BLOB; + + typedef struct _CRYPT_ALGORITHM_IDENTIFIER { + LPSTR pszObjId; + CRYPT_OBJID_BLOB Parameters; + } CRYPT_ALGORITHM_IDENTIFIER; + + typedef struct _CRYPT_BIT_BLOB { + DWORD cbData; + BYTE *pbData; + DWORD cUnusedBits; + } CRYPT_BIT_BLOB; + + typedef struct _CERT_PUBLIC_KEY_INFO { + CRYPT_ALGORITHM_IDENTIFIER Algorithm; + CRYPT_BIT_BLOB PublicKey; + } CERT_PUBLIC_KEY_INFO; + + typedef struct _CRYPT_ATTRIBUTE { + LPSTR pszObjId; + DWORD cValue; + CRYPT_ATTR_BLOB *rgValue; + } CRYPT_ATTRIBUTE; + + typedef struct _CRYPT_ATTRIBUTES { + DWORD cAttr; + CRYPT_ATTRIBUTE *rgAttr; + } CRYPT_ATTRIBUTES; + + typedef struct _CRYPT_PRIVATE_KEY_INFO { + DWORD Version; + CRYPT_ALGORITHM_IDENTIFIER Algorithm; + CRYPT_DER_BLOB PrivateKey; + CRYPT_ATTRIBUTES *pAttributes; + } CRYPT_PRIVATE_KEY_INFO; + + typedef struct _PUBLICKEYSTRUC { + BYTE bType; + BYTE bVersion; + WORD reserved; + ALG_ID aiKeyAlg; + } BLOBHEADER, PUBLICKEYSTRUC; + + typedef struct _DSSPUBKEY { + DWORD magic; + DWORD bitlen; + } DSSPUBKEY; + + typedef struct _DSSBLOBHEADER { + PUBLICKEYSTRUC publickeystruc; + DSSPUBKEY dsspubkey; + } DSSBLOBHEADER; + + typedef struct _RSAPUBKEY { + DWORD magic; + DWORD bitlen; + DWORD pubexp; + } RSAPUBKEY; + + typedef struct _RSABLOBHEADER { + PUBLICKEYSTRUC publickeystruc; + RSAPUBKEY rsapubkey; + } RSABLOBHEADER; + + typedef struct _PLAINTEXTKEYBLOB { + BLOBHEADER hdr; + DWORD dwKeySize; + // rgbKeyData omitted since it is a flexible array member + } PLAINTEXTKEYBLOB; + + typedef struct _DSSSEED { + DWORD counter; + BYTE seed[20]; + } DSSSEED; + + BOOL CryptAcquireContextW(HCRYPTPROV *phProv, LPCWSTR pszContainer, LPCWSTR pszProvider, + DWORD dwProvType, DWORD dwFlags); + BOOL CryptReleaseContext(HCRYPTPROV hProv, DWORD dwFlags); + + BOOL CryptImportKey(HCRYPTPROV hProv, BYTE *pbData, DWORD dwDataLen, + HCRYPTKEY hPubKey, DWORD dwFlags, HCRYPTKEY *phKey); + BOOL CryptGenKey(HCRYPTPROV hProv, ALG_ID Algid, DWORD dwFlags, HCRYPTKEY *phKey); + BOOL CryptGetKeyParam(HCRYPTKEY hKey, DWORD dwParam, BYTE *pbData, DWORD *pdwDataLen, DWORD dwFlags); + BOOL CryptSetKeyParam(HCRYPTKEY hKey, DWORD dwParam, void *pbData, DWORD dwFlags); + BOOL CryptExportKey(HCRYPTKEY hKey, HCRYPTKEY hExpKey, DWORD dwBlobType, + DWORD dwFlags, BYTE *pbData, DWORD *pdwDataLen); + BOOL CryptDestroyKey(HCRYPTKEY hKey); + + BOOL CryptCreateHash(HCRYPTPROV hProv, ALG_ID Algid, HCRYPTKEY hKey, + DWORD dwFlags, HCRYPTHASH *phHash); + BOOL CryptHashData(HCRYPTHASH hHash, BYTE *pbData, DWORD dwDataLen, DWORD dwFlags); + BOOL 
CryptSetHashParam(HCRYPTHASH hHash, DWORD dwParam, BYTE *pbData, DWORD dwFlags); + BOOL CryptSignHashW(HCRYPTHASH hHash, DWORD dwKeySpec, LPCWSTR sDescription, + DWORD dwFlags, BYTE *pbSignature, DWORD *pdwSigLen); + BOOL CryptVerifySignatureW(HCRYPTHASH hHash, BYTE *pbSignature, DWORD dwSigLen, + HCRYPTKEY hPubKey, LPCWSTR sDescription, DWORD dwFlags); + BOOL CryptDestroyHash(HCRYPTHASH hHash); + + BOOL CryptEncrypt(HCRYPTKEY hKey, HCRYPTHASH hHash, BOOL Final, DWORD dwFlags, + BYTE *pbData, DWORD *pdwDataLen, DWORD dwBufLen); + BOOL CryptDecrypt(HCRYPTKEY hKey, HCRYPTHASH hHash, BOOL Final, DWORD dwFlags, + BYTE *pbData, DWORD *pdwDataLen); +""") + + +try: + advapi32 = ffi.dlopen('advapi32.dll') + register_ffi(advapi32, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('advapi32.dll could not be found') + raise + + +def get_error(): + return ffi.getwinerror() diff --git a/app/lib/package_control/deps/oscrypto/_win/_cng_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_cng_cffi.py new file mode 100644 index 0000000..3e9d5ce --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_cng_cffi.py @@ -0,0 +1,120 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +from cffi import FFI + + +__all__ = [ + 'bcrypt', +] + + +ffi = FFI() +ffi.cdef(""" + typedef HANDLE BCRYPT_ALG_HANDLE; + typedef HANDLE BCRYPT_KEY_HANDLE; + typedef ULONG NTSTATUS; + typedef unsigned char *PUCHAR; + typedef unsigned char *PBYTE; + + + typedef struct _BCRYPT_RSAKEY_BLOB { + ULONG Magic; + ULONG BitLength; + ULONG cbPublicExp; + ULONG cbModulus; + ULONG cbPrime1; + ULONG cbPrime2; + } BCRYPT_RSAKEY_BLOB; + + typedef struct _BCRYPT_DSA_KEY_BLOB { + ULONG dwMagic; + ULONG cbKey; + UCHAR Count[4]; + UCHAR Seed[20]; + UCHAR q[20]; + } BCRYPT_DSA_KEY_BLOB; + + typedef struct _BCRYPT_DSA_KEY_BLOB_V2 { + ULONG dwMagic; + ULONG cbKey; + INT hashAlgorithm; + INT standardVersion; + ULONG cbSeedLength; + ULONG cbGroupSize; + UCHAR Count[4]; + } BCRYPT_DSA_KEY_BLOB_V2; + + typedef struct _BCRYPT_ECCKEY_BLOB { + ULONG dwMagic; + ULONG cbKey; + } BCRYPT_ECCKEY_BLOB; + + typedef struct _BCRYPT_PKCS1_PADDING_INFO { + LPCWSTR pszAlgId; + } BCRYPT_PKCS1_PADDING_INFO; + + typedef struct _BCRYPT_PSS_PADDING_INFO { + LPCWSTR pszAlgId; + ULONG cbSalt; + } BCRYPT_PSS_PADDING_INFO; + + typedef struct _BCRYPT_OAEP_PADDING_INFO { + LPCWSTR pszAlgId; + PUCHAR pbLabel; + ULONG cbLabel; + } BCRYPT_OAEP_PADDING_INFO; + + typedef struct _BCRYPT_KEY_DATA_BLOB_HEADER { + ULONG dwMagic; + ULONG dwVersion; + ULONG cbKeyData; + } BCRYPT_KEY_DATA_BLOB_HEADER; + + NTSTATUS BCryptOpenAlgorithmProvider(BCRYPT_ALG_HANDLE *phAlgorithm, LPCWSTR pszAlgId, LPCWSTR pszImplementation, + DWORD dwFlags); + NTSTATUS BCryptCloseAlgorithmProvider(BCRYPT_ALG_HANDLE hAlgorithm, DWORD dwFlags); + NTSTATUS BCryptSetProperty(HANDLE hObject, LPCWSTR pszProperty, ULONG *pbInput, ULONG cbInput, ULONG dwFlags); + + NTSTATUS BCryptImportKeyPair(BCRYPT_ALG_HANDLE hAlgorithm, BCRYPT_KEY_HANDLE hImportKey, LPCWSTR pszBlobType, + BCRYPT_KEY_HANDLE *phKey, PUCHAR pbInput, ULONG cbInput, ULONG dwFlags); + NTSTATUS BCryptImportKey(BCRYPT_ALG_HANDLE hAlgorithm, BCRYPT_KEY_HANDLE hImportKey, LPCWSTR pszBlobType, + BCRYPT_KEY_HANDLE *phKey, PUCHAR pbKeyObject, ULONG cbKeyObject, PUCHAR pbInput, ULONG cbInput, + ULONG dwFlags); + NTSTATUS 
BCryptDestroyKey(BCRYPT_KEY_HANDLE hKey); + + NTSTATUS BCryptVerifySignature(BCRYPT_KEY_HANDLE hKey, void *pPaddingInfo, PUCHAR pbHash, ULONG cbHash, + PUCHAR pbSignature, ULONG cbSignature, ULONG dwFlags); + NTSTATUS BCryptSignHash(BCRYPT_KEY_HANDLE hKey, void * pPaddingInfo, PBYTE pbInput, DWORD cbInput, PBYTE pbOutput, + DWORD cbOutput, DWORD *pcbResult, ULONG dwFlags); + + NTSTATUS BCryptEncrypt(BCRYPT_KEY_HANDLE hKey, PUCHAR pbInput, ULONG cbInput, void *pPaddingInfo, PUCHAR pbIV, + ULONG cbIV, PUCHAR pbOutput, ULONG cbOutput, ULONG *pcbResult, ULONG dwFlags); + NTSTATUS BCryptDecrypt(BCRYPT_KEY_HANDLE hKey, PUCHAR pbInput, ULONG cbInput, void *pPaddingInfo, PUCHAR pbIV, + ULONG cbIV, PUCHAR pbOutput, ULONG cbOutput, ULONG *pcbResult, ULONG dwFlags); + + NTSTATUS BCryptDeriveKeyPBKDF2(BCRYPT_ALG_HANDLE hPrf, PUCHAR pbPassword, ULONG cbPassword, PUCHAR pbSalt, + ULONG cbSalt, ULONGLONG cIterations, PUCHAR pbDerivedKey, ULONG cbDerivedKey, ULONG dwFlags); + + NTSTATUS BCryptGenRandom(BCRYPT_ALG_HANDLE hAlgorithm, PUCHAR pbBuffer, ULONG cbBuffer, ULONG dwFlags); + + NTSTATUS BCryptGenerateKeyPair(BCRYPT_ALG_HANDLE hAlgorithm, BCRYPT_KEY_HANDLE *phKey, ULONG dwLength, + ULONG dwFlags); + NTSTATUS BCryptFinalizeKeyPair(BCRYPT_KEY_HANDLE hKey, ULONG dwFlags); + NTSTATUS BCryptExportKey(BCRYPT_KEY_HANDLE hKey, BCRYPT_KEY_HANDLE hExportKey, LPCWSTR pszBlobType, + PUCHAR pbOutput, ULONG cbOutput, ULONG *pcbResult, ULONG dwFlags); +""") + + +try: + bcrypt = ffi.dlopen('bcrypt.dll') + register_ffi(bcrypt, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('bcrypt.dll could not be found - Windows XP and Server 2003 are not supported') + raise diff --git a/app/lib/package_control/deps/oscrypto/_win/_crypt32_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_crypt32_cffi.py new file mode 100644 index 0000000..3952682 --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_crypt32_cffi.py @@ -0,0 +1,188 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import sys + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +import cffi + + +__all__ = [ + 'crypt32', + 'get_error', +] + + +ffi = cffi.FFI() +if cffi.__version_info__ >= (0, 9): + ffi.set_unicode(True) +if sys.maxsize > 2 ** 32: + ffi.cdef("typedef uint64_t ULONG_PTR;") +else: + ffi.cdef("typedef unsigned long ULONG_PTR;") +ffi.cdef(""" + typedef HANDLE HCERTSTORE; + typedef unsigned char *PBYTE; + + + typedef struct _CRYPTOAPI_BLOB { + DWORD cbData; + PBYTE pbData; + } CRYPTOAPI_BLOB; + typedef CRYPTOAPI_BLOB CRYPT_INTEGER_BLOB; + typedef CRYPTOAPI_BLOB CERT_NAME_BLOB; + typedef CRYPTOAPI_BLOB CRYPT_BIT_BLOB; + typedef CRYPTOAPI_BLOB CRYPT_OBJID_BLOB; + + typedef struct _CRYPT_ALGORITHM_IDENTIFIER { + LPSTR pszObjId; + CRYPT_OBJID_BLOB Parameters; + } CRYPT_ALGORITHM_IDENTIFIER; + + typedef struct _FILETIME { + DWORD dwLowDateTime; + DWORD dwHighDateTime; + } FILETIME; + + typedef struct _CERT_PUBLIC_KEY_INFO { + CRYPT_ALGORITHM_IDENTIFIER Algorithm; + CRYPT_BIT_BLOB PublicKey; + } CERT_PUBLIC_KEY_INFO; + + typedef struct _CERT_EXTENSION { + LPSTR pszObjId; + BOOL fCritical; + CRYPT_OBJID_BLOB Value; + } CERT_EXTENSION, *PCERT_EXTENSION; + + typedef struct _CERT_INFO { + DWORD dwVersion; + CRYPT_INTEGER_BLOB SerialNumber; + CRYPT_ALGORITHM_IDENTIFIER SignatureAlgorithm; + CERT_NAME_BLOB Issuer; + FILETIME NotBefore; + FILETIME NotAfter; + CERT_NAME_BLOB Subject; 
+ CERT_PUBLIC_KEY_INFO SubjectPublicKeyInfo; + CRYPT_BIT_BLOB IssuerUniqueId; + CRYPT_BIT_BLOB SubjectUniqueId; + DWORD cExtension; + PCERT_EXTENSION *rgExtension; + } CERT_INFO, *PCERT_INFO; + + typedef struct _CERT_CONTEXT { + DWORD dwCertEncodingType; + PBYTE pbCertEncoded; + DWORD cbCertEncoded; + PCERT_INFO pCertInfo; + HCERTSTORE hCertStore; + } CERT_CONTEXT, *PCERT_CONTEXT; + + typedef struct _CERT_TRUST_STATUS { + DWORD dwErrorStatus; + DWORD dwInfoStatus; + } CERT_TRUST_STATUS, *PCERT_TRUST_STATUS; + + typedef struct _CERT_ENHKEY_USAGE { + DWORD cUsageIdentifier; + LPSTR *rgpszUsageIdentifier; + } CERT_ENHKEY_USAGE, *PCERT_ENHKEY_USAGE; + + typedef struct _CERT_CHAIN_ELEMENT { + DWORD cbSize; + PCERT_CONTEXT pCertContext; + CERT_TRUST_STATUS TrustStatus; + void *pRevocationInfo; + PCERT_ENHKEY_USAGE pIssuanceUsage; + PCERT_ENHKEY_USAGE pApplicationUsage; + LPCWSTR pwszExtendedErrorInfo; + } CERT_CHAIN_ELEMENT, *PCERT_CHAIN_ELEMENT; + + typedef struct _CERT_SIMPLE_CHAIN { + DWORD cbSize; + CERT_TRUST_STATUS TrustStatus; + DWORD cElement; + PCERT_CHAIN_ELEMENT *rgpElement; + void *pTrustListInfo; + BOOL fHasRevocationFreshnessTime; + DWORD dwRevocationFreshnessTime; + } CERT_SIMPLE_CHAIN, *PCERT_SIMPLE_CHAIN; + + typedef struct _CERT_CHAIN_CONTEXT { + DWORD cbSize; + CERT_TRUST_STATUS TrustStatus; + DWORD cChain; + PCERT_SIMPLE_CHAIN *rgpChain; + DWORD cLowerQualityChainContext; + void *rgpLowerQualityChainContext; + BOOL fHasRevocationFreshnessTime; + DWORD dwRevocationFreshnessTime; + } CERT_CHAIN_CONTEXT, *PCERT_CHAIN_CONTEXT; + + typedef struct _CERT_USAGE_MATCH { + DWORD dwType; + CERT_ENHKEY_USAGE Usage; + } CERT_USAGE_MATCH; + + typedef struct _CERT_CHAIN_PARA { + DWORD cbSize; + CERT_USAGE_MATCH RequestedUsage; + } CERT_CHAIN_PARA; + + typedef struct _CERT_CHAIN_POLICY_PARA { + DWORD cbSize; + DWORD dwFlags; + void *pvExtraPolicyPara; + } CERT_CHAIN_POLICY_PARA; + + typedef struct _HTTPSPolicyCallbackData { + DWORD cbSize; + DWORD dwAuthType; + DWORD fdwChecks; + WCHAR *pwszServerName; + } SSL_EXTRA_CERT_CHAIN_POLICY_PARA; + + typedef struct _CERT_CHAIN_POLICY_STATUS { + DWORD cbSize; + DWORD dwError; + LONG lChainIndex; + LONG lElementIndex; + void *pvExtraPolicyStatus; + } CERT_CHAIN_POLICY_STATUS; + + typedef HANDLE HCERTCHAINENGINE; + typedef HANDLE HCRYPTPROV; + + HCERTSTORE CertOpenStore(LPCSTR lpszStoreProvider, DWORD dwMsgAndCertEncodingType, HCRYPTPROV hCryptProv, + DWORD dwFlags, void *pvPara); + BOOL CertAddEncodedCertificateToStore(HCERTSTORE hCertStore, DWORD dwCertEncodingType, BYTE *pbCertEncoded, + DWORD cbCertEncoded, DWORD dwAddDisposition, PCERT_CONTEXT *ppCertContext); + BOOL CertGetCertificateChain(HCERTCHAINENGINE hChainEngine, CERT_CONTEXT *pCertContext, FILETIME *pTime, + HCERTSTORE hAdditionalStore, CERT_CHAIN_PARA *pChainPara, DWORD dwFlags, void *pvReserved, + PCERT_CHAIN_CONTEXT *ppChainContext); + BOOL CertVerifyCertificateChainPolicy(ULONG_PTR pszPolicyOID, PCERT_CHAIN_CONTEXT pChainContext, + CERT_CHAIN_POLICY_PARA *pPolicyPara, CERT_CHAIN_POLICY_STATUS *pPolicyStatus); + void CertFreeCertificateChain(PCERT_CHAIN_CONTEXT pChainContext); + + HCERTSTORE CertOpenSystemStoreW(HANDLE hprov, LPCWSTR szSubsystemProtocol); + PCERT_CONTEXT CertEnumCertificatesInStore(HCERTSTORE hCertStore, CERT_CONTEXT *pPrevCertContext); + BOOL CertCloseStore(HCERTSTORE hCertStore, DWORD dwFlags); + BOOL CertGetEnhancedKeyUsage(CERT_CONTEXT *pCertContext, DWORD dwFlags, CERT_ENHKEY_USAGE *pUsage, DWORD *pcbUsage); +""") + + +try: + crypt32 = ffi.dlopen('crypt32.dll') + 
register_ffi(crypt32, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('crypt32.dll could not be found') + raise + + +def get_error(): + return ffi.getwinerror() diff --git a/app/lib/package_control/deps/oscrypto/_win/_kernel32_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_kernel32_cffi.py new file mode 100644 index 0000000..1ddbae5 --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_kernel32_cffi.py @@ -0,0 +1,44 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +import cffi + + +__all__ = [ + 'get_error', + 'kernel32', +] + + +ffi = cffi.FFI() +if cffi.__version_info__ >= (0, 9): + ffi.set_unicode(True) +ffi.cdef(""" + typedef long long LARGE_INTEGER; + BOOL QueryPerformanceCounter(LARGE_INTEGER *lpPerformanceCount); + + typedef struct _FILETIME { + DWORD dwLowDateTime; + DWORD dwHighDateTime; + } FILETIME; + + void GetSystemTimeAsFileTime(FILETIME *lpSystemTimeAsFileTime); +""") + + +try: + kernel32 = ffi.dlopen('kernel32.dll') + register_ffi(kernel32, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('kernel32.dll could not be found') + raise + + +def get_error(): + return ffi.getwinerror() diff --git a/app/lib/package_control/deps/oscrypto/_win/_secur32_cffi.py b/app/lib/package_control/deps/oscrypto/_win/_secur32_cffi.py new file mode 100644 index 0000000..2d80e2b --- /dev/null +++ b/app/lib/package_control/deps/oscrypto/_win/_secur32_cffi.py @@ -0,0 +1,129 @@ +# coding: utf-8 +from __future__ import unicode_literals, division, absolute_import, print_function + +import sys + +from .._ffi import register_ffi +from .._types import str_cls +from ..errors import LibraryNotFoundError + +import cffi + + +__all__ = [ + 'get_error', + 'secur32', +] + + +ffi = cffi.FFI() +if cffi.__version_info__ >= (0, 9): + ffi.set_unicode(True) +if sys.maxsize > 2 ** 32: + ffi.cdef("typedef uint64_t ULONG_PTR;") +else: + ffi.cdef("typedef unsigned long ULONG_PTR;") +ffi.cdef(""" + typedef HANDLE HCERTSTORE; + typedef unsigned int ALG_ID; + typedef WCHAR SEC_WCHAR; + typedef unsigned long SECURITY_STATUS; + typedef void *LUID; + typedef void *SEC_GET_KEY_FN; + + typedef struct _SecHandle { + ULONG_PTR dwLower; + ULONG_PTR dwUpper; + } SecHandle; + typedef SecHandle CredHandle; + typedef SecHandle CtxtHandle; + + typedef struct _SCHANNEL_CRED { + DWORD dwVersion; + DWORD cCreds; + void *paCred; + HCERTSTORE hRootStore; + DWORD cMappers; + void **aphMappers; + DWORD cSupportedAlgs; + ALG_ID *palgSupportedAlgs; + DWORD grbitEnabledProtocols; + DWORD dwMinimumCipherStrength; + DWORD dwMaximumCipherStrength; + DWORD dwSessionLifespan; + DWORD dwFlags; + DWORD dwCredFormat; + } SCHANNEL_CRED; + + typedef struct _TimeStamp { + DWORD dwLowDateTime; + DWORD dwHighDateTime; + } TimeStamp; + + typedef struct _SecBuffer { + ULONG cbBuffer; + ULONG BufferType; + BYTE *pvBuffer; + } SecBuffer; + + typedef struct _SecBufferDesc { + ULONG ulVersion; + ULONG cBuffers; + SecBuffer *pBuffers; + } SecBufferDesc; + + typedef struct _SecPkgContext_StreamSizes { + ULONG cbHeader; + ULONG cbTrailer; + ULONG cbMaximumMessage; + ULONG cBuffers; + ULONG cbBlockSize; + } SecPkgContext_StreamSizes; + + typedef struct _CERT_CONTEXT { + DWORD dwCertEncodingType; + BYTE *pbCertEncoded; + DWORD cbCertEncoded; + void *pCertInfo; + 
HCERTSTORE hCertStore; + } CERT_CONTEXT; + + typedef struct _SecPkgContext_ConnectionInfo { + DWORD dwProtocol; + ALG_ID aiCipher; + DWORD dwCipherStrength; + ALG_ID aiHash; + DWORD dwHashStrength; + ALG_ID aiExch; + DWORD dwExchStrength; + } SecPkgContext_ConnectionInfo; + + SECURITY_STATUS AcquireCredentialsHandleW(SEC_WCHAR *pszPrincipal, SEC_WCHAR *pszPackage, ULONG fCredentialUse, + LUID *pvLogonID, void *pAuthData, SEC_GET_KEY_FN pGetKeyFn, void *pvGetKeyArgument, + CredHandle *phCredential, TimeStamp *ptsExpiry); + SECURITY_STATUS FreeCredentialsHandle(CredHandle *phCredential); + SECURITY_STATUS InitializeSecurityContextW(CredHandle *phCredential, CtxtHandle *phContext, + SEC_WCHAR *pszTargetName, ULONG fContextReq, ULONG Reserved1, ULONG TargetDataRep, + SecBufferDesc *pInput, ULONG Reserved2, CtxtHandle *phNewContext, SecBufferDesc *pOutput, + ULONG *pfContextAttr, TimeStamp *ptsExpiry); + SECURITY_STATUS FreeContextBuffer(void *pvContextBuffer); + SECURITY_STATUS ApplyControlToken(CtxtHandle *phContext, SecBufferDesc *pInput); + SECURITY_STATUS DeleteSecurityContext(CtxtHandle *phContext); + SECURITY_STATUS QueryContextAttributesW(CtxtHandle *phContext, ULONG ulAttribute, void *pBuffer); + SECURITY_STATUS EncryptMessage(CtxtHandle *phContext, ULONG fQOP, SecBufferDesc *pMessage, ULONG MessageSeqNo); + SECURITY_STATUS DecryptMessage(CtxtHandle *phContext, SecBufferDesc *pMessage, ULONG MessageSeqNo, ULONG *pfQOP); +""") + + +try: + secur32 = ffi.dlopen('secur32.dll') + register_ffi(secur32, ffi) + +except (OSError) as e: + if str_cls(e).find('cannot load library') != -1: + raise LibraryNotFoundError('secur32.dll could not be found') + raise + + +def get_error(): + return ffi.getwinerror() diff --git a/app/lib/package_control/deps/oscrypto/_win/asymmetric.py b/app/lib/package_control/deps/oscrypto/_win/asymmetric.py index 9e6eb46..dc985b0 100644 --- a/app/lib/package_control/deps/oscrypto/_win/asymmetric.py +++ b/app/lib/package_control/deps/oscrypto/_win/asymmetric.py @@ -567,6 +567,8 @@ def self_signed(self): if signature_algo == 'rsassa_pkcs1v15': verify_func = rsa_pkcs1v15_verify + elif signature_algo == 'rsassa_pss': + verify_func = rsa_pss_verify elif signature_algo == 'dsa': verify_func = dsa_verify elif signature_algo == 'ecdsa': @@ -1650,8 +1652,10 @@ def _advapi32_load_key(key_object, key_info, container): key_type = 'public' if isinstance(key_info, PublicKeyInfo) else 'private' algo = key_info.algorithm + if algo == 'rsassa_pss': + algo = 'rsa' - if algo == 'rsa': + if algo == 'rsa' or algo == 'rsassa_pss': provider = Advapi32Const.MS_ENH_RSA_AES_PROV else: provider = Advapi32Const.MS_ENH_DSS_DH_PROV @@ -1844,6 +1848,8 @@ def _bcrypt_load_key(key_object, key_info, container, curve_name): key_type = 'public' if isinstance(key_info, PublicKeyInfo) else 'private' algo = key_info.algorithm + if algo == 'rsassa_pss': + algo = 'rsa' try: alg_selector = key_info.curve[1] if algo == 'ec' else algo @@ -2282,7 +2288,9 @@ def rsa_pss_verify(certificate_or_public_key, signature, data, hash_algorithm): OSError - when an error is returned by the OS crypto library """ - if certificate_or_public_key.algorithm != 'rsa': + cp_alg = certificate_or_public_key.algorithm + + if cp_alg != 'rsa' and cp_alg != 'rsassa_pss': raise ValueError('The key specified is not an RSA public key') return _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_padding=True) @@ -2397,13 +2405,16 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ 
type_name(data) )) + cp_alg = certificate_or_public_key.algorithm + cp_is_rsa = cp_alg == 'rsa' or cp_alg == 'rsassa_pss' + valid_hash_algorithms = set(['md5', 'sha1', 'sha256', 'sha384', 'sha512']) - if certificate_or_public_key.algorithm == 'rsa' and not rsa_pss_padding: + if cp_is_rsa and not rsa_pss_padding: valid_hash_algorithms |= set(['raw']) if hash_algorithm not in valid_hash_algorithms: valid_hash_algorithms_error = '"md5", "sha1", "sha256", "sha384", "sha512"' - if certificate_or_public_key.algorithm == 'rsa' and not rsa_pss_padding: + if cp_is_rsa and not rsa_pss_padding: valid_hash_algorithms_error += ', "raw"' raise ValueError(pretty_message( ''' @@ -2413,13 +2424,13 @@ def _verify(certificate_or_public_key, signature, data, hash_algorithm, rsa_pss_ repr(hash_algorithm) )) - if certificate_or_public_key.algorithm != 'rsa' and rsa_pss_padding is not False: + if not cp_is_rsa and rsa_pss_padding is not False: raise ValueError(pretty_message( ''' PSS padding may only be used with RSA keys - signing via a %s key was requested ''', - certificate_or_public_key.algorithm.upper() + cp_alg.upper() )) if hash_algorithm == 'raw': @@ -2468,8 +2479,9 @@ def _advapi32_verify(certificate_or_public_key, signature, data, hash_algorithm, """ algo = certificate_or_public_key.algorithm + algo_is_rsa = algo == 'rsa' or algo == 'rsassa_pss' - if algo == 'rsa' and rsa_pss_padding: + if algo_is_rsa and rsa_pss_padding: hash_length = { 'sha1': 20, 'sha224': 28, @@ -2483,7 +2495,7 @@ def _advapi32_verify(certificate_or_public_key, signature, data, hash_algorithm, raise SignatureError('Signature is invalid') return - if algo == 'rsa' and hash_algorithm == 'raw': + if algo_is_rsa and hash_algorithm == 'raw': padded_plaintext = raw_rsa_public_crypt(certificate_or_public_key, signature) try: plaintext = remove_pkcs1v15_signature_padding(certificate_or_public_key.byte_size, padded_plaintext) @@ -2591,7 +2603,10 @@ def _bcrypt_verify(certificate_or_public_key, signature, data, hash_algorithm, r padding_info = null() flags = 0 - if certificate_or_public_key.algorithm == 'rsa': + cp_alg = certificate_or_public_key.algorithm + cp_is_rsa = cp_alg == 'rsa' or cp_alg == 'rsassa_pss' + + if cp_is_rsa: if rsa_pss_padding: flags = BcryptConst.BCRYPT_PAD_PSS padding_info_struct_pointer = struct(bcrypt, 'BCRYPT_PSS_PADDING_INFO') @@ -2694,7 +2709,9 @@ def rsa_pss_sign(private_key, data, hash_algorithm): A byte string of the signature """ - if private_key.algorithm != 'rsa': + pkey_alg = private_key.algorithm + + if pkey_alg != 'rsa' and pkey_alg != 'rsassa_pss': raise ValueError('The key specified is not an RSA private key') return _sign(private_key, data, hash_algorithm, rsa_pss_padding=True) @@ -2797,13 +2814,16 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): type_name(data) )) + pkey_alg = private_key.algorithm + pkey_is_rsa = pkey_alg == 'rsa' or pkey_alg == 'rsassa_pss' + valid_hash_algorithms = set(['md5', 'sha1', 'sha256', 'sha384', 'sha512']) if private_key.algorithm == 'rsa' and not rsa_pss_padding: valid_hash_algorithms |= set(['raw']) if hash_algorithm not in valid_hash_algorithms: valid_hash_algorithms_error = '"md5", "sha1", "sha256", "sha384", "sha512"' - if private_key.algorithm == 'rsa' and not rsa_pss_padding: + if pkey_is_rsa and not rsa_pss_padding: valid_hash_algorithms_error += ', "raw"' raise ValueError(pretty_message( ''' @@ -2813,13 +2833,13 @@ def _sign(private_key, data, hash_algorithm, rsa_pss_padding=False): repr(hash_algorithm) )) - if private_key.algorithm != 'rsa' 
and rsa_pss_padding is not False: + if not pkey_is_rsa and rsa_pss_padding is not False: raise ValueError(pretty_message( ''' PSS padding may only be used with RSA keys - signing via a %s key was requested ''', - private_key.algorithm.upper() + pkey_alg.upper() )) if hash_algorithm == 'raw': @@ -2867,12 +2887,13 @@ def _advapi32_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): """ algo = private_key.algorithm + algo_is_rsa = algo == 'rsa' or algo == 'rsassa_pss' - if algo == 'rsa' and hash_algorithm == 'raw': + if algo_is_rsa and hash_algorithm == 'raw': padded_data = add_pkcs1v15_signature_padding(private_key.byte_size, data) return raw_rsa_private_crypt(private_key, padded_data) - if algo == 'rsa' and rsa_pss_padding: + if algo_is_rsa and rsa_pss_padding: hash_length = { 'sha1': 20, 'sha224': 28, @@ -3003,7 +3024,10 @@ def _bcrypt_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): padding_info = null() flags = 0 - if private_key.algorithm == 'rsa': + pkey_alg = private_key.algorithm + pkey_is_rsa = pkey_alg == 'rsa' or pkey_alg == 'rsassa_pss' + + if pkey_is_rsa: if rsa_pss_padding: hash_length = { 'md5': 16, @@ -3032,7 +3056,7 @@ def _bcrypt_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): padding_info_struct.pszAlgId = cast(bcrypt, 'wchar_t *', hash_buffer) padding_info = cast(bcrypt, 'void *', padding_info_struct_pointer) - if private_key.algorithm == 'dsa' and private_key.bit_size > 1024 and hash_algorithm in set(['md5', 'sha1']): + if pkey_alg == 'dsa' and private_key.bit_size > 1024 and hash_algorithm in set(['md5', 'sha1']): raise ValueError(pretty_message( ''' Windows does not support sha1 signatures with DSA keys based on @@ -3056,7 +3080,7 @@ def _bcrypt_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): buffer_len = deref(out_len) buffer = buffer_from_bytes(buffer_len) - if private_key.algorithm == 'rsa': + if pkey_is_rsa: padding_info = cast(bcrypt, 'void *', padding_info_struct_pointer) res = bcrypt.BCryptSignHash( @@ -3072,7 +3096,7 @@ def _bcrypt_sign(private_key, data, hash_algorithm, rsa_pss_padding=False): handle_error(res) signature = bytes_from_buffer(buffer, deref(out_len)) - if private_key.algorithm != 'rsa': + if not pkey_is_rsa: # Windows doesn't use the ASN.1 Sequence for DSA/ECDSA signatures, # so we have to convert it here for the verification to work signature = DSASignature.from_p1363(signature).dump() diff --git a/app/lib/package_control/deps/oscrypto/_win/symmetric.py b/app/lib/package_control/deps/oscrypto/_win/symmetric.py index e93cd13..ff23109 100644 --- a/app/lib/package_control/deps/oscrypto/_win/symmetric.py +++ b/app/lib/package_control/deps/oscrypto/_win/symmetric.py @@ -790,8 +790,8 @@ def _encrypt(cipher, key, data, iv, padding): if cipher != 'rc4' and not padding: # AES in CBC mode can be allowed with no padding if - # the data is an exact multiple of the key size - if not (cipher == 'aes' and padding is False and len(data) % len(key) == 0): + # the data is an exact multiple of the block size + if not (cipher == 'aes' and len(data) % 16 == 0): raise ValueError('padding must be specified') if _backend == 'winlegacy': @@ -1014,7 +1014,7 @@ def _decrypt(cipher, key, data, iv, padding): type_name(iv) )) - if cipher != 'rc4' and padding is None: + if cipher not in set(['rc4', 'aes']) and not padding: raise ValueError('padding must be specified') if _backend == 'winlegacy': diff --git a/app/lib/package_control/deps/oscrypto/_win/tls.py b/app/lib/package_control/deps/oscrypto/_win/tls.py index 
fcb3166..23e2027 100644
--- a/app/lib/package_control/deps/oscrypto/_win/tls.py
+++ b/app/lib/package_control/deps/oscrypto/_win/tls.py
@@ -388,7 +388,7 @@ def wrap(cls, socket, hostname, session=None):
     def __init__(self, address, port, timeout=10, session=None):
         """
         :param address:
-            A unicode string of the domain name or IP address to conenct to
+            A unicode string of the domain name or IP address to connect to

         :param port:
             An integer of the port number to connect to
diff --git a/app/lib/package_control/deps/oscrypto/version.py b/app/lib/package_control/deps/oscrypto/version.py
index 85da143..b7c352c 100644
--- a/app/lib/package_control/deps/oscrypto/version.py
+++ b/app/lib/package_control/deps/oscrypto/version.py
@@ -2,5 +2,5 @@

 from __future__ import unicode_literals, division, absolute_import, print_function

-__version__ = '1.2.1'
-__version_info__ = (1, 2, 1)
+__version__ = '1.3.0'
+__version_info__ = (1, 3, 0)
diff --git a/app/lib/package_control/download_manager.py b/app/lib/package_control/download_manager.py
index 3cba12a..0c611c3 100644
--- a/app/lib/package_control/download_manager.py
+++ b/app/lib/package_control/download_manager.py
@@ -1,94 +1,126 @@
+import os
 import re
 import socket
-from threading import Lock, Timer
-from contextlib import contextmanager
 import sys
-
-try:
-    # Python 3
-    from urllib.parse import urlparse
-    str_cls = str
-except (ImportError):
-    # Python 2
-    from urlparse import urlparse
-    str_cls = unicode  # noqa
+from threading import Lock, Timer
+from urllib.parse import urljoin, urlparse

 from . import __version__
-
-from .show_error import show_error
-from .console_write import console_write
-from .cache import set_cache, get_cache
-from .unicode import unicode_from_os
 from . import text
+from .cache import set_cache, get_cache
+from .console_write import console_write
+from .show_error import show_error
 from .downloaders import DOWNLOADERS
-from .downloaders.urllib_downloader import UrlLibDownloader
 from .downloaders.binary_not_found_error import BinaryNotFoundError
-from .downloaders.rate_limit_exception import RateLimitException
 from .downloaders.downloader_exception import DownloaderException
-from .downloaders.win_downloader_exception import WinDownloaderException
-from .downloaders.oscrypto_downloader_exception import OscryptoDownloaderException
+from .downloaders.rate_limit_exception import RateLimitException
+from .downloaders.rate_limit_exception import RateLimitSkipException
 from .http_cache import HttpCache

+_http_cache = None

-# A dict of domains - each points to a list of downloaders
 _managers = {}
+"""A dict of domains - each points to a list of downloaders"""

-# How many managers are currently checked out
 _in_use = 0
+"""How many managers are currently checked out"""

-# Make sure connection management doesn't run into threading issues
 _lock = Lock()
+"""Make sure connection management doesn't run into threading issues"""

-# A timer used to disconnect all managers after a period of no usage
 _timer = None
+"""A timer used to disconnect all managers after a period of no usage"""


-@contextmanager
-def downloader(url, settings):
+def http_get(url, settings, error_message='', prefer_cached=False):
+    """
+    Performs an HTTP GET request using the best matching downloader.
+
+    :param url:
+        The string URL to download
+
+    :param settings:
+        The dictionary with downloader settings.
+
+          - ``debug``
+          - ``downloader_precedence``
+          - ``http_basic_auth``
+          - ``http_cache``
+          - ``http_cache_length``
+          - ``http_proxy``
+          - ``https_proxy``
+          - ``proxy_username``
+          - ``proxy_password``
+          - ``user_agent``
+          - ``timeout``
+
+    :param error_message:
+        The error message to include if the download fails
+
+    :param prefer_cached:
+        If a cached version of the URL content is preferred over a new request
+
+    :raises:
+        DownloaderException: if there was an error downloading the URL
+
+    :return:
+        The string contents of the URL
+    """
+
+    manager = None
+    result = None
+
     try:
-        manager = None
         manager = _grab(url, settings)
-        yield manager
+        result = manager.fetch(url, error_message, prefer_cached)

     finally:
         if manager:
             _release(url, manager)

+    return result
+

 def _grab(url, settings):
-    global _managers, _lock, _in_use, _timer
+    global _http_cache, _managers, _lock, _in_use, _timer

-    _lock.acquire()
-    try:
+    with _lock:
         if _timer:
             _timer.cancel()
             _timer = None

         parsed = urlparse(url)
         if not parsed or not parsed.hostname:
-            raise DownloaderException(u'The URL "%s" is malformed' % url)
+            raise DownloaderException('The URL "%s" is malformed' % url)
+
         hostname = parsed.hostname.lower()
         if hostname not in _managers:
             _managers[hostname] = []

         if not _managers[hostname]:
-            _managers[hostname].append(DownloadManager(settings))
+            http_cache = None
+            if settings.get('http_cache'):
+                # first call defines http cache settings
+                # It is safe to assume all calls share the same settings.
+                if not _http_cache:
+                    _http_cache = HttpCache(settings.get('http_cache_length', 604800))
+                http_cache = _http_cache
+
+            _managers[hostname].append(DownloadManager(settings, http_cache))

         _in_use += 1

         return _managers[hostname].pop()
-    finally:
-        _lock.release()
-

 def _release(url, manager):
     global _managers, _lock, _in_use, _timer

-    _lock.acquire()
-    try:
-        hostname = urlparse(url).hostname.lower()
+    with _lock:
+        parsed = urlparse(url)
+        if not parsed or not parsed.hostname:
+            raise DownloaderException('The URL "%s" is malformed' % url)
+
+        hostname = parsed.hostname.lower()

         # This means the package was reloaded between _grab and _release,
         # so the downloader is using old code and we want to discard it
@@ -107,26 +139,100 @@ def _release(url, manager):

             _timer = Timer(5.0, close_all_connections)
             _timer.start()
-    finally:
-        _lock.release()
-

 def close_all_connections():
-    global _managers, _lock, _in_use, _timer
+    global _http_cache, _managers, _lock, _in_use, _timer

-    _lock.acquire()
-    try:
+    with _lock:
         if _timer:
             _timer.cancel()
             _timer = None

-        for domain, managers in _managers.items():
+        if _http_cache:
+            _http_cache.prune()
+            _http_cache = None
+
+        for managers in _managers.values():
             for manager in managers:
                 manager.close()

         _managers = {}
-    finally:
-        _lock.release()
+

+def resolve_urls(root_url, uris):
+    """
+    Convert a list of relative URIs to absolute URLs/paths.
+
+    :param root_url:
+        The root URL string
+
+    :param uris:
+        An iterable of relative URIs to resolve.
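+
+        For example (URLs illustrative only), resolving against a remote
+        repository root::
+
+            list(resolve_urls('https://example.com/packages/repo.json',
+                              ['./channel.json', '//cdn.example.com/list.json']))
+            # ['https://example.com/packages/channel.json',
+            #  'https://cdn.example.com/list.json']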
+
+    :returns:
+        A generator of resolved URLs
+    """
+
+    scheme_match = re.match(r'(https?:)//', root_url, re.I)
+    if scheme_match is None:
+        root_dir = os.path.dirname(root_url)
+    else:
+        root_dir = ''
+
+    for url in uris:
+        if not url:
+            continue
+        if url.startswith('//'):
+            if scheme_match is not None:
+                url = scheme_match.group(1) + url
+            else:
+                url = 'https:' + url
+        elif url.startswith('/'):
+            # We don't allow absolute repositories
+            continue
+        elif url.startswith('./') or url.startswith('../'):
+            if root_dir:
+                url = os.path.normpath(os.path.join(root_dir, url))
+            else:
+                url = urljoin(root_url, url)
+        yield url
+
+
+def resolve_url(root_url, url):
+    """
+    Convert a single relative URI to an absolute URL/path.
+
+    :param root_url:
+        The root URL string
+
+    :param url:
+        The relative URI to resolve
+
+    :returns:
+        The resolved URL string
+    """
+
+    if not url:
+        return url
+
+    scheme_match = re.match(r'(https?:)//', root_url, re.I)
+    if scheme_match is None:
+        root_dir = os.path.dirname(root_url)
+    else:
+        root_dir = ''
+
+    if url.startswith('//'):
+        if scheme_match is not None:
+            return scheme_match.group(1) + url
+        else:
+            return 'https:' + url
+
+    elif url.startswith('./') or url.startswith('../'):
+        if root_dir:
+            return os.path.normpath(os.path.join(root_dir, url))
+        else:
+            return urljoin(root_url, url)
+
+    return url


 def update_url(url, debug):
@@ -150,7 +256,7 @@ def update_url(url, debug):
     original_url = url
     url = url.replace('://raw.github.com/', '://raw.githubusercontent.com/')
     url = url.replace('://nodeload.github.com/', '://codeload.github.com/')
-    url = re.sub('^(https://codeload.github.com/[^/]+/[^/]+/)zipball(/.*)$', '\\1zip\\2', url)
+    url = re.sub(r'^(https://codeload\.github\.com/[^/#?]+/[^/#?]+/)zipball(/.*)$', '\\1zip\\2', url)

     # Fix URLs from old versions of Package Control since we are going to
     # remove all packages but Package Control from them to force upgrades
@@ -159,7 +265,7 @@ def update_url(url, debug):

     if debug and url != original_url:
         console_write(
-            u'''
+            '''
             Fixed URL from %s to %s
             ''',
             (original_url, url)
@@ -168,20 +274,38 @@ def update_url(url, debug):

     return url


-class DownloadManager(object):
+class DownloadManager:

-    def __init__(self, settings):
+    def __init__(self, settings, http_cache=None):
         # Cache the downloader for re-use
         self.downloader = None

-        user_agent = settings.get('user_agent')
-        if user_agent and user_agent.find('%s') != -1:
-            settings['user_agent'] = user_agent % __version__
-
-        self.settings = settings
-        if settings.get('http_cache'):
-            cache_length = settings.get('http_cache_length', 604800)
-            self.settings['cache'] = HttpCache(cache_length)
+        keys_to_copy = {
+            'debug',
+            'downloader_precedence',
+            'http_basic_auth',
+            'http_proxy',
+            'https_proxy',
+            'proxy_username',
+            'proxy_password',
+            'user_agent',
+            'timeout',
+        }
+
+        # Copy required settings to avoid manipulating the caller's environment.
+        # It's needed because e.g. `cache_length` is defined with a different meaning
+        # in PackageManager's settings. Also, the `cache` object shouldn't be
+        # propagated to the caller.
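+        # For example (values illustrative only): settings of
+        # {'debug': True, 'http_cache': True, 'cache_length': 300}
+        # reduce to {'debug': True}; the cache itself is attached below
+        # via the separate http_cache parameter.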
+ self.settings = {key: value for key, value in settings.items() if key in keys_to_copy} + + # add package control version to user agent + user_agent = self.settings.get('user_agent') + if user_agent and '%s' in user_agent: + self.settings['user_agent'] = user_agent % __version__ + + # assign global http cache storage driver + if http_cache: + self.settings['cache'] = http_cache + self.settings['cache_length'] = http_cache.ttl def close(self): if self.downloader: @@ -224,7 +348,7 @@ def fetch(self, url, error_message, prefer_cached=False): downloader_precedence = self.settings.get( 'downloader_precedence', { - "windows": ["wininet", "oscrypto"], + "windows": ["wininet", "oscrypto", "urllib"], "osx": ["urllib", "oscrypto", "curl"], "linux": ["urllib", "oscrypto", "curl", "wget"] } @@ -233,7 +357,7 @@ def fetch(self, url, error_message, prefer_cached=False): if not isinstance(downloader_list, list) or len(downloader_list) == 0: error_string = text.format( - u''' + ''' No list of preferred downloaders specified in the "downloader_precedence" setting for the platform "%s" ''', @@ -246,17 +370,17 @@ def fetch(self, url, error_message, prefer_cached=False): if not self.downloader or ( (is_ssl and not self.downloader.supports_ssl()) or (not is_ssl and not self.downloader.supports_plaintext())): + for downloader_name in downloader_list: - if downloader_name not in DOWNLOADERS: - # We ignore oscrypto not being present on Linux since it - # can't be used with on Linux with Sublime Text 3 - if sys.version_info[:2] == (3, 3) and \ - sys.platform == 'linux' and \ - downloader_name == 'oscrypto': + try: + downloader_class = DOWNLOADERS[downloader_name] + if downloader_class is None: continue + + except KeyError: error_string = text.format( - u''' + ''' The downloader "%s" from the "downloader_precedence" setting for the platform "%s" is invalid ''', @@ -266,19 +390,20 @@ def fetch(self, url, error_message, prefer_cached=False): raise DownloaderException(error_string) try: - downloader = DOWNLOADERS[downloader_name](self.settings) + downloader = downloader_class(self.settings) if is_ssl and not downloader.supports_ssl(): continue if not is_ssl and not downloader.supports_plaintext(): continue self.downloader = downloader break - except (BinaryNotFoundError): + + except BinaryNotFoundError: pass if not self.downloader: error_string = text.format( - u''' + ''' None of the preferred downloaders can download %s. 
This is usually either because the ssl module is unavailable @@ -294,9 +419,11 @@ def fetch(self, url, error_message, prefer_cached=False): raise DownloaderException(error_string.replace('\n\n', ' ')) url = url.replace(' ', '%20') - hostname = urlparse(url).hostname - if hostname: - hostname = hostname.lower() + parsed = urlparse(url) + if not parsed or not parsed.hostname: + raise DownloaderException('The URL "%s" is malformed' % url) + hostname = parsed.hostname.lower() + timeout = self.settings.get('timeout', 3) rate_limited_domains = get_cache('rate_limited_domains', []) @@ -317,99 +444,47 @@ def fetch(self, url, error_message, prefer_cached=False): try: ip = socket.gethostbyname(hostname) except (socket.gaierror) as e: - ip = unicode_from_os(e) + ip = str(e) except (TypeError): ip = None console_write( - u''' + ''' Download Debug URL: %s Timeout: %s Resolved IP: %s ''', - (url, str_cls(timeout), ip) + (url, str(timeout), ip) ) if ipv6: console_write( - u' Resolved IPv6: %s', + ' Resolved IPv6: %s', ipv6, prefix=False ) if hostname in rate_limited_domains: - error_string = u'Skipping due to hitting rate limit for %s' % hostname + exception = RateLimitSkipException(hostname) if self.settings.get('debug'): - console_write( - u' %s', - error_string, - prefix=False - ) - raise DownloaderException(error_string) + console_write(' %s' % exception, prefix=False) + raise exception try: return self.downloader.download(url, error_message, timeout, 3, prefer_cached) except (RateLimitException) as e: - rate_limited_domains.append(hostname) - set_cache('rate_limited_domains', rate_limited_domains, self.settings.get('cache_length')) - - console_write( - u''' - Hit rate limit of %s for %s. Skipping all futher download - requests for this domain. - ''', - (e.limit, e.domain) + set_cache( + 'rate_limited_domains', + rate_limited_domains, + self.settings.get('cache_length', 604800) ) - raise - except (OscryptoDownloaderException) as e: console_write( - u''' - Attempting to use Urllib downloader due to Oscrypto error: %s + ''' + %s Skipping all further download requests for this domain. ''', - str_cls(e) + str(e) ) - - self.downloader = UrlLibDownloader(self.settings) - # Try again with the new downloader! - return self.fetch(url, error_message, prefer_cached) - - except (WinDownloaderException) as e: - - console_write( - u''' - Attempting to use Urllib downloader due to WinINet error: %s - ''', - e - ) - - # Here we grab the proxy info extracted from WinInet to fill in - # the Package Control settings if those are not present. This should - # hopefully make a seamless fallback for users who run into weird - # windows errors related to network communication. 
-            wininet_proxy = self.downloader.proxy or ''
-            wininet_proxy_username = self.downloader.proxy_username or ''
-            wininet_proxy_password = self.downloader.proxy_password or ''
-
-            http_proxy = self.settings.get('http_proxy', '')
-            https_proxy = self.settings.get('https_proxy', '')
-            proxy_username = self.settings.get('proxy_username', '')
-            proxy_password = self.settings.get('proxy_password', '')
-
-            settings = self.settings.copy()
-            if not http_proxy and wininet_proxy:
-                settings['http_proxy'] = wininet_proxy
-            if not https_proxy and wininet_proxy:
-                settings['https_proxy'] = wininet_proxy
-
-            has_proxy = settings.get('http_proxy') or settings.get('https_proxy')
-            if has_proxy and not proxy_username and wininet_proxy_username:
-                settings['proxy_username'] = wininet_proxy_username
-            if has_proxy and not proxy_password and wininet_proxy_password:
-                settings['proxy_password'] = wininet_proxy_password
-
-            self.downloader = UrlLibDownloader(settings)
-            # Try again with the new downloader!
-            return self.fetch(url, error_message, prefer_cached)
+            raise

diff --git a/app/lib/package_control/downloaders/__init__.py b/app/lib/package_control/downloaders/__init__.py
index 91d763c..158bd4b 100644
--- a/app/lib/package_control/downloaders/__init__.py
+++ b/app/lib/package_control/downloaders/__init__.py
@@ -1,25 +1,46 @@
 import sys

+from ..console_write import console_write
+
 from .urllib_downloader import UrlLibDownloader
 from .curl_downloader import CurlDownloader
 from .wget_downloader import WgetDownloader

 DOWNLOADERS = {
+    'oscrypto': None,
     'urllib': UrlLibDownloader,
     'curl': CurlDownloader,
     'wget': WgetDownloader
 }

-# oscrypto can fail badly on Linux in the Sublime Text 3 environment due to
-# trying to mix the statically-linked OpenSSL in plugin_host with the OpenSSL
-# loaded from the operating system. On Python 3.8 we dynamically link OpenSSL,
-# so it just needs to be configured properly, which is handled in
-# oscrypto_downloader.py.
-if sys.platform != 'linux' or sys.version_info[:2] != (3, 3) or \
-        sys.executable != 'python3':
-    from .oscrypto_downloader import OscryptoDownloader
-    DOWNLOADERS['oscrypto'] = OscryptoDownloader
+# oscrypto can fail badly
+# 1. on Linux in the Sublime Text 3 environment due to trying to mix the
+# statically-linked OpenSSL in plugin_host with the OpenSSL loaded from the
+# operating system. On Python 3.8 we dynamically link OpenSSL, so it just needs
+# to be configured properly, which is handled in oscrypto_downloader.py.
+# 2. on the macOS ARM platform, for reasons not yet understood. Due to the
+# maintenance state of oscrypto, start fading it out by disabling it on
+# Python 3.8 (ST4).
+if sys.platform != 'linux' and sys.version_info[:2] == (3, 3):
+    try:
+        from .oscrypto_downloader import OscryptoDownloader
+        DOWNLOADERS['oscrypto'] = OscryptoDownloader
+    except Exception as e:
+        console_write(
+            '''
+            OscryptoDownloader not available! %s
+            ''',
+            str(e)
+        )

 if sys.platform == 'win32':
-    from .wininet_downloader import WinINetDownloader
-    DOWNLOADERS['wininet'] = WinINetDownloader
+    try:
+        from .wininet_downloader import WinINetDownloader
+        DOWNLOADERS['wininet'] = WinINetDownloader
+    except Exception as e:
+        DOWNLOADERS['wininet'] = None
+        console_write(
+            '''
+            WinINetDownloader not available!
%s + ''', + str(e) + ) diff --git a/app/lib/package_control/downloaders/background_downloader.py b/app/lib/package_control/downloaders/background_downloader.py deleted file mode 100644 index a689d8e..0000000 --- a/app/lib/package_control/downloaders/background_downloader.py +++ /dev/null @@ -1,63 +0,0 @@ -import threading - - -class BackgroundDownloader(threading.Thread): - - """ - Downloads information from one or more URLs in the background. - Normal usage is to use one BackgroundDownloader per domain name. - - :param settings: - A dict containing at least the following fields: - `cache_length`, - `debug`, - `timeout`, - `user_agent`, - `http_proxy`, - `https_proxy`, - `proxy_username`, - `proxy_password` - - :param providers: - An array of providers that can download the URLs - """ - - def __init__(self, settings, providers): - self.settings = settings - self.urls = [] - self.providers = providers - self.used_providers = {} - threading.Thread.__init__(self) - - def add_url(self, url): - """ - Adds a URL to the list to download - - :param url: - The URL to download info about - """ - - self.urls.append(url) - - def get_provider(self, url): - """ - Returns the provider for the URL specified - - :param url: - The URL to return the provider for - - :return: - The provider object for the URL - """ - - return self.used_providers.get(url) - - def run(self): - for url in self.urls: - for provider_class in self.providers: - if provider_class.match_url(url): - provider = provider_class(url, self.settings) - break - - provider.prefetch() - self.used_providers[url] = provider diff --git a/app/lib/package_control/downloaders/basic_auth_downloader.py b/app/lib/package_control/downloaders/basic_auth_downloader.py index 333b0b0..9150127 100644 --- a/app/lib/package_control/downloaders/basic_auth_downloader.py +++ b/app/lib/package_control/downloaders/basic_auth_downloader.py @@ -1,14 +1,9 @@ import base64 -try: - # Python 3 - from urllib.parse import urlparse -except (ImportError): - # Python 2 - from urlparse import urlparse +from urllib.parse import urlparse -class BasicAuthDownloader(object): +class BasicAuthDownloader: """ A base for downloaders to add an HTTP basic auth header @@ -64,7 +59,6 @@ def get_username_password(self, url): domain_name = urlparse(url).netloc auth_settings = self.settings.get('http_basic_auth') - domain_name = urlparse(url).netloc if auth_settings and isinstance(auth_settings, dict): params = auth_settings.get(domain_name) if params and isinstance(params, (list, tuple)) and len(params) == 2: diff --git a/app/lib/package_control/downloaders/binary_not_found_error.py b/app/lib/package_control/downloaders/binary_not_found_error.py index 7e343ad..acff446 100644 --- a/app/lib/package_control/downloaders/binary_not_found_error.py +++ b/app/lib/package_control/downloaders/binary_not_found_error.py @@ -1,17 +1,3 @@ -import sys - - class BinaryNotFoundError(Exception): """If a necessary executable is not found in the PATH on the system""" - - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/caching_downloader.py b/app/lib/package_control/downloaders/caching_downloader.py index af92f69..14191f8 100644 --- a/app/lib/package_control/downloaders/caching_downloader.py +++ b/app/lib/package_control/downloaders/caching_downloader.py @@ -4,15 +4,8 @@ from ..console_write 
import console_write -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - -class CachingDownloader(object): +class CachingDownloader: """ A base downloader that will use a caching backend to cache HTTP requests @@ -89,7 +82,7 @@ def cache_result(self, method, url, status, headers, content): if not cache: if debug: console_write( - u''' + ''' Skipping cache since there is no cache object ''' ) @@ -98,7 +91,7 @@ def cache_result(self, method, url, status, headers, content): if method.lower() != 'get': if debug: console_write( - u''' + ''' Skipping cache since the HTTP method != GET ''' ) @@ -110,7 +103,7 @@ def cache_result(self, method, url, status, headers, content): if status not in (200, 304): if debug: console_write( - u''' + ''' Skipping cache since the HTTP status code not one of: 200, 304 ''' ) @@ -123,7 +116,7 @@ def cache_result(self, method, url, status, headers, content): if cached_content: if debug: console_write( - u''' + ''' Using cached content for %s from %s ''', (url, cache.path(key)) @@ -148,7 +141,7 @@ def cache_result(self, method, url, status, headers, content): if headers.get('content-type') in ('application/zip', 'application/octet-stream'): if debug: console_write( - u''' + ''' Skipping cache since the response is a zip file ''' ) @@ -166,7 +159,7 @@ def cache_result(self, method, url, status, headers, content): info_key = self.generate_key(url, '.info') if debug: console_write( - u''' + ''' Caching %s in %s ''', (url, cache.path(key)) @@ -191,7 +184,7 @@ def generate_key(self, url, suffix=''): A string key for the URL """ - if isinstance(url, str_cls): + if isinstance(url, str): url = url.encode('utf-8') key = hashlib.md5(url).hexdigest() @@ -214,7 +207,7 @@ def retrieve_cached(self, url): if not cache: if debug: console_write( - u''' + ''' Skipping cache since there is no cache object ''' ) @@ -225,7 +218,7 @@ def retrieve_cached(self, url): cached_content = cache.get(key) if cached_content and debug: console_write( - u''' + ''' Using cached content for %s from %s ''', (url, cache.path(key)) diff --git a/app/lib/package_control/downloaders/cli_downloader.py b/app/lib/package_control/downloaders/cli_downloader.py index cebbb22..31365a0 100644 --- a/app/lib/package_control/downloaders/cli_downloader.py +++ b/app/lib/package_control/downloaders/cli_downloader.py @@ -7,7 +7,7 @@ from .binary_not_found_error import BinaryNotFoundError -class CliDownloader(object): +class CliDownloader: """ Base for downloaders that use a command line program @@ -43,13 +43,16 @@ def find_binary(self, name): # This is mostly for OS X, which seems to launch ST with a # minimal set of environmental variables dirs.append('/usr/local/bin') + executable = name + else: + executable = name + ".exe" for dir_ in dirs: - path = os.path.join(dir_, name) + path = os.path.join(dir_, executable) if os.path.exists(path): return path - raise BinaryNotFoundError('The binary %s could not be located' % name) + raise BinaryNotFoundError('The binary %s could not be located' % executable) def execute(self, args): """ @@ -67,19 +70,24 @@ def execute(self, args): if self.settings.get('debug'): console_write( - u''' + ''' Trying to execute command %s ''', create_cmd(args) ) - proc = subprocess.Popen(args, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + startupinfo = None + if os.name == 'nt': + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - output = proc.stdout.read() - self.stderr = 
proc.stderr.read() - returncode = proc.wait() - if returncode != 0: - error = NonCleanExitError(returncode) + proc = subprocess.Popen( + args, startupinfo=startupinfo, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + + output, self.stderr = proc.communicate() + + if proc.returncode != 0: + error = NonCleanExitError(proc.returncode) error.stderr = self.stderr error.stdout = output raise error diff --git a/app/lib/package_control/downloaders/curl_downloader.py b/app/lib/package_control/downloaders/curl_downloader.py index da4af55..d0e8c1b 100644 --- a/app/lib/package_control/downloaders/curl_downloader.py +++ b/app/lib/package_control/downloaders/curl_downloader.py @@ -2,23 +2,15 @@ import re import os -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - +from ..ca_certs import get_ca_bundle_path from ..console_write import console_write -from ..open_compat import open_compat, read_compat from .cli_downloader import CliDownloader from .non_clean_exit_error import NonCleanExitError from .downloader_exception import DownloaderException -from ..ca_certs import get_ca_bundle_path -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader class CurlDownloader(CliDownloader, DecodingDownloader, LimitingDownloader, CachingDownloader, BasicAuthDownloader): @@ -83,7 +75,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): command = [ self.curl, '--connect-timeout', - str_cls(int(timeout)), + str(int(timeout)), '-sSL', '--tlsv1', # We have to capture the headers to check for rate limit info @@ -109,7 +101,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): for name, value in request_headers.items(): command.extend(['--header', "%s: %s" % (name, value)]) - secure_url_match = re.match('^https://([^/]+)', url) + secure_url_match = re.match(r'^https://([^/#?]+)', url) if secure_url_match is not None: bundle_path = get_ca_bundle_path(self.settings) command.extend(['--cacert', bundle_path]) @@ -125,7 +117,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if debug: console_write( - u''' + ''' Curl Debug Proxy http_proxy: %s https_proxy: %s @@ -139,7 +131,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): command.append('--proxy-anyauth') if proxy_username or proxy_password: - command.extend(['-U', u"%s:%s" % (proxy_username, proxy_password)]) + command.extend(['-U', "%s:%s" % (proxy_username, proxy_password)]) if http_proxy: os.putenv('http_proxy', http_proxy) @@ -154,8 +146,8 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): try: output = self.execute(command) - with open_compat(self.tmp_file, 'r') as f: - headers_str = read_compat(f) + with open(self.tmp_file, 'r') as fobj: + headers_str = fobj.read() self.clean_tmp_file() message = 'OK' @@ -214,21 +206,21 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): # GitHub and BitBucket seem to rate limit via 503 if tries and debug: console_write( - u''' + ''' Downloading %s was rate limited, trying again ''', url ) continue - download_error = u'HTTP error ' + code + download_error = 'HTTP error ' + code elif e.returncode == 7: # If the user could not connect, check for ipv6 errors and # if so, force curl to use ipv4. 
Apparently some users have # network configuration where curl will try ipv6 and resolve # it, but their ISP won't actually route it. - full_debug = u"\n".join([section['contents'] for section in debug_sections]) + full_debug = "\n".join([section['contents'] for section in debug_sections]) ipv6_error = re.search( r'^\s*connect to ([0-9a-f]+(:+[0-9a-f]+)+) port \d+ failed: Network is unreachable', full_debug, @@ -237,7 +229,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if ipv6_error and tries != 0: if debug: console_write( - u''' + ''' Downloading %s failed because the ipv6 address %s was not reachable, retrying using ipv4 ''', @@ -247,20 +239,20 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): continue elif e.returncode == 6: - download_error = u'URL error host not found' + download_error = 'URL error host not found' elif e.returncode == 28: # GitHub and BitBucket seem to time out a lot if tries and debug: console_write( - u''' + ''' Downloading %s timed out, trying again ''', url ) continue - error_string = u'%s %s downloading %s.' % (error_message, download_error, url) + error_string = '%s %s downloading %s.' % (error_message, download_error, url) break @@ -276,9 +268,9 @@ def print_debug(self, sections): for section in sections: type = section['type'] - indented_contents = section['contents'].replace(u"\n", u"\n ") + indented_contents = section['contents'].replace("\n", "\n ") console_write( - u''' + ''' Curl HTTP Debug %s %s ''', @@ -321,34 +313,34 @@ def split_debug(self, string): section = 'General' last_section = None - stderr = u'' + stderr = '' debug_sections = [] - debug_section = u'' + debug_section = '' for line in string.splitlines(): # Placeholder for body of request - if line and line[0:2] == u'{ ': + if line and line[0:2] == '{ ': continue - if line and line[0:18] == u'} [data not shown]': + if line and line[0:18] == '} [data not shown]': continue if len(line) > 1: subtract = 0 - if line[0:2] == u'* ': - section = u'General' + if line[0:2] == '* ': + section = 'General' subtract = 2 - elif line[0:2] == u'> ': - section = u'Write' + elif line[0:2] == '> ': + section = 'Write' subtract = 2 - elif line[0:2] == u'< ': - section = u'Read' + elif line[0:2] == '< ': + section = 'Read' subtract = 2 line = line[subtract:] # If the line does not start with "* ", "< ", "> " or " " # then it is a real stderr message - if subtract == 0 and line[0:2] != u' ': - stderr += line.rstrip() + u' ' + if subtract == 0 and line[0:2] != ' ': + stderr += line.rstrip() + ' ' continue if line.strip() == '': @@ -359,9 +351,9 @@ def split_debug(self, string): 'type': section, 'contents': debug_section.rstrip() }) - debug_section = u'' + debug_section = '' - debug_section += u"%s\n" % line + debug_section += "%s\n" % line last_section = section if len(debug_section.rstrip()) > 0: diff --git a/app/lib/package_control/downloaders/decoding_downloader.py b/app/lib/package_control/downloaders/decoding_downloader.py index 3396931..6434c9b 100644 --- a/app/lib/package_control/downloaders/decoding_downloader.py +++ b/app/lib/package_control/downloaders/decoding_downloader.py @@ -1,12 +1,6 @@ import gzip import zlib - -try: - # Python 3 - from io import BytesIO as StringIO -except (ImportError): - # Python 2 - from StringIO import StringIO +from io import BytesIO try: import bz2 @@ -16,7 +10,7 @@ from .downloader_exception import DownloaderException -class DecodingDownloader(object): +class DecodingDownloader: """ A base for downloaders that 
provides the ability to decode bzip2ed, gzipped @@ -55,9 +49,9 @@ def decode_response(self, encoding, response): if bz2: return bz2.decompress(response) else: - raise DownloaderException(u'Received bzip2 file contents, but was unable to import the bz2 module') + raise DownloaderException('Received bzip2 file contents, but was unable to import the bz2 module') elif encoding == 'gzip': - return gzip.GzipFile(fileobj=StringIO(response)).read() + return gzip.GzipFile(fileobj=BytesIO(response)).read() elif encoding == 'deflate': decompresser = zlib.decompressobj(-zlib.MAX_WBITS) return decompresser.decompress(response) + decompresser.flush() diff --git a/app/lib/package_control/downloaders/downloader_exception.py b/app/lib/package_control/downloaders/downloader_exception.py index c727bb3..c76043c 100644 --- a/app/lib/package_control/downloaders/downloader_exception.py +++ b/app/lib/package_control/downloaders/downloader_exception.py @@ -1,17 +1,3 @@ -import sys - - class DownloaderException(Exception): """If a downloader could not download a URL""" - - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/http_error.py b/app/lib/package_control/downloaders/http_error.py index 40af08a..10e09dd 100644 --- a/app/lib/package_control/downloaders/http_error.py +++ b/app/lib/package_control/downloaders/http_error.py @@ -1,6 +1,3 @@ -import sys - - class HttpError(Exception): """If a downloader was able to download a URL, but the result was not a 200 or 304""" @@ -8,14 +5,3 @@ class HttpError(Exception): def __init__(self, message, code): self.code = code super(HttpError, self).__init__(message) - - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/limiting_downloader.py b/app/lib/package_control/downloaders/limiting_downloader.py index f4befab..61056b6 100644 --- a/app/lib/package_control/downloaders/limiting_downloader.py +++ b/app/lib/package_control/downloaders/limiting_downloader.py @@ -1,16 +1,9 @@ -try: - # Python 3 - from urllib.parse import urlparse - str_cls = str -except (ImportError): - # Python 2 - from urlparse import urlparse - str_cls = unicode # noqa +from urllib.parse import urlparse from .rate_limit_exception import RateLimitException -class LimitingDownloader(object): +class LimitingDownloader: """ A base for downloaders that checks for rate limiting headers. 
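As context for the mixin below and the `fetch()` changes above: once `handle_rate_limit()` sees an exhausted limit, the hostname is cached and later requests to it are skipped up front. A minimal sketch, assuming the package layout shown in this diff; the hostname, limit and header values are made up:

```python
# Minimal sketch of the rate-limit flow (illustrative values only).
from package_control.downloaders.rate_limit_exception import (
    RateLimitException,
    RateLimitSkipException,
)

headers = {'x-ratelimit-remaining': '0', 'x-ratelimit-limit': '60'}

# handle_rate_limit() (next hunk) treats a remaining count of '0' as
# exhausted and raises RateLimitException(hostname, limit).
if str(headers.get('x-ratelimit-remaining', '1')) == '0':
    print(RateLimitException('api.github.com', headers['x-ratelimit-limit']))
    # -> Hit rate limit of 60 for api.github.com.

# DownloadManager.fetch() caches the hostname in 'rate_limited_domains'
# and short-circuits later requests to it with the skip variant.
print(RateLimitSkipException('api.github.com'))
# -> Skipping api.github.com due to rate limit.
```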
@@ -34,6 +27,6 @@ def handle_rate_limit(self, headers, url): limit_remaining = headers.get('x-ratelimit-remaining', '1') limit = headers.get('x-ratelimit-limit', '1') - if str_cls(limit_remaining) == '0': + if str(limit_remaining) == '0': hostname = urlparse(url).hostname raise RateLimitException(hostname, limit) diff --git a/app/lib/package_control/downloaders/non_clean_exit_error.py b/app/lib/package_control/downloaders/non_clean_exit_error.py index 3088a59..b1c93b9 100644 --- a/app/lib/package_control/downloaders/non_clean_exit_error.py +++ b/app/lib/package_control/downloaders/non_clean_exit_error.py @@ -1,13 +1,3 @@ -import sys - -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - - class NonCleanExitError(Exception): """ @@ -20,13 +10,5 @@ class NonCleanExitError(Exception): def __init__(self, returncode): self.returncode = returncode - def __unicode__(self): - return str_cls(self.returncode) - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') + return str(self.returncode) diff --git a/app/lib/package_control/downloaders/non_http_error.py b/app/lib/package_control/downloaders/non_http_error.py index 8f3054e..8dc5432 100644 --- a/app/lib/package_control/downloaders/non_http_error.py +++ b/app/lib/package_control/downloaders/non_http_error.py @@ -1,17 +1,3 @@ -import sys - - class NonHttpError(Exception): """If a downloader had a non-clean exit, but it was not due to an HTTP error""" - - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') diff --git a/app/lib/package_control/downloaders/oscrypto_downloader.py b/app/lib/package_control/downloaders/oscrypto_downloader.py index 0f0d4c9..b6934e9 100644 --- a/app/lib/package_control/downloaders/oscrypto_downloader.py +++ b/app/lib/package_control/downloaders/oscrypto_downloader.py @@ -3,60 +3,28 @@ from __future__ import unicode_literals, division, absolute_import, print_function import base64 -import re -import sys -import os import hashlib +import os +import re import socket +from urllib.parse import urlparse +from urllib.request import parse_keqv_list, parse_http_list +from .. import text +from ..ca_certs import get_user_ca_bundle_path from ..console_write import console_write -from ..unicode import unicode_from_os -from ..open_compat import open_compat, read_compat +from ..deps.asn1crypto.util import OrderedDict +from ..deps.asn1crypto import pem, x509 from .downloader_exception import DownloaderException from .oscrypto_downloader_exception import OscryptoDownloaderException -from ..ca_certs import get_user_ca_bundle_path -from .decoding_downloader import DecodingDownloader -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader -from .. import text - -from ..deps.asn1crypto.util import OrderedDict -from ..deps.asn1crypto import pem, x509 -from ..deps.oscrypto import use_ctypes, use_openssl - -use_ctypes() - -# On Linux we need to use the version of OpenSSL included with Sublime Text -# to prevent conflicts between two different versions of OpenSSL being -# dynamically linked. On ST3, we can't use oscrypto for OpenSSL stuff since -# it has OpenSSL statically linked, and we can't dlopen() that. 
-# ST 4081 broke sys.executable to return "sublime_text", but other 4xxx builds -# will contain "plugin_host". -if sys.version_info == (3, 8) and sys.platform == 'linux' and ( - 'sublime_text' in sys.executable or - 'plugin_host' in sys.executable): - install_dir = os.path.dirname(sys.executable) - use_openssl( - os.path.join(install_dir, 'libcrypto.so.1.1'), - os.path.join(install_dir, 'libssl.so.1.1') - ) +from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader from ..deps.oscrypto import tls # noqa from ..deps.oscrypto import errors as oscrypto_errors # noqa -if sys.version_info < (3,): - from urlparse import urlparse - - from urllib2 import parse_keqv_list, parse_http_list - str_cls = unicode # noqa - int_types = (int, long) # noqa -else: - from urllib.parse import urlparse - from urllib.request import parse_keqv_list, parse_http_list - str_cls = str - int_types = int - class OscryptoDownloader(DecodingDownloader, LimitingDownloader, CachingDownloader, BasicAuthDownloader): @@ -181,9 +149,9 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if code == 304: return self.cache_result('get', url, code, resp_headers, b'') - if code == 301: + if code == 301 or code == 302: location = resp_headers.get('location') - if not isinstance(location, str_cls): + if not isinstance(location, str): raise OscryptoDownloaderException('Missing or duplicate Location HTTP header') if not re.match(r'https?://', location): if not location.startswith('/'): @@ -224,7 +192,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): ''' %s TLS verification error %s downloading %s. ''', - (error_message, str_cls(e), url) + (error_message, str(e), url) ) except (oscrypto_errors.TLSDisconnectError): @@ -245,7 +213,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): ''' %s TLS error %s downloading %s. ''', - (error_message, str_cls(e), url) + (error_message, str(e), url) ) except (socket.error): @@ -269,7 +237,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): ''' %s OS error %s downloading %s. 
                ''',
-                (error_message, unicode_from_os(e), url)
+                (error_message, str(e), url)
            )

            raise

@@ -360,8 +328,8 @@ def setup_socket(self):
         user_ca_bundle_path = get_user_ca_bundle_path(self.settings)
         if os.path.exists(user_ca_bundle_path):
             try:
-                with open_compat(user_ca_bundle_path, 'rb') as f:
-                    file_data = read_compat(f)
+                with open(user_ca_bundle_path, 'rb') as fobj:
+                    file_data = fobj.read()
                 if len(file_data) > 0:
                     for type_name, headers, der_bytes in pem.unarmor(file_data, multiple=True):
                         extra_trust_roots.append(x509.Certificate.load(der_bytes))
@@ -371,7 +339,7 @@
                         Oscrypto Debug General
                         Error parsing certs file %s: %s
                         ''',
-                        (user_ca_bundle_path, str_cls(e))
+                        (user_ca_bundle_path, str(e))
                    )

         session = tls.TLSSession(extra_trust_roots=extra_trust_roots)
@@ -518,7 +486,7 @@ def parse_content_length(self, headers):
         """

         content_length = headers.get('content-length')
-        if isinstance(content_length, str_cls) and len(content_length) > 0:
+        if isinstance(content_length, str) and len(content_length) > 0:
             content_length = int(content_length)
         return content_length

@@ -600,7 +568,7 @@ def dump_certificate(self, cert):
                 if curve_info[0] == 'named':
                     public_key_algo += ' ' + curve_info[1]
             else:
-                public_key_algo += ' ' + str_cls(cert.public_key.bit_size)
+                public_key_algo += ' ' + str(cert.public_key.bit_size)
             console_write(
                 '''
                 Oscrypto Server TLS Certificate
@@ -647,7 +615,7 @@ def do_proxy_connect(self, headers=None):
         close = False
         for header in ('connection', 'proxy-connection'):
             value = resp_headers.get(header)
-            if isinstance(value, str_cls) and value.lower() == 'close':
+            if isinstance(value, str) and value.lower() == 'close':
                 close = True

         if close:
diff --git a/app/lib/package_control/downloaders/rate_limit_exception.py b/app/lib/package_control/downloaders/rate_limit_exception.py
index 6830faf..6d9ce3d 100644
--- a/app/lib/package_control/downloaders/rate_limit_exception.py
+++ b/app/lib/package_control/downloaders/rate_limit_exception.py
@@ -10,5 +10,19 @@ class RateLimitException(DownloaderException):
     def __init__(self, domain, limit):
         self.domain = domain
         self.limit = limit
-        message = u'Rate limit of %s exceeded for %s' % (limit, domain)
-        super(RateLimitException, self).__init__(message)
+
+    def __str__(self):
+        return 'Hit rate limit of %s for %s.' % (self.limit, self.domain)
+
+
+class RateLimitSkipException(DownloaderException):
+
+    """
+    An exception raised when a request is skipped because an API's rate
+    limit has been exceeded.
+    """
+
+    def __init__(self, domain):
+        self.domain = domain
+
+    def __str__(self):
+        return 'Skipping %s due to rate limit.' % self.domain
diff --git a/app/lib/package_control/downloaders/urllib_downloader.py b/app/lib/package_control/downloaders/urllib_downloader.py
index ebd312e..9455a91 100644
--- a/app/lib/package_control/downloaders/urllib_downloader.py
+++ b/app/lib/package_control/downloaders/urllib_downloader.py
@@ -1,54 +1,27 @@
 import re
-import sys
-
-# Monkey patches various Python 2 issues with urllib2
-from ..
import http # noqa - -try: - # Python 3 - from http.client import HTTPException, BadStatusLine - from urllib.request import ( - build_opener, - HTTPPasswordMgrWithDefaultRealm, - ProxyBasicAuthHandler, - ProxyDigestAuthHandler, - ProxyHandler, - Request, - ) - from urllib.error import HTTPError, URLError - import urllib.request as urllib_compat -except (ImportError): - # Python 2 - from httplib import HTTPException, BadStatusLine - from urllib2 import ( - build_opener, - HTTPPasswordMgrWithDefaultRealm, - ProxyBasicAuthHandler, - ProxyDigestAuthHandler, - ProxyHandler, - Request, - ) - from urllib2 import HTTPError, URLError - import urllib2 as urllib_compat - -try: - # Python 3.3 - import ConnectionError -except (ImportError): - # Python 2.6-3.2 - from socket import error as ConnectionError +import ssl +from http.client import HTTPException, BadStatusLine +from urllib.request import ( + build_opener, + HTTPPasswordMgrWithDefaultRealm, + ProxyBasicAuthHandler, + ProxyDigestAuthHandler, + ProxyHandler, + Request, +) +from urllib.error import HTTPError, URLError +from socket import error as ConnectionError +from .. import text +from ..ca_certs import get_ca_bundle_path, get_user_ca_bundle_path from ..console_write import console_write -from ..unicode import unicode_from_os from ..http.validating_https_handler import ValidatingHTTPSHandler from ..http.debuggable_http_handler import DebuggableHTTPHandler from .downloader_exception import DownloaderException -from ..ca_certs import get_ca_bundle_path -from .decoding_downloader import DecodingDownloader -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader -from .. import text +from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader class UrlLibDownloader(DecodingDownloader, LimitingDownloader, CachingDownloader, BasicAuthDownloader): @@ -151,6 +124,9 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): return self.cache_result('get', url, http_file.getcode(), http_file.headers, result) + except (ssl.CertificateError) as e: + error_string = 'Certificate validation for %s failed: %s' % (url, str(e)) + except (HTTPException) as e: # Since we use keep-alives, it is possible the other end closed # the connection, and we may just need to re-open @@ -164,10 +140,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): exception_type = e.__class__.__name__ error_string = text.format( - u''' + ''' %s HTTP exception %s (%s) downloading %s. ''', - (error_message, exception_type, unicode_from_os(e), url) + (error_message, exception_type, str(e), url) ) except (HTTPError) as e: @@ -179,14 +155,14 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): self.handle_rate_limit(e.headers, url) # Handle cached responses - if unicode_from_os(e.code) == '304': + if str(e.code) == '304': return self.cache_result('get', url, int(e.code), e.headers, b'') # Bitbucket and Github return 503 a decent amount - if unicode_from_os(e.code) == '503' and tries != 0: + if str(e.code) == '503' and tries != 0: if tries and debug: console_write( - u''' + ''' Downloading %s was rate limited, trying again ''', url @@ -194,20 +170,20 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): continue error_string = text.format( - u''' + ''' %s HTTP error %s downloading %s. 
''', - (error_message, unicode_from_os(e.code), url) + (error_message, str(e.code), url) ) except (URLError) as e: # Bitbucket and Github timeout a decent amount - if unicode_from_os(e.reason) == 'The read operation timed out' \ - or unicode_from_os(e.reason) == 'timed out': + if str(e.reason) == 'The read operation timed out' \ + or str(e.reason) == 'timed out': if tries and debug: console_write( - u''' + ''' Downloading %s timed out, trying again ''', url @@ -215,10 +191,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): continue error_string = text.format( - u''' + ''' %s URL error %s downloading %s. ''', - (error_message, unicode_from_os(e.reason), url) + (error_message, str(e.reason), url) ) except (ConnectionError): @@ -226,7 +202,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): # thus getting new handlers and a new connection if debug: console_write( - u''' + ''' Connection went away while trying to download %s, trying again ''', url @@ -240,8 +216,8 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): break if error_string is None: - plural = u's' if tried > 1 else u'' - error_string = u'Unable to download %s after %d attempt%s' % (url, tried, plural) + plural = 's' if tried > 1 else '' + error_string = 'Unable to download %s after %d attempt%s' % (url, tried, plural) raise DownloaderException(error_string) @@ -295,17 +271,17 @@ def setup_opener(self, url, timeout): if https_proxy: password_manager.add_password(None, https_proxy, proxy_username, proxy_password) - handlers = [proxy_handler] - - basic_auth_handler = ProxyBasicAuthHandler(password_manager) - digest_auth_handler = ProxyDigestAuthHandler(password_manager) - handlers.extend([digest_auth_handler, basic_auth_handler]) + handlers = [ + proxy_handler, + ProxyBasicAuthHandler(password_manager), + ProxyDigestAuthHandler(password_manager) + ] debug = self.settings.get('debug') if debug: console_write( - u''' + ''' Urllib Debug Proxy http_proxy: %s https_proxy: %s @@ -315,18 +291,33 @@ def setup_opener(self, url, timeout): (http_proxy, https_proxy, proxy_username, proxy_password) ) - secure_url_match = re.match('^https://([^/]+)', url) + secure_url_match = re.match(r'^https://([^/#?]+)', url) if secure_url_match is not None: - bundle_path = get_ca_bundle_path(self.settings) - bundle_path = bundle_path.encode(sys.getfilesystemencoding()) - handlers.append(ValidatingHTTPSHandler( - ca_certs=bundle_path, - debug=debug, - passwd=password_manager, - user_agent=self.settings.get('user_agent') - )) + if hasattr(ssl.SSLContext, 'load_default_certs'): + # python 3.8 ssl module is able to load CA from native OS + # certificate stores, just need to merge in user defined CA + # No need to create home grown merged CA bundle anymore. 
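+                # (`ssl.SSLContext.load_default_certs` was added in Python 3.4,
+                # so its presence distinguishes the Python 3.8 runtime from
+                # the legacy 3.3 one.)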
+ handlers.append(ValidatingHTTPSHandler( + ca_certs=None, + extra_ca_certs=get_user_ca_bundle_path(self.settings), + debug=debug, + passwd=password_manager, + user_agent=self.settings.get('user_agent') + )) + + else: + # python 3.3 ssl module is not able to access OS cert stores + handlers.append(ValidatingHTTPSHandler( + ca_certs=get_ca_bundle_path(self.settings), + extra_ca_certs=None, + debug=debug, + passwd=password_manager, + user_agent=self.settings.get('user_agent') + )) + else: - handlers.append(DebuggableHTTPHandler(debug=debug, passwd=password_manager)) + handlers.append(DebuggableHTTPHandler(debug=debug)) + self.opener = build_opener(*handlers) def supports_ssl(self): @@ -336,7 +327,7 @@ def supports_ssl(self): :return: If the object supports HTTPS requests """ - return 'ssl' in sys.modules and hasattr(urllib_compat, 'HTTPSHandler') + return True def supports_plaintext(self): """ diff --git a/app/lib/package_control/downloaders/wget_downloader.py b/app/lib/package_control/downloaders/wget_downloader.py index 6a6b0b7..d010990 100644 --- a/app/lib/package_control/downloaders/wget_downloader.py +++ b/app/lib/package_control/downloaders/wget_downloader.py @@ -1,26 +1,18 @@ -import tempfile -import re import os +import re +import sys +import tempfile -try: - # Python 2 - str_cls = unicode -except (NameError): - # Python 3 - str_cls = str - +from ..ca_certs import get_ca_bundle_path from ..console_write import console_write -from ..unicode import unicode_from_os -from ..open_compat import open_compat, read_compat from .cli_downloader import CliDownloader from .non_http_error import NonHttpError from .non_clean_exit_error import NonCleanExitError from .downloader_exception import DownloaderException -from ..ca_certs import get_ca_bundle_path -from .decoding_downloader import DecodingDownloader -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader +from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader class WgetDownloader(CliDownloader, DecodingDownloader, LimitingDownloader, CachingDownloader, BasicAuthDownloader): @@ -85,7 +77,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): self.tmp_file = tempfile.NamedTemporaryFile().name command = [ self.wget, - '--connect-timeout=' + str_cls(int(timeout)), + '--connect-timeout=' + str(int(timeout)), '-o', self.tmp_file, '-O', @@ -113,10 +105,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): for name, value in request_headers.items(): command.extend(['--header', "%s: %s" % (name, value)]) - secure_url_match = re.match('^https://([^/]+)', url) + secure_url_match = re.match(r'^https://([^/#?]+)', url) if secure_url_match is not None: bundle_path = get_ca_bundle_path(self.settings) - command.append(u'--ca-certificate=' + bundle_path) + command.append('--ca-certificate=' + bundle_path) command.append('-S') if self.debug: @@ -130,13 +122,13 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): proxy_password = self.settings.get('proxy_password') if proxy_username: - command.append(u"--proxy-user=%s" % proxy_username) + command.append("--proxy-user=%s" % proxy_username) if proxy_password: - command.append(u"--proxy-password=%s" % proxy_password) + command.append("--proxy-password=%s" % proxy_password) if self.debug: console_write( - u''' + ''' Wget Debug Proxy http_proxy: %s https_proxy: %s @@ -180,7 +172,7 @@ 
def download(self, url, error_message, timeout, tries, prefer_cached=False): # GitHub and BitBucket seem to rate limit via 503 if tries and self.debug: console_write( - u''' + ''' Downloading %s was rate limited, trying again ''', url @@ -191,20 +183,20 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): except (NonHttpError) as e: - download_error = unicode_from_os(e) + download_error = str(e) # GitHub and BitBucket seem to time out a lot if download_error.find('timed out') != -1: if tries and self.debug: console_write( - u''' + ''' Downloading %s timed out, trying again ''', url ) continue - error_string = u'%s %s downloading %s.' % (error_message, download_error, url) + error_string = '%s %s downloading %s.' % (error_message, download_error, url) break @@ -249,8 +241,8 @@ def parse_output(self, clean_run): HTTP header names. """ - with open_compat(self.tmp_file, 'r') as f: - output = read_compat(f).splitlines() + with open(self.tmp_file, 'r', encoding=sys.getdefaultencoding()) as fobj: + output = fobj.read().splitlines() self.clean_tmp_file() debug_missing = False @@ -295,7 +287,7 @@ def parse_output(self, clean_run): continue if section != last_section: - console_write(u'Wget HTTP Debug %s', section) + console_write('Wget HTTP Debug %s', section) if section == 'Read': if debug_missing: @@ -304,7 +296,7 @@ def parse_output(self, clean_run): else: header_lines.append(line) - console_write(u' %s', line, prefix=False) + console_write(' %s', line, prefix=False) last_section = section else: diff --git a/app/lib/package_control/downloaders/wininet_downloader.py b/app/lib/package_control/downloaders/wininet_downloader.py index e61d42f..da7da77 100644 --- a/app/lib/package_control/downloaders/wininet_downloader.py +++ b/app/lib/package_control/downloaders/wininet_downloader.py @@ -1,29 +1,22 @@ from ctypes import windll, wintypes import ctypes -import re import datetime -import struct # To prevent import errors in thread with datetime import locale # noqa +import re +import struct +from urllib.parse import urlparse -from ..console_write import console_write -from ..unicode import unicode_from_os from .. 
import text -from .non_http_error import NonHttpError +from ..console_write import console_write from .http_error import HttpError +from .non_http_error import NonHttpError from .downloader_exception import DownloaderException from .win_downloader_exception import WinDownloaderException -from .decoding_downloader import DecodingDownloader -from .limiting_downloader import LimitingDownloader from .basic_auth_downloader import BasicAuthDownloader from .caching_downloader import CachingDownloader - -try: - # Python 3 - from urllib.parse import urlparse -except (ImportError): - # Python 2 - from urlparse import urlparse +from .decoding_downloader import DecodingDownloader +from .limiting_downloader import LimitingDownloader wininet = windll.wininet @@ -201,7 +194,7 @@ def close(self): if self.debug: s = '' if self.use_count == 1 else 's' console_write( - u''' + ''' WinINet %s Debug General Closing connection to %s on port %s after %s request%s ''', @@ -209,7 +202,7 @@ def close(self): ) if changed_state_back: console_write( - u' Changed Internet Explorer back to Work Offline', + ' Changed Internet Explorer back to Work Offline', prefix=False ) @@ -272,12 +265,12 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): username = url_info.username password = url_info.password - if not username and not password: - username, password = self.get_username_password() - request_headers = { 'Accept-Encoding': self.supported_encodings() } + if not username and not password: + request_headers.update(self.build_auth_header(url)) + request_headers = self.add_conditional_headers(url, request_headers) created_connection = False @@ -323,7 +316,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if not self.network_connection: error_string = text.format( - u''' + ''' %s %s during network phase of downloading %s. ''', (error_message, self.extract_error(), url) @@ -367,7 +360,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if not self.tcp_connection: error_string = text.format( - u''' + ''' %s %s during connection phase of downloading %s. ''', (error_message, self.extract_error(), url) @@ -401,7 +394,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): else: if self.debug: console_write( - u''' + ''' WinINet %s Debug General Re-using connection to %s on port %s for request #%s ''', @@ -431,9 +424,9 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): http_connection = wininet.HttpOpenRequestW( self.tcp_connection, - u'GET', + 'GET', path, - u'HTTP/1.1', + 'HTTP/1.1', None, None, http_flags, @@ -441,7 +434,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): ) if not http_connection: error_string = text.format( - u''' + ''' %s %s during HTTP connection phase of downloading %s. 
''', (error_message, self.extract_error(), url) @@ -450,8 +443,8 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): request_header_lines = [] for header, value in request_headers.items(): - request_header_lines.append(u"%s: %s" % (header, value)) - request_header_lines = u"\r\n".join(request_header_lines) + request_header_lines.append("%s: %s" % (header, value)) + request_header_lines = "\r\n".join(request_header_lines) success = wininet.HttpSendRequestW( http_connection, @@ -463,7 +456,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if not success: error_string = text.format( - u''' + ''' %s %s during HTTP write phase of downloading %s. ''', (error_message, self.extract_error(), url) @@ -474,7 +467,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): self.cache_proxy_info() if self.debug: console_write( - u''' + ''' WinINet Debug Proxy proxy: %s proxy bypass: %s @@ -494,7 +487,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if self.debug and created_connection: if changed_to_online: console_write( - u''' + ''' WinINet HTTP Debug General Internet Explorer was set to Work Offline, temporarily going online ''' @@ -523,16 +516,16 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): issue_date = self.convert_filetime_to_datetime(cert_struct.ftStart) issue_date = issue_date.strftime('%a, %d %b %Y %H:%M:%S GMT') else: - issue_date = u"No issue date" + issue_date = "No issue date" if cert_struct.ftExpiry.dwLowDateTime != 0 and cert_struct.ftExpiry.dwHighDateTime != 0: expiration_date = self.convert_filetime_to_datetime(cert_struct.ftExpiry) expiration_date = expiration_date.strftime('%a, %d %b %Y %H:%M:%S GMT') else: - expiration_date = u"No expiration date" + expiration_date = "No expiration date" console_write( - u''' + ''' WinINet HTTPS Debug General Server SSL Certificate: subject: %s @@ -542,8 +535,8 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): expire date: %s ''', ( - u', '.join(subject_parts), - u', '.join(issuer_parts), + ', '.join(subject_parts), + ', '.join(issuer_parts), common_name, issue_date, expiration_date @@ -556,11 +549,11 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): other_headers = [] for header, value in request_headers.items(): - other_headers.append(u'%s: %s' % (header, value)) - indented_headers = u'\n '.join(other_headers) + other_headers.append('%s: %s' % (header, value)) + indented_headers = '\n '.join(other_headers) console_write( - u''' + ''' WinINet %s Debug Write GET %s HTTP/1.1 User-Agent: %s @@ -597,7 +590,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): if not success: if ctypes.GetLastError() != self.ERROR_INSUFFICIENT_BUFFER: error_string = text.format( - u''' + ''' %s %s during header read phase of downloading %s. 
''', (error_message, self.extract_error(), url) @@ -614,9 +607,9 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): headers = headers.decode('iso-8859-1').rstrip("\r\n").split("\r\n") if self.debug: - indented_headers = u'\n '.join(headers) + indented_headers = '\n '.join(headers) console_write( - u''' + ''' WinINet %s Debug Read %s ''', @@ -643,7 +636,7 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): # GitHub and BitBucket seem to rate limit via 503 if tries and self.debug: console_write( - u''' + ''' Downloading %s was rate limited, trying again ''', url @@ -663,10 +656,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): except (NonHttpError, HttpError) as e: # GitHub and BitBucket seem to time out a lot - if unicode_from_os(e).find('timed out') != -1: + if str(e).find('timed out') != -1: if tries and self.debug: console_write( - u''' + ''' Downloading %s timed out, trying again ''', url @@ -674,10 +667,10 @@ def download(self, url, error_message, timeout, tries, prefer_cached=False): continue error_string = text.format( - u''' + ''' %s %s downloading %s. ''', - (error_message, unicode_from_os(e), url) + (error_message, str(e), url) ) finally: @@ -720,26 +713,26 @@ def extract_error(self): error_num = ctypes.GetLastError() raw_error_string = ctypes.FormatError(error_num) - error_string = unicode_from_os(raw_error_string) + error_string = str(raw_error_string) # Try to fill in some known errors - if error_string == u"": + if error_string == "": error_lookup = { - 12007: u'host not found', - 12029: u'connection refused', - 12057: u'error checking for server certificate revocation', - 12169: u'invalid secure certificate', - 12157: u'secure channel error, server not providing SSL', - 12002: u'operation timed out' + 12007: 'host not found', + 12029: 'connection refused', + 12057: 'error checking for server certificate revocation', + 12169: 'invalid secure certificate', + 12157: 'secure channel error, server not providing SSL', + 12002: 'operation timed out' } if error_num in error_lookup: error_string = error_lookup[error_num] - if error_string == u"": - return u"(errno %s)" % error_num + if error_string == "": + return "(errno %s)" % error_num error_string = error_string[0].upper() + error_string[1:] - return u"%s (errno %s)" % (error_string, error_num) + return "%s (errno %s)" % (error_string, error_num) def supports_ssl(self): """ @@ -774,8 +767,8 @@ def cache_proxy_info(self): self.proxy_username = self.read_option(self.tcp_connection, self.INTERNET_OPTION_PROXY_USERNAME) self.proxy_password = self.read_option(self.tcp_connection, self.INTERNET_OPTION_PROXY_PASSWORD) else: - self.proxy_username = u'' - self.proxy_password = u'' + self.proxy_username = '' + self.proxy_password = '' def read_option(self, handle, option): """ diff --git a/app/lib/package_control/file_not_found_error.py b/app/lib/package_control/file_not_found_error.py deleted file mode 100644 index 5d67655..0000000 --- a/app/lib/package_control/file_not_found_error.py +++ /dev/null @@ -1,17 +0,0 @@ -import sys - - -class FileNotFoundError(Exception): - - """If a file is not found""" - - def __unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') diff --git a/app/lib/package_control/http/__init__.py b/app/lib/package_control/http/__init__.py index d57f2ca..e69de29 100644 --- 
a/app/lib/package_control/http/__init__.py +++ b/app/lib/package_control/http/__init__.py @@ -1,64 +0,0 @@ -import sys - -try: - # Python 2 - import urllib2 - import httplib - - # Monkey patch AbstractBasicAuthHandler to prevent infinite recursion - def non_recursive_http_error_auth_reqed(self, authreq, host, req, headers): - authreq = headers.get(authreq, None) - - if not hasattr(self, 'retried'): - self.retried = 0 - - if self.retried > 5: - raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed", headers, None) - else: - self.retried += 1 - - if authreq: - mo = urllib2.AbstractBasicAuthHandler.rx.search(authreq) - if mo: - scheme, quote, realm = mo.groups() - if scheme.lower() == 'basic': - return self.retry_http_basic_auth(host, req, realm) - - urllib2.AbstractBasicAuthHandler.http_error_auth_reqed = non_recursive_http_error_auth_reqed - - # Money patch urllib2.Request and httplib.HTTPConnection so that - # HTTPS proxies work in Python 2.6.1-2 - if sys.version_info < (2, 6, 3): - - urllib2.Request._tunnel_host = None - - def py268_set_proxy(self, host, type): - if self.type == 'https' and not self._tunnel_host: - self._tunnel_host = self.host - else: - self.type = type - # The _Request prefix is to handle python private name mangling - self._Request__r_host = self._Request__original - self.host = host - urllib2.Request.set_proxy = py268_set_proxy - - if sys.version_info < (2, 6, 5): - - def py268_set_tunnel(self, host, port=None, headers=None): - """ Sets up the host and the port for the HTTP CONNECT Tunnelling. - - The headers argument should be a mapping of extra HTTP headers - to send with the CONNECT request. - """ - self._tunnel_host = host - self._tunnel_port = port - if headers: - self._tunnel_headers = headers - else: - self._tunnel_headers.clear() - httplib.HTTPConnection._set_tunnel = py268_set_tunnel - - -except (ImportError): - # Python 3 does not need to be patched - pass diff --git a/app/lib/package_control/http/debuggable_http_connection.py b/app/lib/package_control/http/debuggable_http_connection.py index 504b20e..e92932a 100644 --- a/app/lib/package_control/http/debuggable_http_connection.py +++ b/app/lib/package_control/http/debuggable_http_connection.py @@ -1,11 +1,5 @@ import socket - -try: - # Python 3 - from http.client import HTTPConnection -except (ImportError): - # Python 2 - from httplib import HTTPConnection +from http.client import HTTPConnection from ..console_write import console_write from .debuggable_http_response import DebuggableHTTPResponse @@ -21,12 +15,6 @@ class DebuggableHTTPConnection(HTTPConnection): _debug_protocol = 'HTTP' def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kwargs): - self.passwd = kwargs.get('passwd') - - # Python 2.6.1 on OS X 10.6 does not include these - self._tunnel_host = None - self._tunnel_port = None - self._tunnel_headers = {} if 'debug' in kwargs and kwargs['debug']: self.debuglevel = 5 elif 'debuglevel' in kwargs: @@ -37,7 +25,7 @@ def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw def connect(self): if self.debuglevel == -1: console_write( - u''' + ''' Urllib %s Debug General Connecting to %s on port %s ''', @@ -58,9 +46,9 @@ def send(self, string): if reset_debug or self.debuglevel == -1: if len(string.strip()) > 0: unicode_string = string.strip().decode('iso-8859-1') - indented_headers = u'\n '.join(unicode_string.splitlines()) + indented_headers = '\n '.join(unicode_string.splitlines()) console_write( - u''' + ''' Urllib %s Debug Write %s ''', 
@@ -70,10 +58,8 @@ def send(self, string): self.debuglevel = reset_debug def request(self, method, url, body=None, headers={}): - original_headers = headers.copy() - # By default urllib2 and urllib.request override the Connection header, # however, it is preferred to be able to re-use it - original_headers['Connection'] = 'Keep-Alive' + headers['Connection'] = 'Keep-Alive' - HTTPConnection.request(self, method, url, body, original_headers) + HTTPConnection.request(self, method, url, body, headers) diff --git a/app/lib/package_control/http/debuggable_http_handler.py b/app/lib/package_control/http/debuggable_http_handler.py index 76cd97b..6391efc 100644 --- a/app/lib/package_control/http/debuggable_http_handler.py +++ b/app/lib/package_control/http/debuggable_http_handler.py @@ -1,9 +1,4 @@ -try: - # Python 3 - from urllib.request import HTTPHandler -except (ImportError): - # Python 2 - from urllib2 import HTTPHandler +from urllib.request import HTTPHandler from .debuggable_http_connection import DebuggableHTTPConnection from .persistent_handler import PersistentHandler @@ -22,11 +17,9 @@ def __init__(self, debuglevel=0, debug=False, **kwargs): self._debuglevel = 5 else: self._debuglevel = debuglevel - self.passwd = kwargs.get('passwd') def http_open(self, req): def http_class_wrapper(host, **kwargs): - kwargs['passwd'] = self.passwd if 'debuglevel' not in kwargs: kwargs['debuglevel'] = self._debuglevel return DebuggableHTTPConnection(host, **kwargs) diff --git a/app/lib/package_control/http/debuggable_http_response.py b/app/lib/package_control/http/debuggable_http_response.py index e5bfeba..05fa861 100644 --- a/app/lib/package_control/http/debuggable_http_response.py +++ b/app/lib/package_control/http/debuggable_http_response.py @@ -1,11 +1,4 @@ -try: - # Python 3 - from http.client import HTTPResponse, IncompleteRead - str_cls = str -except (ImportError): - # Python 2 - from httplib import HTTPResponse, IncompleteRead - str_cls = unicode # noqa +from http.client import HTTPResponse, IncompleteRead from ..console_write import console_write @@ -39,16 +32,16 @@ def begin(self): headers.append("%s: %s" % (header, self.msg[header])) versions = { - 9: u'HTTP/0.9', - 10: u'HTTP/1.0', - 11: u'HTTP/1.1' + 9: 'HTTP/0.9', + 10: 'HTTP/1.0', + 11: 'HTTP/1.1' } - status_line = u'%s %s %s' % (versions[self.version], str_cls(self.status), self.reason) + status_line = '%s %s %s' % (versions[self.version], str(self.status), self.reason) headers.insert(0, status_line) - indented_headers = u'\n '.join(headers) + indented_headers = '\n '.join(headers) console_write( - u''' + ''' Urllib %s Debug Read %s ''', diff --git a/app/lib/package_control/http/invalid_certificate_exception.py b/app/lib/package_control/http/invalid_certificate_exception.py index 19438cd..fa5fe87 100644 --- a/app/lib/package_control/http/invalid_certificate_exception.py +++ b/app/lib/package_control/http/invalid_certificate_exception.py @@ -1,13 +1,5 @@ -import sys - -try: - # Python 3 - from http.client import HTTPException - from urllib.error import URLError -except (ImportError): - # Python 2 - from httplib import HTTPException - from urllib2 import URLError +from http.client import HTTPException +from urllib.error import URLError class InvalidCertificateException(HTTPException, URLError): @@ -23,14 +15,3 @@ def __init__(self, host, cert, reason): self.reason = reason.rstrip() message = 'Host %s returned an invalid certificate (%s) %s' % (self.host, self.reason, self.cert) HTTPException.__init__(self, message.rstrip()) - - def 
__unicode__(self): - return self.args[0] - - def __str__(self): - if sys.version_info < (3,): - return self.__bytes__() - return self.__unicode__() - - def __bytes__(self): - return self.__unicode__().encode('utf-8') diff --git a/app/lib/package_control/http/persistent_handler.py b/app/lib/package_control/http/persistent_handler.py index 056692d..6ac0112 100644 --- a/app/lib/package_control/http/persistent_handler.py +++ b/app/lib/package_control/http/persistent_handler.py @@ -1,13 +1,5 @@ -import sys import socket - -try: - # Python 3 - from urllib.error import URLError -except ImportError: - # Python 2 - from urllib2 import URLError - from urllib import addinfourl +from urllib.error import URLError from ..console_write import console_write @@ -21,7 +13,7 @@ def close(self): if self._debuglevel == 5: s = '' if self.use_count == 1 else 's' console_write( - u''' + ''' Urllib %s Debug General Closing connection to %s on port %s after %s request%s ''', @@ -41,10 +33,7 @@ def do_open(self, http_class, req): # Large portions from Python 3.3 Lib/urllib/request.py and # Python 2.6 Lib/urllib2.py - if sys.version_info >= (3,): - host = req.host - else: - host = req.get_host() + host = req.host if not host: raise URLError('no host given') @@ -60,26 +49,17 @@ def do_open(self, http_class, req): h = self.connection if self._debuglevel == 5: console_write( - u''' + ''' Urllib %s Debug General Re-using connection to %s on port %s for request #%s ''', (h._debug_protocol, h.host, h.port, self.use_count) ) - if sys.version_info >= (3,): - headers = dict(req.unredirected_hdrs) - headers.update(dict((k, v) for k, v in req.headers.items() - if k not in headers)) - headers = dict((name.title(), val) for name, val in headers.items()) - - else: - h.set_debuglevel(self._debuglevel) - - headers = dict(req.headers) - headers.update(req.unredirected_hdrs) - headers = dict( - (name.title(), val) for name, val in headers.items()) + headers = dict(req.unredirected_hdrs) + headers.update(dict((k, v) for k, v in req.headers.items() + if k not in headers)) + headers = dict((name.title(), val) for name, val in headers.items()) if req._tunnel_host and not self.connection: tunnel_headers = {} @@ -88,16 +68,10 @@ def do_open(self, http_class, req): tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr] del headers[proxy_auth_hdr] - if sys.version_info >= (3,): - h.set_tunnel(req._tunnel_host, headers=tunnel_headers) - else: - h._set_tunnel(req._tunnel_host, headers=tunnel_headers) + h.set_tunnel(req._tunnel_host, headers=tunnel_headers) try: - if sys.version_info >= (3,): - h.request(req.get_method(), req.selector, req.data, headers) - else: - h.request(req.get_method(), req.get_selector(), req.data, headers) + h.request(req.get_method(), req.selector, req.data, headers) except socket.error as err: # timeout error h.close() raise URLError(err) @@ -111,7 +85,7 @@ def do_open(self, http_class, req): if self._debuglevel == 5: s = '' if self.use_count == 1 else 's' console_write( - u''' + ''' Urllib %s Debug General Closing connection to %s on port %s after %s request%s ''', @@ -120,15 +94,6 @@ def do_open(self, http_class, req): self.use_count = 0 self.connection = None - if sys.version_info >= (3,): - r.url = req.get_full_url() - r.msg = r.reason - return r - - r.recv = r.read - fp = socket._fileobject(r, close=True) - - resp = addinfourl(fp, r.msg, req.get_full_url()) - resp.code = r.status - resp.msg = r.reason - return resp + r.url = req.get_full_url() + r.msg = r.reason + return r diff --git 
a/app/lib/package_control/http/validating_https_connection.py b/app/lib/package_control/http/validating_https_connection.py index a1d60c5..714c111 100644 --- a/app/lib/package_control/http/validating_https_connection.py +++ b/app/lib/package_control/http/validating_https_connection.py @@ -1,20 +1,12 @@ -import re -import socket import base64 import hashlib import os -import sys - -try: - # Python 3 - from http.client import HTTPS_PORT - from urllib.request import parse_keqv_list, parse_http_list - x509 = None -except (ImportError): - # Python 2 - from httplib import HTTPS_PORT - from urllib2 import parse_keqv_list, parse_http_list - from ..deps.asn1crypto import x509 +import re +import socket +import ssl + +from http.client import HTTPS_PORT +from urllib.request import parse_keqv_list, parse_http_list from ..console_write import console_write from .debuggable_https_response import DebuggableHTTPSResponse @@ -22,404 +14,390 @@ from .invalid_certificate_exception import InvalidCertificateException -# The following code is wrapped in a try because the Linux versions of Sublime -# Text do not include the ssl module due to the fact that different distros -# have different versions -try: - import ssl +class ValidatingHTTPSConnection(DebuggableHTTPConnection): + + """ + A custom HTTPConnection class that validates SSL certificates, and + allows proxy authentication for HTTPS connections. + """ + + default_port = HTTPS_PORT + + response_class = DebuggableHTTPSResponse + _debug_protocol = 'HTTPS' + + def __init__(self, host, port=None, ca_certs=None, extra_ca_certs=None, **kwargs): + passed_args = {} + if 'timeout' in kwargs: + passed_args['timeout'] = kwargs['timeout'] + if 'debug' in kwargs: + passed_args['debug'] = kwargs['debug'] + DebuggableHTTPConnection.__init__(self, host, port, **passed_args) + + self.passwd = kwargs.get('passwd') + + if 'user_agent' in kwargs: + self.user_agent = kwargs['user_agent'] + + # build ssl context + + context = ssl.SSLContext( + ssl.PROTOCOL_TLS_CLIENT if hasattr(ssl, 'PROTOCOL_TLS_CLIENT') else ssl.PROTOCOL_SSLv23) - class ValidatingHTTPSConnection(DebuggableHTTPConnection): + if hasattr(context, 'minimum_version'): + context.minimum_version = ssl.TLSVersion.TLSv1 + else: + context.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 \ + | ssl.OP_NO_COMPRESSION | ssl.OP_CIPHER_SERVER_PREFERENCE + context.verify_mode = ssl.CERT_REQUIRED + if hasattr(context, 'check_hostname'): + context.check_hostname = True + if hasattr(context, 'post_handshake_auth'): + context.post_handshake_auth = True + + if ca_certs: + context.load_verify_locations(ca_certs) + self.ca_certs = ca_certs + elif hasattr(context, 'load_default_certs'): + context.load_default_certs(ssl.Purpose.SERVER_AUTH) + self.ca_certs = "OS native store" + else: + raise InvalidCertificateException(self.host, self.port, "CA missing") + + if extra_ca_certs: + try: + context.load_verify_locations(extra_ca_certs) + except Exception: + pass + + self._context = context + + def get_valid_hosts_for_cert(self, cert): """ - A custom HTTPConnection class that validates SSL certificates, and - allows proxy authentication for HTTPS connections. 
+ Returns a list of valid hostnames for an SSL certificate + + :param cert: A dict from SSLSocket.getpeercert() + + :return: An array of hostnames """ - default_port = HTTPS_PORT + if 'subjectAltName' in cert: + return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns'] + else: + return [x[0][1] for x in cert['subject'] if x[0][0].lower() == 'commonname'] - response_class = DebuggableHTTPSResponse - _debug_protocol = 'HTTPS' + def validate_cert_host(self, cert, hostname): + """ + Checks if the cert is valid for the hostname - # The ssl.SSLContext() for the connection - Python 3 only - ctx = None + :param cert: A dict from SSLSocket.getpeercert() - def __init__(self, host, port=None, key_file=None, cert_file=None, ca_certs=None, **kwargs): - passed_args = {} - if 'timeout' in kwargs: - passed_args['timeout'] = kwargs['timeout'] - if 'debug' in kwargs: - passed_args['debug'] = kwargs['debug'] - DebuggableHTTPConnection.__init__(self, host, port, **passed_args) + :param hostname: A string hostname to check - self.passwd = kwargs.get('passwd') - self.key_file = key_file - self.cert_file = cert_file - self.ca_certs = ca_certs - if 'user_agent' in kwargs: - self.user_agent = kwargs['user_agent'] - if self.ca_certs: - self.cert_reqs = ssl.CERT_REQUIRED - else: - self.cert_reqs = ssl.CERT_NONE + :return: A boolean if the cert is valid for the hostname + """ - def get_valid_hosts_for_cert(self, cert): - """ - Returns a list of valid hostnames for an SSL certificate + hosts = self.get_valid_hosts_for_cert(cert) + for host in hosts: + host_re = host.replace('.', r'\.').replace('*', r'[^.]*') + if re.search('^%s$' % (host_re,), hostname, re.I): + return True + return False - :param cert: A dict from SSLSocket.getpeercert() + # Compatibility for python 3.3 vs 3.8 + # python 3.8 replaced _set_hostport() by _get_hostport() + if not hasattr(DebuggableHTTPConnection, '_set_hostport'): - :return: An array of hostnames - """ + def _set_hostport(self, host, port): + (self.host, self.port) = self._get_hostport(host, port) + self._validate_host(self.host) - if 'subjectAltName' in cert: - return [x[1] for x in cert['subjectAltName'] if x[0].lower() == 'dns'] - else: - return [x[0][1] for x in cert['subject'] if x[0][0].lower() == 'commonname'] + def _tunnel(self): + """ + This custom _tunnel method allows us to read and print the debug + log for the whole response before throwing an error, and adds + support for proxy authentication + """ - def validate_cert_host(self, cert, hostname): - """ - Checks if the cert is valid for the hostname + self._proxy_host = self.host + self._proxy_port = self.port + self._set_hostport(self._tunnel_host, self._tunnel_port) - :param cert: A dict from SSLSocket.getpeercert() + self._tunnel_headers['Host'] = "%s:%s" % (self.host, self.port) + self._tunnel_headers['User-Agent'] = self.user_agent + self._tunnel_headers['Proxy-Connection'] = 'Keep-Alive' - :param hostname: A string hostname to check + request = "CONNECT %s:%d HTTP/1.1\r\n" % (self.host, self.port) + for header, value in self._tunnel_headers.items(): + request += "%s: %s\r\n" % (header, value) + request += "\r\n" - :return: A boolean if the cert is valid for the hostname - """ + request = bytes(request, 'iso-8859-1') - hosts = self.get_valid_hosts_for_cert(cert) - for host in hosts: - host_re = host.replace('.', r'\.').replace('*', r'[^.]*') - if re.search('^%s$' % (host_re,), hostname, re.I): - return True - return False + self.send(request) - def _tunnel(self): - """ - This custom _tunnel method allows us 
to read and print the debug - log for the whole response before throwing an error, and adds - support for proxy authentication - """ + response = self.response_class(self.sock, method=self._method) + (version, code, message) = response._read_status() - self._proxy_host = self.host - self._proxy_port = self.port - self._set_hostport(self._tunnel_host, self._tunnel_port) + status_line = "%s %s %s" % (version, code, message.rstrip()) + headers = [status_line] - self._tunnel_headers['Host'] = u"%s:%s" % (self.host, self.port) - self._tunnel_headers['User-Agent'] = self.user_agent - self._tunnel_headers['Proxy-Connection'] = 'Keep-Alive' + content_length = 0 + close_connection = False + while True: + line = response.fp.readline() - request = "CONNECT %s:%d HTTP/1.1\r\n" % (self.host, self.port) - for header, value in self._tunnel_headers.items(): - request += "%s: %s\r\n" % (header, value) - request += "\r\n" + line = line.decode('iso-8859-1') - if sys.version_info >= (3,): - request = bytes(request, 'iso-8859-1') + if line == '\r\n': + break - self.send(request) + headers.append(line.rstrip()) - response = self.response_class(self.sock, method=self._method) - (version, code, message) = response._read_status() + parts = line.rstrip().split(': ', 1) + name = parts[0].lower() + value = parts[1].lower().strip() + if name == 'content-length': + content_length = int(value) - status_line = u"%s %s %s" % (version, code, message.rstrip()) - headers = [status_line] + if name in ['connection', 'proxy-connection'] and value == 'close': + close_connection = True - content_length = 0 - close_connection = False - while True: - line = response.fp.readline() + if self.debuglevel in [-1, 5]: + indented_headers = '\n '.join(headers) + console_write( + ''' + Urllib %s Debug Read + %s + ''', + (self._debug_protocol, indented_headers) + ) - if sys.version_info >= (3,): - line = line.decode('iso-8859-1') + # Handle proxy auth for SSL connections since regular urllib punts on this + if code == 407 and self.passwd and 'Proxy-Authorization' not in self._tunnel_headers: + if content_length: + response._safe_read(content_length) - if line == '\r\n': - break + supported_auth_methods = {} + for line in headers: + parts = line.split(': ', 1) + if parts[0].lower() != 'proxy-authenticate': + continue + details = parts[1].split(' ', 1) + supported_auth_methods[details[0].lower()] = details[1] if len(details) > 1 else '' - headers.append(line.rstrip()) + username, password = self.passwd.find_user_password(None, "%s:%s" % ( + self._proxy_host, self._proxy_port)) - parts = line.rstrip().split(': ', 1) - name = parts[0].lower() - value = parts[1].lower().strip() - if name == 'content-length': - content_length = int(value) + if 'digest' in supported_auth_methods: + response_value = self.build_digest_response( + supported_auth_methods['digest'], username, password) + if response_value: + self._tunnel_headers['Proxy-Authorization'] = "Digest %s" % response_value - if name in ['connection', 'proxy-connection'] and value == 'close': - close_connection = True + elif 'basic' in supported_auth_methods: + response_value = "%s:%s" % (username, password) + response_value = base64.b64encode(response_value.encode('utf-8')).decode('utf-8') + self._tunnel_headers['Proxy-Authorization'] = "Basic %s" % response_value.strip() - if self.debuglevel in [-1, 5]: - indented_headers = u'\n '.join(headers) - console_write( - u''' - Urllib %s Debug Read - %s - ''', - (self._debug_protocol, indented_headers) - ) + if 'Proxy-Authorization' in 
self._tunnel_headers: + self.host = self._proxy_host + self.port = self._proxy_port - # Handle proxy auth for SSL connections since regular urllib punts on this - if code == 407 and self.passwd and 'Proxy-Authorization' not in self._tunnel_headers: - if content_length: - response._safe_read(content_length) - - supported_auth_methods = {} - for line in headers: - parts = line.split(': ', 1) - if parts[0].lower() != 'proxy-authenticate': - continue - details = parts[1].split(' ', 1) - supported_auth_methods[details[0].lower()] = details[1] if len(details) > 1 else '' - - username, password = self.passwd.find_user_password(None, "%s:%s" % ( - self._proxy_host, self._proxy_port)) - - if 'digest' in supported_auth_methods: - response_value = self.build_digest_response( - supported_auth_methods['digest'], username, password) - if response_value: - self._tunnel_headers['Proxy-Authorization'] = u"Digest %s" % response_value - - elif 'basic' in supported_auth_methods: - response_value = u"%s:%s" % (username, password) - response_value = base64.b64encode(response_value.encode('utf-8')).decode('utf-8') - self._tunnel_headers['Proxy-Authorization'] = u"Basic %s" % response_value.strip() - - if 'Proxy-Authorization' in self._tunnel_headers: - self.host = self._proxy_host - self.port = self._proxy_port - - # If the proxy wanted the connection closed, we need to make a new connection - if close_connection: - self.sock.close() - self.sock = socket.create_connection((self.host, self.port), self.timeout) - - return self._tunnel() - - if code != 200: - self.close() - raise socket.error("Tunnel connection failed: %d %s" % (code, message.strip())) - - def build_digest_response(self, fields, username, password): - """ - Takes a Proxy-Authenticate: Digest header and creates a response - header - - :param fields: - The string portion of the Proxy-Authenticate header after - "Digest " - - :param username: - The username to use for the response - - :param password: - The password to use for the response - - :return: - None if invalid Proxy-Authenticate header, otherwise the - string of fields for the Proxy-Authorization: Digest header - """ - - fields = parse_keqv_list(parse_http_list(fields)) - - realm = fields.get('realm') - nonce = fields.get('nonce') - qop = fields.get('qop') - algorithm = fields.get('algorithm') - if algorithm: - algorithm = algorithm.lower() - opaque = fields.get('opaque') - - if algorithm in ['md5', None]: - def md5hash(string): - return hashlib.md5(string).hexdigest() - hash = md5hash - - elif algorithm == 'sha': - def sha1hash(string): - return hashlib.sha1(string).hexdigest() - hash = sha1hash - - else: - return None - - host_port = u"%s:%s" % (self.host, self.port) - - a1 = "%s:%s:%s" % (username, realm, password) - a2 = "CONNECT:%s" % host_port - ha1 = hash(a1) - ha2 = hash(a2) - - if qop is None: - response = hash(u"%s:%s:%s" % (ha1, nonce, ha2)) - elif qop == 'auth': - nc = '00000001' - cnonce = hash(os.urandom(8))[:8] - response = hash(u"%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2)) - else: - return None - - response_fields = { - 'username': username, - 'realm': realm, - 'nonce': nonce, - 'response': response, - 'uri': host_port - } - if algorithm: - response_fields['algorithm'] = algorithm - if qop == 'auth': - response_fields['nc'] = nc - response_fields['cnonce'] = cnonce - response_fields['qop'] = qop - if opaque: - response_fields['opaque'] = opaque - - return ', '.join([u"%s=\"%s\"" % (field, response_fields[field]) for field in response_fields]) - - def 
connect(self): - """ - Adds debugging and SSL certification validation - """ + # If the proxy wanted the connection closed, we need to make a new connection + if close_connection: + self.sock.close() + self.sock = socket.create_connection((self.host, self.port), self.timeout) - if self.debuglevel == -1: - console_write( - u''' - Urllib HTTPS Debug General - Connecting to %s on port %s - ''', - (self.host, self.port) - ) + return self._tunnel() - self.sock = socket.create_connection((self.host, self.port), self.timeout) - if self._tunnel_host: - self._tunnel() + if code != 200: + self.close() + raise socket.error("Tunnel connection failed: %d %s" % (code, message.strip())) - if self.debuglevel == -1: - console_write( - u''' - Urllib HTTPS Debug General - Upgrading connection to SSL using CA certs file at %s - ''', - self.ca_certs.decode(sys.getfilesystemencoding()) - ) + def build_digest_response(self, fields, username, password): + """ + Takes a Proxy-Authenticate: Digest header and creates a response + header - hostname = self.host.split(':', 0)[0] - - # Python 3 supports SNI when using an SSLContext - if sys.version_info >= (3,): - proto = ssl.PROTOCOL_SSLv23 - if sys.version_info >= (3, 6): - proto = ssl.PROTOCOL_TLS - self.ctx = ssl.SSLContext(proto) - if sys.version_info < (3, 7): - self.ctx.options = ssl.OP_ALL | ssl.OP_NO_SSLv2 | ssl.OP_NO_SSLv3 - else: - self.ctx.minimum_version = ssl.TLSVersion.TLSv1 - self.ctx.verify_mode = self.cert_reqs - self.ctx.load_verify_locations(self.ca_certs) - # We don't call load_cert_chain() with self.key_file and self.cert_file - # since that is for servers, and this code only supports client mode - if self.debuglevel == -1: - console_write( - u''' - Using hostname "%s" for TLS SNI extension - ''', - hostname, - indent=' ', - prefix=False - ) - self.sock = self.ctx.wrap_socket( - self.sock, - server_hostname=hostname - ) + :param fields: + The string portion of the Proxy-Authenticate header after + "Digest " - else: - self.sock = ssl.wrap_socket( - self.sock, - keyfile=self.key_file, - certfile=self.cert_file, - cert_reqs=self.cert_reqs, - ca_certs=self.ca_certs, - ssl_version=ssl.PROTOCOL_SSLv23 - ) + :param username: + The username to use for the response + + :param password: + The password to use for the response + + :return: + None if invalid Proxy-Authenticate header, otherwise the + string of fields for the Proxy-Authorization: Digest header + """ + + fields = parse_keqv_list(parse_http_list(fields)) + + realm = fields.get('realm') + nonce = fields.get('nonce') + qop = fields.get('qop') + algorithm = fields.get('algorithm') + if algorithm: + algorithm = algorithm.lower() + opaque = fields.get('opaque') + + if algorithm in ['md5', None]: + def md5hash(string): + return hashlib.md5(string).hexdigest() + hash = md5hash + + elif algorithm == 'sha': + def sha1hash(string): + return hashlib.sha1(string).hexdigest() + hash = sha1hash + + else: + return None + + host_port = "%s:%s" % (self.host, self.port) + + a1 = "%s:%s:%s" % (username, realm, password) + a2 = "CONNECT:%s" % host_port + ha1 = hash(a1) + ha2 = hash(a2) + + if qop is None: + response = hash("%s:%s:%s" % (ha1, nonce, ha2)) + elif qop == 'auth': + nc = '00000001' + cnonce = hash(os.urandom(8))[:8] + response = hash("%s:%s:%s:%s:%s:%s" % (ha1, nonce, nc, cnonce, qop, ha2)) + else: + return None + + response_fields = { + 'username': username, + 'realm': realm, + 'nonce': nonce, + 'response': response, + 'uri': host_port + } + if algorithm: + response_fields['algorithm'] = algorithm + if 
qop == 'auth': + response_fields['nc'] = nc + response_fields['cnonce'] = cnonce + response_fields['qop'] = qop + if opaque: + response_fields['opaque'] = opaque + + return ', '.join(["%s=\"%s\"" % (field, response_fields[field]) for field in response_fields]) + + def connect(self): + """ + Adds debugging and SSL certification validation + """ + + if self.debuglevel == -1: + console_write( + ''' + Urllib HTTPS Debug General + Connecting to %s on port %s + ''', + (self.host, self.port) + ) + + self.sock = socket.create_connection((self.host, self.port), self.timeout) + if self._tunnel_host: + self._tunnel() + + if self.debuglevel == -1: + console_write( + ''' + Urllib HTTPS Debug General + Upgrading connection to SSL using CA certs from %s + ''', + self.ca_certs + ) + + hostname = self.host.split(':', 0)[0] + + # We don't call load_cert_chain() with self.key_file and self.cert_file + # since that is for servers, and this code only supports client mode + if self.debuglevel == -1: + console_write( + ''' + Using hostname "%s" for TLS SNI extension + ''', + hostname, + indent=' ', + prefix=False + ) + self.sock = self._context.wrap_socket( + self.sock, + server_hostname=hostname + ) + + if self.debuglevel == -1: + cipher_info = self.sock.cipher() + console_write( + ''' + Successfully upgraded connection to %s:%s with SSL + Using %s with cipher %s + ''', + (self.host, self.port, cipher_info[1], cipher_info[0]), + indent=' ', + prefix=False + ) + + # This debugs and validates the SSL certificate + if self._context.verify_mode & ssl.CERT_REQUIRED: + cert = self.sock.getpeercert() if self.debuglevel == -1: - cipher_info = self.sock.cipher() + subjectMap = { + 'organizationName': 'O', + 'commonName': 'CN', + 'organizationalUnitName': 'OU', + 'countryName': 'C', + 'serialNumber': 'serialNumber', + 'commonName': 'CN', + 'localityName': 'L', + 'stateOrProvinceName': 'S', + '1.3.6.1.4.1.311.60.2.1.2': 'incorporationState', + '1.3.6.1.4.1.311.60.2.1.3': 'incorporationCountry' + } + subject_list = list(cert['subject']) + subject_list.reverse() + subject_parts = [] + for pair in subject_list: + if pair[0][0] in subjectMap: + field_name = subjectMap[pair[0][0]] + else: + field_name = pair[0][0] + subject_parts.append(field_name + '=' + pair[0][1]) + console_write( - u''' - Successfully upgraded connection to %s:%s with SSL - Using %s with cipher %s + ''' + Server SSL certificate: + subject: %s ''', - (self.host, self.port, cipher_info[1], cipher_info[0]), + ','.join(subject_parts), indent=' ', prefix=False ) - - # This debugs and validates the SSL certificate - if self.cert_reqs & ssl.CERT_REQUIRED: - cert = self.sock.getpeercert() - # Python 2.6 doesn't seem to parse the subject alt name, so - # we parse the raw DER certificate and grab the info ourself - if x509: - der_cert = self.sock.getpeercert(True) - cert_object = x509.Certificate.load(der_cert) - if cert_object.subject_alt_name_value: - subject_alt_names = [] - for general_name in cert_object.subject_alt_name_value: - if general_name.name != 'dns_name': - continue - if 'commonName' not in cert or general_name.native != cert['commonName']: - subject_alt_names.append(('DNS', general_name.native)) - if subject_alt_names: - cert['subjectAltName'] = tuple(subject_alt_names) - + if 'subjectAltName' in cert: + alt_names = [c[1] for c in cert['subjectAltName']] + alt_names = ', '.join(alt_names) + console_write(' subject alt name: %s', alt_names, prefix=False) + if 'notAfter' in cert: + console_write(' expire date: %s', cert['notAfter'], prefix=False) + + 
if not self.validate_cert_host(cert, hostname): if self.debuglevel == -1: - subjectMap = { - 'organizationName': 'O', - 'commonName': 'CN', - 'organizationalUnitName': 'OU', - 'countryName': 'C', - 'serialNumber': 'serialNumber', - 'commonName': 'CN', - 'localityName': 'L', - 'stateOrProvinceName': 'S', - '1.3.6.1.4.1.311.60.2.1.2': 'incorporationState', - '1.3.6.1.4.1.311.60.2.1.3': 'incorporationCountry' - } - subject_list = list(cert['subject']) - subject_list.reverse() - subject_parts = [] - for pair in subject_list: - if pair[0][0] in subjectMap: - field_name = subjectMap[pair[0][0]] - else: - field_name = pair[0][0] - subject_parts.append(field_name + '=' + pair[0][1]) - - console_write( - u''' - Server SSL certificate: - subject: %s - ''', - ','.join(subject_parts), - indent=' ', - prefix=False - ) - if 'subjectAltName' in cert: - alt_names = [c[1] for c in cert['subjectAltName']] - alt_names = ', '.join(alt_names) - console_write(u' subject alt name: %s', alt_names, prefix=False) - if 'notAfter' in cert: - console_write(u' expire date: %s', cert['notAfter'], prefix=False) - - if not self.validate_cert_host(cert, hostname): - if self.debuglevel == -1: - console_write(u' Certificate INVALID', prefix=False) - - raise InvalidCertificateException(hostname, cert, 'hostname mismatch') + console_write(' Certificate INVALID', prefix=False) - if self.debuglevel == -1: - console_write(u' Certificate validated for %s', hostname, prefix=False) + raise InvalidCertificateException(hostname, cert, 'hostname mismatch') -except (ImportError): - pass + if self.debuglevel == -1: + console_write(' Certificate validated for %s', hostname, prefix=False) diff --git a/app/lib/package_control/http/validating_https_handler.py b/app/lib/package_control/http/validating_https_handler.py index 5c569fb..10c50b9 100644 --- a/app/lib/package_control/http/validating_https_handler.py +++ b/app/lib/package_control/http/validating_https_handler.py @@ -1,63 +1,40 @@ -try: - # Python 3 - from urllib.error import URLError - import urllib.request as urllib_compat -except (ImportError): - # Python 2 - from urllib2 import URLError - import urllib2 as urllib_compat - - -# The following code is wrapped in a try because the Linux versions of Sublime -# Text do not include the ssl module due to the fact that different distros -# have different versions -try: - import ssl - - from .validating_https_connection import ValidatingHTTPSConnection - from .invalid_certificate_exception import InvalidCertificateException - from .persistent_handler import PersistentHandler - - if hasattr(urllib_compat, 'HTTPSHandler'): - class ValidatingHTTPSHandler(PersistentHandler, urllib_compat.HTTPSHandler): - - """ - A urllib handler that validates SSL certificates for HTTPS requests - """ - - def __init__(self, **kwargs): - # This is a special value that will not trigger the standard debug - # functionality, but custom code where we can format the output - self._debuglevel = 0 - if 'debug' in kwargs and kwargs['debug']: - self._debuglevel = 5 - elif 'debuglevel' in kwargs: - self._debuglevel = kwargs['debuglevel'] - self._connection_args = kwargs - - def https_open(self, req): - def http_class_wrapper(host, **kwargs): - full_kwargs = dict(self._connection_args) - full_kwargs.update(kwargs) - return ValidatingHTTPSConnection(host, **full_kwargs) - - try: - return self.do_open(http_class_wrapper, req) - except URLError as e: - if type(e.reason) == ssl.SSLError and e.reason.args[0] == 1: - raise InvalidCertificateException(req.host, '', - 
e.reason.args[1]) - raise - - https_request = urllib_compat.AbstractHTTPHandler.do_request_ - else: - raise ImportError() - -except (ImportError) as e: - - import_error = e - - class ValidatingHTTPSHandler(): - - def __init__(self, **kwargs): - raise import_error +import ssl +from urllib.error import URLError +import urllib.request as urllib_compat + +from .validating_https_connection import ValidatingHTTPSConnection +from .invalid_certificate_exception import InvalidCertificateException +from .persistent_handler import PersistentHandler + + +class ValidatingHTTPSHandler(PersistentHandler, urllib_compat.HTTPSHandler): + + """ + A urllib handler that validates SSL certificates for HTTPS requests + """ + + def __init__(self, **kwargs): + # This is a special value that will not trigger the standard debug + # functionality, but custom code where we can format the output + self._debuglevel = 0 + if 'debug' in kwargs and kwargs['debug']: + self._debuglevel = 5 + elif 'debuglevel' in kwargs: + self._debuglevel = kwargs['debuglevel'] + self._connection_args = kwargs + + def https_open(self, req): + def http_class_wrapper(host, **kwargs): + full_kwargs = dict(self._connection_args) + full_kwargs.update(kwargs) + return ValidatingHTTPSConnection(host, **full_kwargs) + + try: + return self.do_open(http_class_wrapper, req) + except URLError as e: + if type(e.reason) == ssl.SSLError and e.reason.args[0] == 1: + raise InvalidCertificateException(req.host, '', + e.reason.args[1]) + raise + + https_request = urllib_compat.AbstractHTTPHandler.do_request_ diff --git a/app/lib/package_control/http_cache.py b/app/lib/package_control/http_cache.py index a0ba634..8ddbf10 100644 --- a/app/lib/package_control/http_cache.py +++ b/app/lib/package_control/http_cache.py @@ -1,35 +1,45 @@ -# Not shared with Package Control - import os +import time -from datetime import datetime, timedelta +from . import sys_path -from ..connection import connection +class HttpCache: -class HttpCache(object): """ A data store for caching HTTP response data. """ def __init__(self, ttl): - self.clear(int(ttl)) + """ + Constructs a new instance. + :param ttl: + The number of seconds a cache entry should be valid for + """ + self.ttl = float(ttl) + self.base_path = os.path.join(sys_path.pc_cache_dir(), 'http_cache') + os.makedirs(self.base_path, exist_ok=True) - def clear(self, ttl): + def prune(self): """ Removes all cache entries older than the TTL :param ttl: The number of seconds a cache entry should be valid for """ - - ttl = int(ttl) - cutoff = datetime.utcnow() - timedelta(seconds=ttl) - - with connection() as cursor: - cursor.execute("DELETE FROM http_cache_entries WHERE last_modified < %s", [cutoff]) - + try: + for filename in os.listdir(self.base_path): + path = os.path.join(self.base_path, filename) + # There should not be any folders in the cache dir, but we + # ignore to prevent an exception + if os.path.isdir(path): + continue + if os.stat(path).st_atime < time.time() - self.ttl: + os.unlink(path) + + except FileNotFoundError: + pass def get(self, key): """ @@ -41,20 +51,23 @@ def get(self, key): :return: The (binary) cached value, or False """ + try: + cache_file = os.path.join(self.base_path, key) - with connection() as cursor: - cursor.execute("SELECT content FROM http_cache_entries WHERE key = %s", [key]) - row = cursor.fetchone() - if not row: - return False + # update filetime to prevent unmodified cache files + # from being deleted, if they are frequently accessed. 
+ # NOTE: try to rely on OS updating access time (`os.stat(path).st_atime`) + # os.utime(cache_file) - return row['content'].tobytes() + with open(cache_file, 'rb') as fobj: + return fobj.read() + except FileNotFoundError: + return False def has(self, key): - with connection() as cursor: - cursor.execute("SELECT key FROM http_cache_entries WHERE key = %s", [key]) - return cursor.fetchone() != None + cache_file = os.path.join(self.base_path, key) + return os.path.exists(cache_file) def path(self, key): """ @@ -67,7 +80,7 @@ def path(self, key): The absolute filesystem path to the cache file """ - return "SELECT * FROM http_cache_entries WHERE key = '%s'" % key + return os.path.join(self.base_path, key) def set(self, key, content): """ @@ -80,10 +93,6 @@ def set(self, key, content): The (binary) content to cache """ - if self.has(key): - sql = "UPDATE http_cache_entries SET content = %s, last_modified = CURRENT_TIMESTAMP WHERE key = %s" - else: - sql = "INSERT INTO http_cache_entries (content, last_modified, key) VALUES (%s, CURRENT_TIMESTAMP, %s)" - - with connection() as cursor: - cursor.execute(sql, [content, key]) + cache_file = os.path.join(self.base_path, key) + with open(cache_file, 'wb') as f: + f.write(content) diff --git a/app/lib/package_control/open_compat.py b/app/lib/package_control/open_compat.py deleted file mode 100644 index 5aacd26..0000000 --- a/app/lib/package_control/open_compat.py +++ /dev/null @@ -1,39 +0,0 @@ -import os -import sys - -from .file_not_found_error import FileNotFoundError - -try: - str_cls = unicode -except (NameError): - str_cls = str - - -def open_compat(path, mode='r'): - if mode in ['r', 'rb'] and not os.path.exists(path): - raise FileNotFoundError(u"The file \"%s\" could not be found" % path) - - if sys.version_info >= (3,): - encoding = 'utf-8' - errors = 'replace' - if mode in ['rb', 'wb', 'ab']: - encoding = None - errors = None - return open(path, mode, encoding=encoding, errors=errors) - - else: - return open(path, mode) - - -def read_compat(file_obj): - if sys.version_info >= (3,): - return file_obj.read() - else: - return str_cls(file_obj.read(), 'utf-8', errors='replace') - - -def write_compat(file_obj, value): - if sys.version_info >= (3,): - return file_obj.write(str(value)) - else: - return file_obj.write(str_cls(value).encode('utf-8')) diff --git a/app/lib/package_control/package_version.py b/app/lib/package_control/package_version.py new file mode 100644 index 0000000..5219d1d --- /dev/null +++ b/app/lib/package_control/package_version.py @@ -0,0 +1,137 @@ +import re + +from .console_write import console_write +from .pep440 import PEP440Version, PEP440InvalidVersionError + + +class PackageVersion(PEP440Version): + __slots__ = ["_str"] + + _date_time_regex = re.compile(r"^\d{4}\.\d{2}\.\d{2}(?:\.\d{2}\.\d{2}\.\d{2})?$") + + def __init__(self, ver): + """ + Initialize a ``PackageVersion`` instance. + + The initializer acts as compatibility layer to convert legacy version schemes + into a ``PEP440Version``. + + If the version is based on a date, converts to 0.0.1+yyyy.mm.dd.hh.mm.ss. + + :param ver: + A string, dict with 'version' key, or a SemVer object + + :raises: + TypeError, if ver is not a ``str``. + ValueError, if ver is no valid version string + """ + + if not isinstance(ver, str): + raise TypeError("{!r} is not a string".format(ver)) + + # Store original version string with `v` trimmed to maintain backward compatibility + # with regards to not normalize it. 
+        # The one and only use case is to keep existing CI tests working without change.
+        if ver[0] == 'v':
+            self._str = ver[1:]
+        else:
+            self._str = ver
+
+        # We prepend 0 to all date-based version numbers so that developers
+        # may switch to explicit versioning from GitHub/GitLab/BitBucket
+        # versioning based on commit dates.
+        #
+        # The resulting semver is always 0.0.1 with the timestamp being used
+        # as build number, so any explicitly chosen version (via tags) will
+        # be greater, once a package moves from branch to tag based releases.
+        #
+        # The result looks like:
+        # 0.0.1+2020.07.15.10.50.38
+        match = self._date_time_regex.match(ver)
+        if match:
+            ver = "0.0.1+" + ver
+
+        try:
+            super().__init__(ver)
+        except PEP440InvalidVersionError:
+            # maybe a semver with an incompatible pre-release tag;
+            # if so, treat it as a dev build with a local version
+            if "-" in ver:
+                ver, pre = ver.split("-", 1)
+                if ver and pre:
+                    super().__init__(ver + "-dev+" + pre)
+                    return
+            raise
+
+    def __str__(self):
+        return self._str
+
+
+def version_match_prefix(version, filter_prefix):
+    """
+    Create a ``PackageVersion`` for a given version, if it matches filter_prefix.
+
+    :param version:
+        The version string to match
+
+    :param filter_prefix:
+        The prefix to match versions against
+
+    :returns:
+        PackageVersion, if version is valid and matches given filter_prefix
+        None, if version is invalid or doesn't match filter_prefix
+    """
+
+    try:
+        if filter_prefix:
+            if version.startswith(filter_prefix):
+                return PackageVersion(version[len(filter_prefix):])
+        else:
+            return PackageVersion(version)
+    except ValueError:
+        pass
+    return None
+
+
+def version_sort(sortable, *fields, **kwargs):
+    """
+    Sorts a list that is a list of versions, or dicts with a 'version' key.
+    Can also perform a secondary sort by additional fields.
+
+    :param sortable:
+        The list to sort
+
+    :param *fields:
+        If sortable is a list of dicts, perform secondary sort via these fields,
+        in order
+
+    :param **kwargs:
+        Keyword args to pass on to sorted()
+
+    :return:
+        A copy of sortable that is sorted according to PEP 440 version rules
+    """
+
+    def _version_sort_key(item):
+        if isinstance(item, dict):
+            if "version" not in item:
+                raise TypeError("%s is not a package or library release" % item)
+            result = PackageVersion(item["version"])
+            if fields:
+                result = (result,)
+                for field in fields:
+                    result += (item[field],)
+            return result
+
+        return PackageVersion(item)
+
+    try:
+        return sorted(sortable, key=_version_sort_key, **kwargs)
+    except ValueError as e:
+        console_write(
+            """
+            Error sorting versions - %s
+            """,
+            e,
+        )
+        return []
diff --git a/app/lib/package_control/pep440.py b/app/lib/package_control/pep440.py
new file mode 100644
index 0000000..2f7af33
--- /dev/null
+++ b/app/lib/package_control/pep440.py
@@ -0,0 +1,645 @@
+"""
+A PEP440 compliant version module for use by Package Control.
+
+Note:
+
+This module implements ``PEP440Version`` and ``PEP440VersionSpecifier``
+using independent implementations and regex patterns to parse their string
+representation, even though both share a lot.
+
+The reason for this kind of inlining is targeting best possible performance
+for creating and comparing versions, rather than strictly following a
+questionable DRY approach.
+
+Instantiation for each object consists of only 2 main steps:
+
+1. parse and validate input string using a single regular expression.
+2. convert match groups into nested tuple representation, as primary
+   data storage and comparing key.
+
+The patterns include additional pre-release tag names
+(e.g. ``patch``, ``prerelease``, ``development``, ``test``)
+to maintain compatibility with various existing packages on packagecontrol.io
+"""
+import re
+
+__all__ = [
+    "PEP440InvalidVersionError",
+    "PEP440InvalidVersionSpecifierError",
+    "PEP440Version",
+    "PEP440VersionSpecifier",
+    "check_version"
+]
+
+_local_version_separators = re.compile(r"[-._]")
+
+
+def _norm_tuples(a, b):
+    """
+    Accepts two tuples of PEP440 version numbers and extends them until they
+    are the same length. This allows for comparisons between them.
+
+    Notes:
+
+    - the prerelease segment is padded
+    - local versions don't need padding, as shorter ones sort before longer
+
+    :param a:
+        A tuple from ``PEP440Version``
+        of the format: ``(epoch, release, prerelease, local)``
+
+    :param b:
+        A tuple from ``PEP440Version``
+        of the format: ``(epoch, release, prerelease, local)``
+
+    :return:
+        Two potentially modified tuples, (a, b)
+    """
+    # pad release
+    ar = a[1]
+    br = b[1]
+
+    arl = len(ar)
+    brl = len(br)
+
+    if arl < brl:
+        while len(ar) < brl:
+            ar += (0,)
+        a = a[:1] + (ar,) + a[2:]
+
+    elif arl > brl:
+        while arl > len(br):
+            br += (0,)
+        b = b[:1] + (br,) + b[2:]
+
+    return a, b
+
+
+def _trim_tuples(spec, ver):
+    """
+    Trim version to match specification's length.
+
+    :param spec:
+        A tuple from ``PEP440VersionSpecifier``, representing a version prefix.
+        e.g.: ``(epoch, (major [, minor [, micro] ] ) )``
+
+    :param ver:
+        A tuple from ``PEP440Version``
+
+    :returns:
+        A tuple of prefix and trimmed version.
+    """
+    segs = len(spec[1])
+    release = ver[1][:segs]
+    while len(release) < segs:
+        release += (0,)
+    return spec, (ver[0], release)
+
+
+def _version_info(epoch, ver, pre, local, verbose=False):
+    """
+    Create a ``__version_info__`` tuple representation.
+
+    :param epoch:
+        The epoch
+
+    :param ver:
+        A tuple of integers representing the version
+
+    :param pre:
+        A tuple of tuples of integers representing pre-releases
+
+    :param local:
+        Local version representation.
+
+    :returns:
+        A tuple of (major, minor, micro, 'pre', 'post', 'dev')
+    """
+    info = ver
+
+    if pre and pre[0][0] != 0:
+        if verbose:
+            tag = ("dev", "alpha", "beta", "rc", "", "post")
+        else:
+            tag = ("dev", "a", "b", "rc", "", "post")
+        for t, n in pre:
+            if t != 0:
+                info += (tag[t + 4], n)
+    else:
+        info += ("final",)
+
+    if local:
+        info += (".".join(str(n) if n > -1 else s for n, s in local),)
+
+    return info
+
+
+def _version_string(epoch, ver, pre, local, prefix=False, verbose=False):
+    """
+    Create a normalized string representation.
+
+    :param epoch:
+        The epoch
+
+    :param ver:
+        A tuple of integers representing the version
+
+    :param pre:
+        A tuple of tuples of integers representing pre-releases
+
+    :param local:
+        Local version representation.
+
+    :returns:
+        String representation of the version.
+    """
+    string = str(epoch) + "!" if epoch else ""
+    string += ".".join(map(str, ver))
+
+    if prefix:
+        return string + ".*"
+
+    if pre and pre[0][0] != 0:
+        if verbose:
+            tag = ("-dev{}", "-alpha{}", "-beta{}", "-rc{}", "", "-post{}")
+        else:
+            tag = (".dev{}", "a{}", "b{}", "rc{}", "", ".post{}")
+        for t, n in pre:
+            if t != 0:
+                string += tag[t + 4].format(n)
+
+    if local:
+        string += "+" + ".".join(str(n) if n > -1 else s for n, s in local)
+
+    return string
+
+
+class PEP440InvalidVersionError(ValueError):
+    pass
+
+
+class PEP440Version:
+    __slots__ = ["_tup"]
+
+    _regex = re.compile(
+        r"""
+        ^\s*
+        v?
+        (?:(?P<epoch>[0-9]+)!)?                             # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                    # release segment
+        (?P<pre>                                            # pre-release
+            [-_.]?
+            (?P<pre_l>alpha|a|beta|b|prerelease|preview|pre|c|rc)
+            [-_.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                           # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_.]?
+                (?P<post_l>patch|post|rev|r)
+                [-_.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                            # dev release
+            [-_.]?
+            (?P<dev_l>development|develop|devel|dev)
+            [-_.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+        (?:\+(?P<local>[a-z0-9]+(?:[-_.][a-z0-9]+)*))?      # local version
+        \s*$
+        """,
+        re.VERBOSE,
+    )
+
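For illustration only (not part of the patch): the named groups restored above are unpacked positionally by ``match.groups()`` in ``__init__`` below. A minimal sketch of what the pattern captures, using names defined in this module:

    m = PEP440Version._regex.match("1.2.3rc1.post4.dev5+abc.6")
    assert m.group("release") == "1.2.3"
    assert m.group("pre_l") == "rc" and m.group("pre_n") == "1"
    assert m.group("post_n2") == "4" and m.group("dev_n") == "5"
    assert m.group("local") == "abc.6"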
+    def __init__(self, string):
+        """
+        Constructs a new ``PEP440Version`` instance.
+
+        :param string:
+            A unicode string of the pep440 version.
+        """
+        match = self._regex.match(string.lower())
+        if not match:
+            raise PEP440InvalidVersionError("'{}' is not a valid PEP440 version string".format(string))
+
+        (
+            epoch,
+            release,
+            pre,
+            pre_l,
+            pre_n,
+            post,
+            post_n1,
+            _,
+            post_n2,
+            dev,
+            _,
+            dev_n,
+            local,
+        ) = match.groups()
+
+        epoch = int(epoch or 0)
+        release = tuple(map(int, release.split(".")))
+
+        prerelease = ()
+
+        if pre:
+            if pre_l == "a" or pre_l == "alpha":
+                pre_l = -3
+            elif pre_l == "b" or pre_l == "beta":
+                pre_l = -2
+            else:
+                pre_l = -1
+            prerelease += ((pre_l, int(pre_n or 0)),)
+
+        if post:
+            prerelease += ((1, int(post_n1 or post_n2 or 0)),)
+
+        if dev:
+            prerelease += ((-4, int(dev_n or 0)),)
+
+        while len(prerelease) < 3:
+            prerelease += ((0, 0),)
+
+        tup = ()
+        if local:
+            # Versions with a local segment need that segment parsed to implement
+            # the sorting rules in PEP440.
+            # - Alpha numeric segments sort before numeric segments
+            # - Alpha numeric segments sort lexicographically
+            # - Numeric segments sort numerically
+            # - Shorter versions sort before longer versions when the prefixes
+            #   match exactly
+            for seg in _local_version_separators.split(local):
+                try:
+                    tup += ((int(seg), ""),)
+                except ValueError:
+                    tup += ((-1, seg),)
+
+        local = tup
+
+        self._tup = (epoch, release, prerelease, local)
+
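For illustration only (not part of the patch): the ``(int, str)`` pairs built above reproduce the PEP 440 local-version ordering rules through plain tuple comparison, because alphanumeric segments are encoded with -1 while numeric segments are always >= 0:

    assert (-1, "abc") < (0, "")        # alphanumeric sorts before numeric
    assert (-1, "abc") < (-1, "abd")    # alphanumeric sorts lexicographically
    assert (7, "") < (12, "")           # numeric sorts numerically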
+    def __repr__(self):
+        return "<{0.__class__.__name__}('{0!s}')>".format(self)
+
+    def __str__(self):
+        return self.version_string()
+
+    def __eq__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a == b
+
+    def __ne__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a != b
+
+    def __lt__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a < b
+
+    def __le__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a <= b
+
+    def __gt__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a > b
+
+    def __ge__(self, rhs):
+        a, b = _norm_tuples(self._tup, rhs._tup)
+        return a >= b
+
+    def __hash__(self):
+        return hash(self._tup)
+
+    def version_info(self, verbose=False):
+        return _version_info(*self._tup, verbose=verbose)
+
+    def version_string(self, verbose=False):
+        return _version_string(*self._tup, verbose=verbose)
+
+    @property
+    def epoch(self):
+        return self._tup[0]
+
+    @property
+    def release(self):
+        return self._tup[1]
+
+    @property
+    def major(self):
+        try:
+            return self._tup[1][0]
+        except IndexError:
+            return 0
+
+    @property
+    def minor(self):
+        try:
+            return self._tup[1][1]
+        except IndexError:
+            return 0
+
+    @property
+    def micro(self):
+        try:
+            return self._tup[1][2]
+        except IndexError:
+            return 0
+
+    @property
+    def prerelease(self):
+        tup = ()
+        pre = self._tup[2]
+        if pre and pre[0][0] != 0:
+            tag = ("dev", "a", "b", "rc", "", "post")
+            for t, n in pre:
+                if t != 0:
+                    tup += (tag[t + 4], n)
+
+        return tup
+
+    @property
+    def local(self):
+        return ".".join(str(n) if n > -1 else s for n, s in self._tup[3])
+
+    @property
+    def is_final(self):
+        """Version represents a final release."""
+        return self._tup[2][0][0] == 0
+
+    @property
+    def is_dev(self):
+        """Version represents a pre release."""
+        return any(t[0] == -4 for t in self._tup[2])
+
+    @property
+    def is_prerelease(self):
+        """Version represents a pre release."""
+        return self._tup[2][0][0] < 0
+
+    @property
+    def is_postrelease(self):
+        """Version represents a post final release."""
+        return self._tup[2][0][0] > 0
+
+
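A short usage sketch of ``PEP440Version`` (illustrative only; the import path is assumed):

    from package_control.pep440 import PEP440Version

    assert PEP440Version("1.2") < PEP440Version("1.2.0.post1")   # release is padded for comparison
    assert PEP440Version("4.0.0-beta9").prerelease == ("b", 9)
    assert PEP440Version("0.0.1+2020.07.15") < PEP440Version("0.0.2")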
+class PEP440InvalidVersionSpecifierError(ValueError):
+    pass
+
+
+class PEP440VersionSpecifier:
+    __slots__ = ["_operator", "_prefix", "_prereleases", "_tup"]
+
+    _regex = re.compile(
+        r"""
+        ^\s*
+        (?: (?P<op>===|==|!=|~=|<=?|>=?) \s* )?             # operator
+        v?
+        (?:(?P<epoch>[0-9]+)!)?                             # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                    # release segment
+        (?:
+            \.(?P<prefix>\*)                                # prefix-release
+            |
+            (?P<pre>                                        # pre-release
+                [-_.]?
+                (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
+                [-_.]?
+                (?P<pre_n>[0-9]+)?
+            )?
+            (?P<post>                                       # post release
+                (?:-(?P<post_n1>[0-9]+))
+                |
+                (?:
+                    [-_.]?
+                    (?P<post_l>post|rev|r)
+                    [-_.]?
+                    (?P<post_n2>[0-9]+)?
+                )
+            )?
+            (?P<dev>                                        # dev release
+                [-_.]?
+                (?P<dev_l>dev)
+                [-_.]?
+                (?P<dev_n>[0-9]+)?
+            )?
+            (?:\+(?P<local>[a-z0-9]+(?:[-_.][a-z0-9]+)*))?  # local version
+        )
+        \s*$
+        """,
+        re.VERBOSE,
+    )
+
+    _op_str = ("", "===", "==", "!=", "~=", "<", "<=", ">", ">=")
+
+    OP_ITY = 1
+    OP_EQ = 2
+    OP_NE = 3
+    OP_CPE = 4
+    OP_LT = 5
+    OP_LTE = 6
+    OP_GT = 7
+    OP_GTE = 8
+
+    def __init__(self, string, prereleases=True):
+        """
+        Constructs a new ``PEP440VersionSpecifier`` instance.
+
+        :param string:
+            A unicode string of the pep440 version specifier.
+        """
+        match = self._regex.match(string.lower())
+        if not match:
+            raise PEP440InvalidVersionSpecifierError(
+                "'{}' is not a valid PEP 440 version specifier string".format(string)
+            )
+
+        (
+            op,
+            epoch,
+            release,
+            wildcard,
+            pre,
+            pre_l,
+            pre_n,
+            post,
+            post_n1,
+            _,
+            post_n2,
+            dev,
+            _,
+            dev_n,
+            local,
+        ) = match.groups()
+
+        self._operator = self._op_str.index(op) if op else self.OP_EQ
+        self._prefix = bool(wildcard)
+        self._prereleases = prereleases
+
+        epoch = int(epoch or 0)
+        release = tuple(map(int, release.split(".")))
+
+        if self._prefix:
+            if self._operator not in (self.OP_EQ, self.OP_NE):
+                raise PEP440InvalidVersionSpecifierError(
+                    "'{}' is not a valid PEP 440 version specifier string".format(string)
+                )
+
+            self._tup = (epoch, release)
+            return
+
+        if self._operator == self.OP_CPE and len(release) < 2:
+            raise PEP440InvalidVersionSpecifierError(
+                "'{}' is not a valid PEP 440 version specifier string".format(string)
+            )
+
+        prerelease = ()
+
+        if pre:
+            if pre_l == "a" or pre_l == "alpha":
+                pre_l = -3
+            elif pre_l == "b" or pre_l == "beta":
+                pre_l = -2
+            else:
+                pre_l = -1
+            prerelease += ((pre_l, int(pre_n or 0)),)
+
+        if post:
+            prerelease += ((1, int(post_n1 or post_n2 or 0)),)
+
+        if dev:
+            prerelease += ((-4, int(dev_n or 0)),)
+
+        while len(prerelease) < 3:
+            prerelease += ((0, 0),)
+
+        tup = ()
+        if local:
+            if self._operator not in (self.OP_EQ, self.OP_NE, self.OP_ITY):
+                raise PEP440InvalidVersionSpecifierError(
+                    "'{}' is not a valid PEP 440 version specifier string".format(string)
+                )
+
+            for seg in _local_version_separators.split(local):
+                try:
+                    tup += ((int(seg), ""),)
+                except ValueError:
+                    tup += ((-1, seg),)
+        local = tup
+
+        self._tup = (epoch, release, prerelease, local)
+
+    def __repr__(self):
+        return "<{0.__class__.__name__}('{0!s}')>".format(self)
+
+    def __str__(self):
+        return self._op_str[self._operator] + self.version_string()
+
+    def __contains__(self, version):
+        return self.contains(version)
+
+    def __hash__(self):
+        return hash((self._operator, self._tup))
+
+    def contains(self, version):
+        """
+        Ensures the version matches this specifier
+
+        :param version:
+            A ``PEP440Version`` object to check.
+
+        :return:
+            Returns ``True`` if ``version`` satisfies the ``specifier``.
+        """
+        if not self._prereleases and version.is_prerelease:
+            return False
+
+        if self._prefix:
+            # The specifier is a version prefix (aka. wildcard present).
+            # Trim and normalize version to ( epoch, ( major [, minor [, micro ] ] ) ),
+            # so it matches exactly the specifier's length.
+
+            self_tup, ver_tup = _trim_tuples(self._tup, version._tup)
+
+            if self._operator == self.OP_EQ:
+                return ver_tup == self._tup
+
+            if self._operator == self.OP_NE:
+                return ver_tup != self._tup
+
+        else:
+            if self._operator == self.OP_ITY:
+                return version.version_string(False) == self.version_string(False)
+
+            self_tup, ver_tup = _norm_tuples(self._tup, version._tup)
+
+            if self._operator == self.OP_CPE:
+                # Compatible releases have an equivalent combination of >= and ==.
+                # That is, ~=2.2 is equivalent to >=2.2, ==2.*.
+                if ver_tup < self_tup:
+                    return False
+
+                # Create a prefix specifier with the last release segment removed.
+                self_tup, ver_tup = _trim_tuples((self._tup[0], self._tup[1][:-1]), version._tup)
+                return ver_tup == self_tup
+
+            if self._operator == self.OP_EQ:
+                return ver_tup == self_tup
+
+            if self._operator == self.OP_NE:
+                return ver_tup != self_tup
+
+            if self._operator == self.OP_GTE:
+                return ver_tup >= self_tup
+
+            if self._operator == self.OP_GT:
+                # TODO:
+                #  - parse local version and include into comparison result
+                #  - drop only invalid local versions
+                return ver_tup[:2] > self_tup[:2]
+
+            if self._operator == self.OP_LTE:
+                return ver_tup <= self_tup
+
+            if self._operator == self.OP_LT:
+                # TODO:
+                #  - parse local version and include into comparison result
+                #  - drop only invalid local versions
+                return ver_tup[:2] < self_tup[:2]
+
+        raise PEP440InvalidVersionSpecifierError(
+            "Invalid PEP 440 version specifier operator: {!r}".format(self._operator)
+        )
+
+    def filter(self, iterable):
+        return filter(self.contains, iterable)
+
+    def version_string(self, verbose=False):
+        return _version_string(*self._tup, prefix=self._prefix, verbose=verbose)
+
+
+def check_version(spec, version, include_prereleases=False):
+    """
+    Check whether a version satisfies a specifier
+
+    :param spec:
+        The pep440 version specifier string.
+
+    :param version:
+        The pep440 version string or ``PEP440Version`` object to check.
+
+    :param include_prereleases:
+        If ``True``, also succeed if the version is a pre-release.
+        If ``False`` (default), succeed only if the version is a final release.
+
+    :returns:
+        Returns ``True`` if ``version`` satisfies ``spec``.
+    """
+    if isinstance(version, str):
+        version = PEP440Version(version)
+    return PEP440VersionSpecifier(spec, include_prereleases).contains(version)
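A quick sketch of how these helpers compose (illustrative only; the import path is assumed to mirror the file being patched):

```
from package_control.pep440 import check_version

# Compatible-release operator: ~=2.2 behaves like >=2.2,==2.*
assert check_version("~=2.2", "2.5")
assert not check_version("~=2.2", "3.0")

# Prefix (wildcard) matching is only valid with == and !=
assert check_version("==1.0.*", "1.0.3")
assert not check_version("==1.0.*", "1.1.0")

# Pre-releases are rejected unless explicitly opted in
assert not check_version(">=1.0", "1.1a1")
assert check_version(">=1.0", "1.1a1", include_prereleases=True)
```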
diff --git a/app/lib/package_control/providers/__init__.py b/app/lib/package_control/providers/__init__.py
index 7549f9e..416ccd0 100644
--- a/app/lib/package_control/providers/__init__.py
+++ b/app/lib/package_control/providers/__init__.py
@@ -3,7 +3,7 @@
 from .github_user_provider import GitHubUserProvider
 from .gitlab_repository_provider import GitLabRepositoryProvider
 from .gitlab_user_provider import GitLabUserProvider
-from .repository_provider import RepositoryProvider
+from .json_repository_provider import JsonRepositoryProvider
 
 from .channel_provider import ChannelProvider
 
@@ -14,7 +14,7 @@
     GitHubUserProvider,
     GitLabRepositoryProvider,
     GitLabUserProvider,
-    RepositoryProvider
+    JsonRepositoryProvider
 ]
 
 CHANNEL_PROVIDERS = [ChannelProvider]
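Presumably these lists are consumed by probing each provider class in order, along these lines (a sketch, not shown in this diff; `repo_url` and `settings` are assumed inputs):

```
from package_control.providers import REPOSITORY_PROVIDERS

def make_provider(repo_url, settings):
    # The first provider whose match_url() accepts the URL wins; the JSON
    # provider is listed last and, via the base class, matches any URL.
    for provider_class in REPOSITORY_PROVIDERS:
        if provider_class.match_url(repo_url):
            return provider_class(repo_url, settings)
    return None
```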
diff --git a/app/lib/package_control/providers/base_repository_provider.py b/app/lib/package_control/providers/base_repository_provider.py
new file mode 100644
index 0000000..f3d14a6
--- /dev/null
+++ b/app/lib/package_control/providers/base_repository_provider.py
@@ -0,0 +1,129 @@
+class BaseRepositoryProvider:
+    """
+    Base repository downloader that fetches package info
+
+    This base class acts as an interface to ensure all providers expose the same
+    set of methods. All providers should therefore derive from this base class.
+
+    The structure of the JSON a repository should contain is documented in
+    example-packages.json.
+
+    :param repo_url:
+        The URL of the package repository
+
+    :param settings:
+        A dict containing at least the following fields:
+          `cache_length`,
+          `debug`,
+          `timeout`,
+          `user_agent`
+        Optional fields:
+          `http_proxy`,
+          `https_proxy`,
+          `proxy_username`,
+          `proxy_password`,
+          `query_string_params`
+    """
+
+    __slots__ = [
+        'broken_libraries',
+        'broken_packages',
+        'failed_sources',
+        'libraries',
+        'packages',
+        'repo_url',
+        'settings',
+    ]
+
+    def __init__(self, repo_url, settings):
+        self.broken_libraries = {}
+        self.broken_packages = {}
+        self.failed_sources = {}
+        self.libraries = None
+        self.packages = None
+        self.repo_url = repo_url
+        self.settings = settings
+
+    @classmethod
+    def match_url(cls, repo_url):
+        """
+        Indicates if this provider can handle the provided repo_url
+        """
+
+        return True
+
+    def prefetch(self):
+        """
+        Go out and perform HTTP operations, caching the result
+        """
+
+        [name for name, info in self.get_packages()]
+
+    def fetch(self):
+        """
+        Retrieves and loads the JSON for other methods to use
+
+        :raises:
+            NotImplementedError: when called
+        """
+
+        raise NotImplementedError()
+
+    def get_broken_libraries(self):
+        """
+        List of library names for libraries that are missing information
+
+        :return:
+            A generator of ("Library Name", Exception()) tuples
+        """
+
+        return self.broken_libraries.items()
+
+    def get_broken_packages(self):
+        """
+        List of package names for packages that are missing information
+
+        :return:
+            A generator of ("Package Name", Exception()) tuples
+        """
+
+        return self.broken_packages.items()
+
+    def get_failed_sources(self):
+        """
+        List of any URLs that could not be accessed while fetching this repository
+
+        :return:
+            A generator of ("https://example.com", Exception()) tuples
+        """
+
+        return self.failed_sources.items()
+
+    def get_libraries(self, invalid_sources=None):
+        """
+        For API-compatibility with JsonRepositoryProvider
+        """
+
+        return {}.items()
+
+    def get_packages(self, invalid_sources=None):
+        """
+        For API-compatibility with JsonRepositoryProvider
+        """
+
+        return {}.items()
+
+    def get_sources(self):
+        """
+        Return a list of current URLs that are directly referenced by the repo
+
+        :return:
+            A list of URLs
+        """
+
+        return [self.repo_url]
+
+    def get_renamed_packages(self):
+        """For API-compatibility with RepositoryProvider"""
+
+        return {}
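To make the new contract concrete, a minimal provider only needs to override match_url() and get_packages(); failure bookkeeping, sources, and rename handling are inherited. The subclass below is hypothetical:

```
from package_control.providers.base_repository_provider import BaseRepositoryProvider

class ExampleRepositoryProvider(BaseRepositoryProvider):

    @classmethod
    def match_url(cls, repo_url):
        # Claim only URLs this provider understands.
        return repo_url.startswith('https://example.com/')

    def get_packages(self, invalid_sources=None):
        # Serve repeated calls from the cached result, like the real providers.
        if self.packages is not None:
            yield from self.packages.items()
            return

        if invalid_sources is not None and self.repo_url in invalid_sources:
            return

        # ... fetch package info here; on failure record the exception in
        # self.failed_sources[self.repo_url] and fall through ...
        self.packages = {}
```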
diff --git a/app/lib/package_control/providers/bitbucket_repository_provider.py b/app/lib/package_control/providers/bitbucket_repository_provider.py
index 2179624..be1e496 100644
--- a/app/lib/package_control/providers/bitbucket_repository_provider.py
+++ b/app/lib/package_control/providers/bitbucket_repository_provider.py
@@ -1,13 +1,15 @@
-import re
-
 from ..clients.bitbucket_client import BitBucketClient
-from ..downloaders.downloader_exception import DownloaderException
 from ..clients.client_exception import ClientException
-from .provider_exception import ProviderException
-
+from ..downloaders.downloader_exception import DownloaderException
+from .base_repository_provider import BaseRepositoryProvider
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderRepoInfoException,
+    ProviderException,
+)
 
-class BitBucketRepositoryProvider():
 
+class BitBucketRepositoryProvider(BaseRepositoryProvider):
     """
     Allows using a public BitBucket repository as the source for a single package.
     For legacy purposes, this can also be treated as the source for a Package
@@ -32,57 +34,24 @@ class BitBucketRepositoryProvider():
           `http_basic_auth`
     """
 
-    def __init__(self, repo, settings):
-        self.cache = {}
-        self.repo = repo
-        self.settings = settings
-        self.failed_sources = {}
-
     @classmethod
-    def match_url(cls, repo):
-        """Indicates if this provider can handle the provided repo"""
-
-        return re.search('^https?://bitbucket.org/([^/]+/[^/]+)/?$', repo) is not None
-
-    def prefetch(self):
-        """
-        Go out and perform HTTP operations, caching the result
-
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
+    def match_url(cls, repo_url):
         """
+        Indicates if this provider can handle the provided repo_url
 
-        [name for name, info in self.get_packages()]
-
-    def get_failed_sources(self):
-        """
-        List of any URLs that could not be accessed while accessing this repository
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+                https://bitbucket.org/{user}/{repo}.git
+                https://bitbucket.org/{user}/{repo}
+                https://bitbucket.org/{user}/{repo}/
+                https://bitbucket.org/{user}/{repo}/src/{branch}
+                https://bitbucket.org/{user}/{repo}/src/{branch}/
 
         :return:
-            A generator of ("https://bitbucket.org/user/repo", Exception()) tuples
+            True if repo_url matches a supported scheme.
         """
-
-        return self.failed_sources.items()
-
-    def get_broken_packages(self):
-        """
-        For API-compatibility with RepositoryProvider
-        """
-
-        return {}.items()
-
-    def get_broken_dependencies(self):
-        """
-        For API-compatibility with RepositoryProvider
-        """
-
-        return {}.items()
-
-    def get_dependencies(self, ):
-        "For API-compatibility with RepositoryProvider"
-
-        return {}.items()
+        user, repo, _ = BitBucketClient.user_repo_branch(repo_url)
+        return bool(user and repo)
 
     def get_packages(self, invalid_sources=None):
         """
@@ -91,10 +60,6 @@ def get_packages(self, invalid_sources=None):
         :param invalid_sources:
             A list of URLs that should be ignored
 
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
-
         :return:
             A generator of
             (
@@ -126,24 +91,28 @@ def get_packages(self, invalid_sources=None):
             tuples
         """
 
-        if 'get_packages' in self.cache:
-            for key, value in self.cache['get_packages'].items():
+        if self.packages is not None:
+            for key, value in self.packages.items():
                 yield (key, value)
             return
 
-        client = BitBucketClient(self.settings)
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
 
-        if invalid_sources is not None and self.repo in invalid_sources:
-            raise StopIteration()
+        client = BitBucketClient(self.settings)
 
         try:
-            repo_info = client.repo_info(self.repo)
+            repo_info = client.repo_info(self.repo_url)
+            if not repo_info:
+                raise GitProviderRepoInfoException(self)
+
+            downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch'])
+            if not downloads:
+                raise GitProviderDownloadInfoException(self)
 
-            releases = []
-            for download in client.download_info(self.repo):
+            for download in downloads:
                 download['sublime_text'] = '*'
                 download['platforms'] = ['*']
-                releases.append(download)
 
             name = repo_info['name']
             details = {
@@ -151,35 +120,19 @@ def get_packages(self, invalid_sources=None):
                 'description': repo_info['description'],
                 'homepage': repo_info['homepage'],
                 'author': repo_info['author'],
-                'last_modified': releases[0].get('date'),
-                'releases': releases,
+                'last_modified': downloads[0].get('date'),
+                'releases': downloads,
                 'previous_names': [],
                 'labels': [],
-                'sources': [self.repo],
+                'sources': [self.repo_url],
                 'readme': repo_info['readme'],
                 'issues': repo_info['issues'],
                 'donate': repo_info['donate'],
                 'buy': None
             }
-            self.cache['get_packages'] = {name: details}
+            self.packages = {name: details}
             yield (name, details)
 
         except (DownloaderException, ClientException, ProviderException) as e:
-            self.failed_sources[self.repo] = e
-            self.cache['get_packages'] = {}
-            raise StopIteration()
-
-    def get_sources(self):
-        """
-        Return a list of current URLs that are directly referenced by the repo
-
-        :return:
-            A list of URLs
-        """
-
-        return [self.repo]
-
-    def get_renamed_packages(self):
-        """For API-compatibility with RepositoryProvider"""
-
-        return {}
+            self.failed_sources[self.repo_url] = e
+            self.packages = {}
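Assuming BitBucketClient.user_repo_branch() parses exactly the URL forms the docstring lists (its implementation is not part of this diff), the new match_url() accepts repository URLs and rejects bare user URLs:

```
repo_urls = (
    'https://bitbucket.org/user/repo',
    'https://bitbucket.org/user/repo.git',
    'https://bitbucket.org/user/repo/src/main/',
)
assert all(BitBucketRepositoryProvider.match_url(url) for url in repo_urls)

# A bare user URL yields no repository component from user_repo_branch(),
# so the provider declines it.
assert not BitBucketRepositoryProvider.match_url('https://bitbucket.org/user')
```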
diff --git a/app/lib/package_control/providers/channel_provider.py b/app/lib/package_control/providers/channel_provider.py
index 1373c80..05a79d4 100644
--- a/app/lib/package_control/providers/channel_provider.py
+++ b/app/lib/package_control/providers/channel_provider.py
@@ -1,26 +1,28 @@
 import json
 import os
 import re
+from itertools import chain
 
-try:
-    # Python 3
-    from urllib.parse import urljoin
-    str_cls = str
-except (ImportError):
-    # Python 2
-    from urlparse import urljoin
-    str_cls = unicode  # noqa
-
-from .. import text
 from ..console_write import console_write
+from ..download_manager import http_get, resolve_urls, update_url
+from ..package_version import version_sort
 from .provider_exception import ProviderException
-from .schema_compat import platforms_to_releases
-from ..download_manager import downloader, update_url
-from ..versions import version_sort
+from .schema_version import SchemaVersion
+
+
+class InvalidChannelFileException(ProviderException):
+
+    def __init__(self, channel, reason_message):
+        super().__init__(
+            'Channel %s does not appear to be a valid channel file because'
+            ' %s' % (channel.channel_url, reason_message))
 
 
-class ChannelProvider():
+class UncachedChannelRepositoryError(ProviderException):
+    pass
 
+
+class ChannelProvider:
     """
     Retrieves a channel and provides an API into the information
 
@@ -30,7 +32,7 @@ class ChannelProvider():
     BitBucket APIs and getting around not-infrequent HTTP 503 errors from
     those APIs.
 
-    :param channel:
+    :param channel_url:
         The URL of the channel
 
     :param settings:
@@ -48,16 +50,28 @@ class ChannelProvider():
           `http_basic_auth`
     """
 
-    def __init__(self, channel, settings):
-        self.channel_info = None
-        self.schema_version = '0.0'
-        self.schema_major_version = 0
-        self.channel = channel
+    __slots__ = [
+        'channel_url',
+        'schema_version',
+        'repositories',
+        'libraries_cache',
+        'packages_cache',
+        'settings',
+    ]
+
+    def __init__(self, channel_url, settings):
+        self.channel_url = channel_url
+        self.schema_version = SchemaVersion('4.0.0')
+        self.repositories = None
+        self.libraries_cache = {}
+        self.packages_cache = {}
         self.settings = settings
 
     @classmethod
-    def match_url(cls, channel):
-        """Indicates if this provider can handle the provided channel"""
+    def match_url(cls, channel_url):
+        """
+        Indicates if this provider can handle the provided channel_url.
+        """
 
         return True
 
@@ -77,92 +91,52 @@ def fetch(self):
         Retrieves and loads the JSON for other methods to use
 
         :raises:
-            ProviderException: when an error occurs with the channel contents
+            InvalidChannelFileException: when parsing or validating the file content fails
+            ProviderException: when an error occurs trying to open a file
             DownloaderException: when an error occurs trying to open a URL
         """
 
-        if self.channel_info is not None:
+        if self.repositories is not None:
             return
 
-        if re.match('https?://', self.channel, re.I):
-            with downloader(self.channel, self.settings) as manager:
-                channel_json = manager.fetch(self.channel, 'Error downloading channel.')
+        if re.match(r'https?://', self.channel_url, re.I):
+            json_string = http_get(self.channel_url, self.settings, 'Error downloading channel.')
 
         # All other channels are expected to be filesystem paths
         else:
-            if not os.path.exists(self.channel):
-                raise ProviderException(u'Error, file %s does not exist' % self.channel)
+            if not os.path.exists(self.channel_url):
+                raise ProviderException('Error, file %s does not exist' % self.channel_url)
 
             if self.settings.get('debug'):
                 console_write(
-                    u'''
+                    '''
                     Loading %s as a channel
                     ''',
-                    self.channel
+                    self.channel_url
                 )
 
             # We open as binary so we get bytes like the DownloadManager
-            with open(self.channel, 'rb') as f:
-                channel_json = f.read()
+            with open(self.channel_url, 'rb') as f:
+                json_string = f.read()
 
         try:
-            channel_info = json.loads(channel_json.decode('utf-8'))
-        except (ValueError):
-            raise ProviderException(u'Error parsing JSON from channel %s.' % self.channel)
-
-        schema_error = u'Channel %s does not appear to be a valid channel file because ' % self.channel
-
-        if 'schema_version' not in channel_info:
-            raise ProviderException(u'%s the "schema_version" JSON key is missing.' % schema_error)
+            channel_info = json.loads(json_string.decode('utf-8'))
+        except ValueError:
+            raise InvalidChannelFileException(self, 'parsing JSON failed.')
 
         try:
-            self.schema_version = channel_info.get('schema_version')
-            if isinstance(self.schema_version, int):
-                self.schema_version = float(self.schema_version)
-            if isinstance(self.schema_version, float):
-                self.schema_version = str_cls(self.schema_version)
-        except (ValueError):
-            raise ProviderException(u'%s the "schema_version" is not a valid number.' % schema_error)
-
-        if self.schema_version not in ['1.0', '1.1', '1.2', '2.0', '3.0.0']:
-            raise ProviderException(text.format(
-                u'''
-                %s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2, 2.0 or 3.0.0.
-                ''',
-                schema_error
-            ))
-
-        version_parts = self.schema_version.split('.')
-        self.schema_major_version = int(version_parts[0])
-
-        # Fix any out-dated repository URLs in the package cache
-        debug = self.settings.get('debug')
-        packages_key = 'packages_cache' if self.schema_major_version >= 2 else 'packages'
-        if packages_key in channel_info:
-            original_cache = channel_info[packages_key]
-            new_cache = {}
-            for repo in original_cache:
-                new_cache[update_url(repo, debug)] = original_cache[repo]
-            channel_info[packages_key] = new_cache
-
-        self.channel_info = channel_info
-
-    def get_name_map(self):
-        """
-        :raises:
-            ProviderException: when an error occurs with the channel contents
-            DownloaderException: when an error occurs trying to open a URL
-
-        :return:
-            A dict of the mapping for URL slug -> package name
-        """
+            schema_version = SchemaVersion(channel_info['schema_version'])
+        except KeyError:
+            raise InvalidChannelFileException(self, 'the "schema_version" JSON key is missing.')
+        except ValueError as e:
+            raise InvalidChannelFileException(self, e)
 
-        self.fetch()
-
-        if self.schema_major_version >= 2:
-            return {}
+        if 'repositories' not in channel_info:
+            raise InvalidChannelFileException(self, 'the "repositories" JSON key is missing.')
 
-        return self.channel_info.get('package_name_map', {})
+        self.repositories = self._migrate_repositories(channel_info, schema_version)
+        self.packages_cache = self._migrate_packages_cache(channel_info, schema_version)
+        self.libraries_cache = self._migrate_libraries_cache(channel_info, schema_version)
 
     def get_renamed_packages(self):
         """
@@ -176,19 +150,15 @@ def get_renamed_packages(self):
 
         self.fetch()
 
-        if self.schema_major_version >= 2:
-            output = {}
-            if 'packages_cache' in self.channel_info:
-                for repo in self.channel_info['packages_cache']:
-                    for package in self.channel_info['packages_cache'][repo]:
-                        previous_names = package.get('previous_names', [])
-                        if not isinstance(previous_names, list):
-                            previous_names = [previous_names]
-                        for previous_name in previous_names:
-                            output[previous_name] = package['name']
-            return output
+        output = {}
+        for package in chain(*self.packages_cache.values()):
+            previous_names = package.get('previous_names', [])
+            if not isinstance(previous_names, list):
+                previous_names = [previous_names]
+            for previous_name in previous_names:
+                output[previous_name] = package['name']
 
-        return self.channel_info.get('renamed_packages', {})
+        return output
 
     def get_repositories(self):
         """
@@ -202,45 +172,7 @@ def get_repositories(self):
 
         self.fetch()
 
-        if 'repositories' not in self.channel_info:
-            raise ProviderException(text.format(
-                u'''
-                Channel %s does not appear to be a valid channel file because
-                the "repositories" JSON key is missing.
-                ''',
-                self.channel
-            ))
-
-        # Determine a relative root so repositories can be defined
-        # relative to the location of the channel file.
-        scheme_match = re.match('(https?:)//', self.channel, re.I)
-        if scheme_match is None:
-            relative_base = os.path.dirname(self.channel)
-            is_http = False
-        else:
-            is_http = True
-
-        debug = self.settings.get('debug')
-        output = []
-        repositories = self.channel_info.get('repositories', [])
-        for repository in repositories:
-            if repository.startswith('//'):
-                if scheme_match is not None:
-                    repository = scheme_match.group(1) + repository
-                else:
-                    repository = 'https:' + repository
-            elif repository.startswith('/'):
-                # We don't allow absolute repositories
-                continue
-            elif repository.startswith('./') or repository.startswith('../'):
-                if is_http:
-                    repository = urljoin(self.channel, repository)
-                else:
-                    repository = os.path.join(relative_base, repository)
-                    repository = os.path.normpath(repository)
-            output.append(update_url(repository, debug))
-
-        return output
+        return self.repositories
 
     def get_sources(self):
         """
@@ -253,145 +185,232 @@ def get_sources(self):
 
         return self.get_repositories()
 
-    def get_packages(self, repo):
+    def get_packages(self, repo_url):
         """
         Provides access to the repository info that is cached in a channel
 
-        :param repo:
+        :param repo_url:
             The URL of the repository to get the cached info of
 
         :raises:
-            ProviderException: when an error occurs with the channel contents
             DownloaderException: when an error occurs trying to open a URL
+            UncachedChannelRepositoryError: when no cache entry exists for repo_url
 
         :return:
-            A dict in the format:
-            {
-                'Package Name': {
+            A generator of
+            (
+                'Package Name',
+                {
                     'name': name,
                     'description': description,
                     'author': author,
                     'homepage': homepage,
+                    'previous_names': [old_name, ...],
+                    'labels': [label, ...],
+                    'readme': url,
+                    'issues': url,
+                    'donate': url,
+                    'buy': url,
                     'last_modified': last modified date,
                     'releases': [
                         {
-                            'sublime_text': '*',
-                            'platforms': ['*'],
+                            'sublime_text': compatible version,
+                            'platforms': [platform name, ...],
+                            'python_versions': ['3.3', '3.8'],
                             'url': url,
                             'date': date,
-                            'version': version
+                            'version': version,
+                            'libraries': [library name, ...]
                         }, ...
-                    ],
-                    'previous_names': [old_name, ...],
-                    'labels': [label, ...],
-                    'readme': url,
-                    'issues': url,
-                    'donate': url,
-                    'buy': url
-                },
-                ...
-            }
+                    ]
+                }
+            )
+            tuples
         """
 
         self.fetch()
 
-        repo = update_url(repo, self.settings.get('debug'))
-
-        # The 2.0 channel schema renamed the key cached package info was
-        # stored under in order to be more clear to new users.
-        packages_key = 'packages_cache' if self.schema_major_version >= 2 else 'packages'
-
-        if self.channel_info.get(packages_key, False) is False:
-            return {}
-
-        if self.channel_info[packages_key].get(repo, False) is False:
-            return {}
-
-        output = {}
-        for package in self.channel_info[packages_key][repo]:
-            copy = package.copy()
-
-            # In schema version 2.0, we store a list of dicts containing info
-            # about all available releases. These include "version" and
-            # "platforms" keys that are used to pick the download for the
-            # current machine.
-            if self.schema_major_version < 2:
-                copy['releases'] = platforms_to_releases(copy, self.settings.get('debug'))
-                del copy['platforms']
-            else:
-                last_modified = None
-                for release in copy.get('releases', []):
-                    date = release.get('date')
-                    if not last_modified or (date and date > last_modified):
-                        last_modified = date
-                copy['last_modified'] = last_modified
-
-            defaults = {
-                'buy': None,
-                'issues': None,
-                'labels': [],
-                'previous_names': [],
-                'readme': None,
-                'donate': None
-            }
-            for field in defaults:
-                if field not in copy:
-                    copy[field] = defaults[field]
-
-            copy['releases'] = version_sort(copy['releases'], 'platforms', reverse=True)
-
-            output[copy['name']] = copy
+        if repo_url not in self.packages_cache:
+            raise UncachedChannelRepositoryError(repo_url)
 
-        return output
+        for package in self.packages_cache[repo_url]:
+            if package['releases']:
+                yield (package['name'], package)
 
-    def get_dependencies(self, repo):
+    def get_libraries(self, repo_url):
         """
-        Provides access to the dependency info that is cached in a channel
+        Provides access to the library info that is cached in a channel
 
-        :param repo:
+        :param repo_url:
             The URL of the repository to get the cached info of
 
         :raises:
-            ProviderException: when an error occurs with the channel contents
             DownloaderException: when an error occurs trying to open a URL
+            UncachedChannelRepositoryError: when no cache entry exists for repo_url
 
         :return:
-            A dict in the format:
-            {
-                'Dependency Name': {
+            A generator of
+            (
+                'Library Name',
+                {
                     'name': name,
-                    'load_order': two digit string,
                     'description': description,
                     'author': author,
                     'issues': URL,
                     'releases': [
                         {
-                            'sublime_text': '*',
-                            'platforms': ['*'],
+                            'sublime_text': compatible version,
+                            'platforms': [platform name, ...],
+                            'python_versions': ['3.3', '3.8'],
                             'url': url,
-                            'date': date,
                             'version': version,
-                            'sha256': hex_hash
+                            'sha256': hex hash
                         }, ...
                     ]
-                },
-                ...
-            }
+                }
+            )
+            tuples
         """
 
         self.fetch()
 
-        repo = update_url(repo, self.settings.get('debug'))
+        if repo_url not in self.libraries_cache:
+            raise UncachedChannelRepositoryError(repo_url)
 
-        if self.channel_info.get('dependencies_cache', False) is False:
-            return {}
+        for library in self.libraries_cache[repo_url]:
+            if library['releases']:
+                yield (library['name'], library)
 
-        if self.channel_info['dependencies_cache'].get(repo, False) is False:
-            return {}
+    def get_broken_packages(self):
+        """
+        Provides the names of packages that have no releases.
 
-        output = {}
-        for dependency in self.channel_info['dependencies_cache'][repo]:
-            dependency['releases'] = version_sort(dependency['releases'], 'platforms', reverse=True)
-            output[dependency['name']] = dependency
+        :raises:
+            ProviderException: when an error occurs with the channel contents
+            DownloaderException: when an error occurs trying to open a URL
 
-        return output
+        :return:
+            A generator of package names
+        """
+
+        self.fetch()
+
+        for package in chain(*self.packages_cache.values()):
+            if not package['releases']:
+                yield package['name']
+
+    def get_broken_libraries(self):
+        """
+        Provides the names of libraries that have no releases.
+
+        :raises:
+            ProviderException: when an error occurs with the channel contents
+            DownloaderException: when an error occurs trying to open a URL
+
+        :return:
+            A generator of library names
+        """
+
+        self.fetch()
+
+        for library in chain(*self.libraries_cache.values()):
+            if not library['releases']:
+                yield library['name']
+
+    def _migrate_repositories(self, channel_info, schema_version):
+
+        debug = self.settings.get('debug')
+
+        return [
+            update_url(url, debug)
+            for url in resolve_urls(self.channel_url, channel_info['repositories'])
+        ]
+
+    def _migrate_packages_cache(self, channel_info, schema_version):
+        """
+        Transform input packages cache to schema version 4.0.0
+
+        :param channel_info:
+            The input channel information of any schema version
+
+        :param schema_version:
+            The schema version of the input channel information
+
+        :returns:
+            packages_cache object of schema version 4.0.0
+        """
+
+        debug = self.settings.get('debug')
+
+        package_cache = channel_info.get('packages_cache', {})
+
+        defaults = {
+            'buy': None,
+            'issues': None,
+            'labels': [],
+            'previous_names': [],
+            'readme': None,
+            'donate': None
+        }
+
+        for package in chain(*package_cache.values()):
+
+            for field in defaults:
+                if field not in package:
+                    package[field] = defaults[field]
+
+            # Workaround for packagecontrol.io, which adds `authors` instead of `author`
+            # to cached packages and libraries.
+            if 'authors' in package:
+                package['author'] = package.pop('authors')
+
+            releases = version_sort(package.get('releases', []), 'platforms', reverse=True)
+            package['releases'] = releases
+            package['last_modified'] = releases[0]['date'] if releases else None
+
+            # The 4.0.0 channel schema renamed the `dependencies` key to `libraries`.
+            if schema_version.major < 4:
+                for release in package['releases']:
+                    if 'dependencies' in release:
+                        release['libraries'] = release.pop('dependencies')
+
+        # Fix any outdated repository URLs in the packages cache
+        return {update_url(name, debug): info for name, info in package_cache.items()}
+
+    def _migrate_libraries_cache(self, channel_info, schema_version):
+        """
+        Transform input libraries cache to schema version 4.0.0
+
+        :param channel_info:
+            The input channel information of any schema version
+
+        :param schema_version:
+            The schema version of the input channel information
+
+        :returns:
+            libraries_cache object of schema version 4.0.0
+        """
+
+        debug = self.settings.get('debug')
+
+        if schema_version.major < 4:
+            # The 4.0.0 channel schema renamed the 'dependencies_cache' key
+            # to 'libraries_cache'.
+            libraries_cache = channel_info.pop('dependencies_cache', {})
+
+            # The 4.0.0 channel schema drops 'load_order' from each library
+            # and adds a required 'python_versions' list to each release.
+            for library in chain(*libraries_cache.values()):
+                del library['load_order']
+                for release in library['releases']:
+                    release['python_versions'] = ['3.3']
+                library['releases'] = version_sort(library['releases'], 'platforms', reverse=True)
+
+        else:
+            libraries_cache = channel_info.get('libraries_cache', {})
+
+            for library in chain(*libraries_cache.values()):
+                library['releases'] = version_sort(library['releases'], 'platforms', reverse=True)
+
+        # Fix any outdated repository URLs in the libraries cache
+        return {update_url(name, debug): info for name, info in libraries_cache.items()}
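A before/after sketch of the library-cache migration described above; all field values are invented for illustration:

```
legacy = {  # schema < 4.0.0: stored under 'dependencies_cache'
    'https://example.com/repository.json': [{
        'name': 'bz2',
        'load_order': '02',
        'releases': [{'platforms': ['*'], 'version': '1.0.0', 'url': '...'}],
    }],
}

migrated = {  # schema 4.0.0: 'load_order' dropped, releases tagged
    'https://example.com/repository.json': [{
        'name': 'bz2',
        'releases': [{'platforms': ['*'], 'version': '1.0.0', 'url': '...',
                      'python_versions': ['3.3']}],
    }],
}
```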
diff --git a/app/lib/package_control/providers/github_repository_provider.py b/app/lib/package_control/providers/github_repository_provider.py
index 59a0eaf..c69b6f6 100644
--- a/app/lib/package_control/providers/github_repository_provider.py
+++ b/app/lib/package_control/providers/github_repository_provider.py
@@ -1,19 +1,23 @@
 import re
 
+from ..clients.client_exception import ClientException
 from ..clients.github_client import GitHubClient
 from ..downloaders.downloader_exception import DownloaderException
-from ..clients.client_exception import ClientException
-from .provider_exception import ProviderException
-
+from .base_repository_provider import BaseRepositoryProvider
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderRepoInfoException,
+    ProviderException,
+)
 
-class GitHubRepositoryProvider():
 
+class GitHubRepositoryProvider(BaseRepositoryProvider):
     """
     Allows using a public GitHub repository as the source for a single package.
     For legacy purposes, this can also be treated as the source for a Package
     Control "repository".
 
-    :param repo:
+    :param repo_url:
         The public web URL to the GitHub repository. Should be in the format
         `https://github.com/user/package` for the master branch, or
         `https://github.com/user/package/tree/{branch_name}` for any other
@@ -34,60 +38,28 @@ class GitHubRepositoryProvider():
           `http_basic_auth`
     """
 
-    def __init__(self, repo, settings):
-        self.cache = {}
+    def __init__(self, repo_url, settings):
         # Clean off the trailing .git to be more forgiving
-        self.repo = re.sub(r'\.git$', '', repo)
-        self.settings = settings
-        self.failed_sources = {}
+        super().__init__(re.sub(r'\.git$', '', repo_url), settings)
 
     @classmethod
-    def match_url(cls, repo):
-        """Indicates if this provider can handle the provided repo"""
-
-        master = re.search('^https?://github.com/[^/]+/[^/]+/?$', repo)
-        branch = re.search('^https?://github.com/[^/]+/[^/]+/tree/[^/]+/?$', repo)
-        return master is not None or branch is not None
-
-    def prefetch(self):
+    def match_url(cls, repo_url):
         """
-        Go out and perform HTTP operations, caching the result
+        Indicates if this provider can handle the provided repo_url
 
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
-        """
-
-        [name for name, info in self.get_packages()]
-
-    def get_failed_sources(self):
-        """
-        List of any URLs that could not be accessed while accessing this repository
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+                https://github.com/{user}/{repo}.git
+                https://github.com/{user}/{repo}
+                https://github.com/{user}/{repo}/
+                https://github.com/{user}/{repo}/tree/{branch}
+                https://github.com/{user}/{repo}/tree/{branch}/
 
         :return:
-            A generator of ("https://github.com/user/repo", Exception()) tuples
-        """
-
-        return self.failed_sources.items()
-
-    def get_broken_packages(self):
-        """
-        For API-compatibility with RepositoryProvider
-        """
-
-        return {}.items()
-
-    def get_broken_dependencies(self):
-        """
-        For API-compatibility with RepositoryProvider
+            True if repo_url matches a supported scheme.
         """
-
-        return {}.items()
-
-    def get_dependencies(self, ):
-        "For API-compatibility with RepositoryProvider"
-
-        return {}.items()
+        user, repo, _ = GitHubClient.user_repo_branch(repo_url)
+        return bool(user and repo)
 
     def get_packages(self, invalid_sources=None):
         """
@@ -96,10 +68,6 @@ def get_packages(self, invalid_sources=None):
         :param invalid_sources:
             A list of URLs that should be ignored
 
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
-
         :return:
             A generator of
             (
@@ -131,24 +99,28 @@ def get_packages(self, invalid_sources=None):
             tuples
         """
 
-        if 'get_packages' in self.cache:
-            for key, value in self.cache['get_packages'].items():
+        if self.packages is not None:
+            for key, value in self.packages.items():
                 yield (key, value)
             return
 
-        client = GitHubClient(self.settings)
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
 
-        if invalid_sources is not None and self.repo in invalid_sources:
-            raise StopIteration()
+        client = GitHubClient(self.settings)
 
         try:
-            repo_info = client.repo_info(self.repo)
+            repo_info = client.repo_info(self.repo_url)
+            if not repo_info:
+                raise GitProviderRepoInfoException(self)
+
+            downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch'])
+            if not downloads:
+                raise GitProviderDownloadInfoException(self)
 
-            releases = []
-            for download in client.download_info(self.repo):
+            for download in downloads:
                 download['sublime_text'] = '*'
                 download['platforms'] = ['*']
-                releases.append(download)
 
             name = repo_info['name']
             details = {
@@ -156,35 +128,19 @@ def get_packages(self, invalid_sources=None):
                 'description': repo_info['description'],
                 'homepage': repo_info['homepage'],
                 'author': repo_info['author'],
-                'last_modified': releases[0].get('date'),
-                'releases': releases,
+                'last_modified': downloads[0].get('date'),
+                'releases': downloads,
                 'previous_names': [],
                 'labels': [],
-                'sources': [self.repo],
+                'sources': [self.repo_url],
                 'readme': repo_info['readme'],
                 'issues': repo_info['issues'],
                 'donate': repo_info['donate'],
                 'buy': None
             }
-            self.cache['get_packages'] = {name: details}
+            self.packages = {name: details}
             yield (name, details)
 
         except (DownloaderException, ClientException, ProviderException) as e:
-            self.failed_sources[self.repo] = e
-            self.cache['get_packages'] = {}
-            raise StopIteration()
-
-    def get_sources(self):
-        """
-        Return a list of current URLs that are directly referenced by the repo
-
-        :return:
-            A list of URLs
-        """
-
-        return [self.repo]
-
-    def get_renamed_packages(self):
-        """For API-compatibility with RepositoryProvider"""
-
-        return {}
+            self.failed_sources[self.repo_url] = e
+            self.packages = {}
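The same error-handling change recurs across all git providers in this diff: get_packages() no longer aborts with `raise StopIteration()` (which PEP 479 turns into a RuntimeError inside generators on modern Python); failures are recorded instead. A sketch of the consumer side, with `settings` assumed to be a valid settings dict:

```
provider = GitHubRepositoryProvider('https://github.com/user/repo', settings)

for name, info in provider.get_packages():
    print(name, info['releases'][0]['version'])

# Download and parsing errors no longer escape the generator;
# they are collected for separate reporting.
for url, error in provider.get_failed_sources():
    print('failed:', url, error)
```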
diff --git a/app/lib/package_control/providers/github_user_provider.py b/app/lib/package_control/providers/github_user_provider.py
index 81723c4..f35e976 100644
--- a/app/lib/package_control/providers/github_user_provider.py
+++ b/app/lib/package_control/providers/github_user_provider.py
@@ -1,18 +1,20 @@
-import re
-
+from ..clients.client_exception import ClientException
 from ..clients.github_client import GitHubClient
 from ..downloaders.downloader_exception import DownloaderException
-from ..clients.client_exception import ClientException
-from .provider_exception import ProviderException
+from .base_repository_provider import BaseRepositoryProvider
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderUserInfoException,
+    ProviderException,
+)
 
 
-class GitHubUserProvider():
-
+class GitHubUserProvider(BaseRepositoryProvider):
     """
     Allows using a GitHub user/organization as the source for multiple packages,
     or in Package Control terminology, a "repository".
 
-    :param repo:
+    :param repo_url:
         The public web URL to the GitHub user/org. Should be in the format
         `https://github.com/user`.
 
@@ -31,57 +33,21 @@ class GitHubUserProvider():
           `http_basic_auth`
     """
 
-    def __init__(self, repo, settings):
-        self.cache = {}
-        self.repo = repo
-        self.settings = settings
-        self.failed_sources = {}
-
     @classmethod
-    def match_url(cls, repo):
-        """Indicates if this provider can handle the provided repo"""
-
-        return re.search('^https?://github.com/[^/]+/?$', repo) is not None
-
-    def prefetch(self):
-        """
-        Go out and perform HTTP operations, caching the result
+    def match_url(cls, repo_url):
         """
+        Indicates if this provider can handle the provided repo_url
 
-        [name for name, info in self.get_packages()]
-
-    def get_failed_sources(self):
-        """
-        List of any URLs that could not be accessed while accessing this repository
-
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+                https://github.com/{user}
+                https://github.com/{user}/
 
         :return:
-            A generator of ("https://github.com/user/repo", Exception()) tuples
-        """
-
-        return self.failed_sources.items()
-
-    def get_broken_packages(self):
-        """
-        For API-compatibility with RepositoryProvider
-        """
-
-        return {}.items()
-
-    def get_broken_dependencies(self):
-        """
-        For API-compatibility with RepositoryProvider
+            True if repo_url matches a supported scheme.
         """
-
-        return {}.items()
-
-    def get_dependencies(self, ):
-        "For API-compatibility with RepositoryProvider"
-
-        return {}.items()
+        user, repo, _ = GitHubClient.user_repo_branch(repo_url)
+        return bool(user and not repo)
 
     def get_packages(self, invalid_sources=None):
         """
@@ -90,10 +56,6 @@ def get_packages(self, invalid_sources=None):
         :param invalid_sources:
             A list of URLs that should be ignored
 
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
-
         :return:
             A generator of
             (
@@ -125,45 +87,53 @@ def get_packages(self, invalid_sources=None):
             tuples
         """
 
-        if 'get_packages' in self.cache:
-            for key, value in self.cache['get_packages'].items():
+        if self.packages is not None:
+            for key, value in self.packages.items():
                 yield (key, value)
             return
 
-        client = GitHubClient(self.settings)
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
 
-        if invalid_sources is not None and self.repo in invalid_sources:
-            raise StopIteration()
+        client = GitHubClient(self.settings)
 
         try:
-            user_repos = client.user_info(self.repo)
+            user_repos = client.user_info(self.repo_url)
+            if not user_repos:
+                raise GitProviderUserInfoException(self)
         except (DownloaderException, ClientException, ProviderException) as e:
-            self.failed_sources = [self.repo]
-            self.cache['get_packages'] = e
-            raise e
+            self.failed_sources[self.repo_url] = e
+            self.packages = {}
+            return
 
         output = {}
         for repo_info in user_repos:
+            author = repo_info['author']
+            name = repo_info['name']
+            repo_url = client.repo_url(author, name)
+
+            if invalid_sources is not None and repo_url in invalid_sources:
+                continue
+
             try:
-                name = repo_info['name']
-                repo_url = 'https://github.com/%s/%s' % (repo_info['author'], name)
+                downloads = client.download_info_from_branch(repo_url, repo_info['default_branch'])
+                if not downloads:
+                    raise GitProviderDownloadInfoException(self)
 
-                releases = []
-                for download in client.download_info(repo_url):
+                for download in downloads:
                     download['sublime_text'] = '*'
                     download['platforms'] = ['*']
-                    releases.append(download)
 
                 details = {
                     'name': name,
                     'description': repo_info['description'],
                     'homepage': repo_info['homepage'],
-                    'author': repo_info['author'],
-                    'last_modified': releases[0].get('date'),
-                    'releases': releases,
+                    'author': author,
+                    'last_modified': downloads[0].get('date'),
+                    'releases': downloads,
                     'previous_names': [],
                     'labels': [],
-                    'sources': [self.repo],
+                    'sources': [self.repo_url],
                     'readme': repo_info['readme'],
                     'issues': repo_info['issues'],
                     'donate': repo_info['donate'],
@@ -175,19 +145,4 @@ def get_packages(self, invalid_sources=None):
             except (DownloaderException, ClientException, ProviderException) as e:
                 self.failed_sources[repo_url] = e
 
-        self.cache['get_packages'] = output
-
-    def get_sources(self):
-        """
-        Return a list of current URLs that are directly referenced by the repo
-
-        :return:
-            A list of URLs
-        """
-
-        return [self.repo]
-
-    def get_renamed_packages(self):
-        """For API-compatibility with RepositoryProvider"""
-
-        return {}
+        self.packages = output
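With both GitHub providers delegating to GitHubClient.user_repo_branch(), user and repository URLs are now discriminated in one place. Assuming the helper returns a falsy repo for bare user URLs, the two providers partition the URL space cleanly:

```
assert GitHubUserProvider.match_url('https://github.com/wbond')
assert not GitHubUserProvider.match_url('https://github.com/wbond/package_control')

assert GitHubRepositoryProvider.match_url('https://github.com/wbond/package_control')
assert not GitHubRepositoryProvider.match_url('https://github.com/wbond')
```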
diff --git a/app/lib/package_control/providers/gitlab_repository_provider.py b/app/lib/package_control/providers/gitlab_repository_provider.py
index 59c340d..5872795 100644
--- a/app/lib/package_control/providers/gitlab_repository_provider.py
+++ b/app/lib/package_control/providers/gitlab_repository_provider.py
@@ -1,18 +1,23 @@
 import re
 
+from ..clients.client_exception import ClientException
 from ..clients.gitlab_client import GitLabClient
 from ..downloaders.downloader_exception import DownloaderException
-from ..clients.client_exception import ClientException
-from .provider_exception import ProviderException
+from .base_repository_provider import BaseRepositoryProvider
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderRepoInfoException,
+    ProviderException,
+)
 
 
-class GitLabRepositoryProvider():
+class GitLabRepositoryProvider(BaseRepositoryProvider):
     """
     Allows using a public GitLab repository as the source for a single package.
     For legacy purposes, this can also be treated as the source for a Package
     Control "repository".
 
-    :param repo:
+    :param repo_url:
         The public web URL to the GitLab repository. Should be in the format
         `https://gitlab.com/user/package` for the master branch, or
         `https://gitlab.com/user/package/-/tree/{branch_name}` for any other
@@ -33,60 +38,28 @@ class GitLabRepositoryProvider():
           `http_basic_auth`
     """
 
-    def __init__(self, repo, settings):
-        self.cache = {}
+    def __init__(self, repo_url, settings):
         # Clean off the trailing .git to be more forgiving
-        self.repo = re.sub(r'\.git$', '', repo)
-        self.settings = settings
-        self.failed_sources = {}
+        super().__init__(re.sub(r'\.git$', '', repo_url), settings)
 
     @classmethod
-    def match_url(cls, repo):
-        """Indicates if this provider can handle the provided repo"""
-
-        master = re.search('^https?://gitlab.com/[^/]+/[^/]+/?$', repo)
-        branch = re.search('^https?://gitlab.com/[^/]+/[^/]+/-/tree/[^/]+/?$', repo)
-        return master is not None or branch is not None
-
-    def prefetch(self):
+    def match_url(cls, repo_url):
         """
-        Go out and perform HTTP operations, caching the result
+        Indicates if this provider can handle the provided repo_url
 
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
-        """
-
-        [name for name, info in self.get_packages()]
-
-    def get_failed_sources(self):
-        """
-        List of any URLs that could not be accessed while accessing this repository
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+                https://gitlab.com/{user}/{repo}.git
+                https://gitlab.com/{user}/{repo}
+                https://gitlab.com/{user}/{repo}/
+                https://gitlab.com/{user}/{repo}/-/tree/{branch}
+                https://gitlab.com/{user}/{repo}/-/tree/{branch}/
 
         :return:
-            A generator of ("https://gitlab.com/user/repo", Exception()) tuples
-        """
-
-        return self.failed_sources.items()
-
-    def get_broken_packages(self):
-        """
-        For API-compatibility with RepositoryProvider
-        """
-
-        return {}.items()
-
-    def get_broken_dependencies(self):
-        """
-        For API-compatibility with RepositoryProvider
+            True if repo_url matches a supported scheme.
         """
-
-        return {}.items()
-
-    def get_dependencies(self, ):
-        """For API-compatibility with RepositoryProvider"""
-
-        return {}.items()
+        user, repo, _ = GitLabClient.user_repo_branch(repo_url)
+        return bool(user and repo)
 
     def get_packages(self, invalid_sources=None):
         """
@@ -95,10 +68,6 @@ def get_packages(self, invalid_sources=None):
         :param invalid_sources:
             A list of URLs that should be ignored
 
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
-
         :return:
             A generator of
             (
@@ -130,24 +99,28 @@ def get_packages(self, invalid_sources=None):
             tuples
         """
 
-        if 'get_packages' in self.cache:
-            for key, value in self.cache['get_packages'].items():
+        if self.packages is not None:
+            for key, value in self.packages.items():
                 yield (key, value)
             return
 
-        client = GitLabClient(self.settings)
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
 
-        if invalid_sources is not None and self.repo in invalid_sources:
-            raise StopIteration()
+        client = GitLabClient(self.settings)
 
         try:
-            repo_info = client.repo_info(self.repo)
+            repo_info = client.repo_info(self.repo_url)
+            if not repo_info:
+                raise GitProviderRepoInfoException(self)
+
+            downloads = client.download_info_from_branch(self.repo_url, repo_info['default_branch'])
+            if not downloads:
+                raise GitProviderDownloadInfoException(self)
 
-            releases = []
-            for download in client.download_info(self.repo):
+            for download in downloads:
                 download['sublime_text'] = '*'
                 download['platforms'] = ['*']
-                releases.append(download)
 
             name = repo_info['name']
             details = {
@@ -155,35 +128,19 @@ def get_packages(self, invalid_sources=None):
                 'description': repo_info['description'],
                 'homepage': repo_info['homepage'],
                 'author': repo_info['author'],
-                'last_modified': releases[0].get('date'),
-                'releases': releases,
+                'last_modified': downloads[0].get('date'),
+                'releases': downloads,
                 'previous_names': [],
                 'labels': [],
-                'sources': [self.repo],
+                'sources': [self.repo_url],
                 'readme': repo_info['readme'],
                 'issues': repo_info['issues'],
                 'donate': repo_info['donate'],
                 'buy': None
             }
-            self.cache['get_packages'] = {name: details}
+            self.packages = {name: details}
             yield (name, details)
 
         except (DownloaderException, ClientException, ProviderException) as e:
-            self.failed_sources[self.repo] = e
-            self.cache['get_packages'] = {}
-            raise StopIteration()
-
-    def get_sources(self):
-        """
-        Return a list of current URLs that are directly referenced by the repo
-
-        :return:
-            A list of URLs
-        """
-
-        return [self.repo]
-
-    def get_renamed_packages(self):
-        """For API-compatibility with RepositoryProvider"""
-
-        return {}
+            self.failed_sources[self.repo_url] = e
+            self.packages = {}
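
Both GitLab providers now classify URLs through `GitLabClient.user_repo_branch()` instead of ad-hoc regexes. A minimal sketch of dispatching a URL to the matching provider class by probing `match_url()` (the import paths, class names and the `pick_provider` helper are inferred from the file names in this patch, not confirmed by it; in this repo the package lives under `app/lib/`):

```python
from package_control.providers.gitlab_repository_provider import GitLabRepositoryProvider
from package_control.providers.gitlab_user_provider import GitLabUserProvider

def pick_provider(url):
    # First match wins: repository URLs yield (user, repo), user URLs only a user.
    for provider_class in (GitLabRepositoryProvider, GitLabUserProvider):
        if provider_class.match_url(url):
            return provider_class
    return None

print(pick_provider('https://gitlab.com/someuser/somerepo').__name__)  # GitLabRepositoryProvider
print(pick_provider('https://gitlab.com/someuser').__name__)           # GitLabUserProvider
```
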
diff --git a/app/lib/package_control/providers/gitlab_user_provider.py b/app/lib/package_control/providers/gitlab_user_provider.py
index acc1cb9..61f63f5 100644
--- a/app/lib/package_control/providers/gitlab_user_provider.py
+++ b/app/lib/package_control/providers/gitlab_user_provider.py
@@ -1,17 +1,20 @@
-import re
-
 from ..clients.client_exception import ClientException
 from ..clients.gitlab_client import GitLabClient
 from ..downloaders.downloader_exception import DownloaderException
-from .provider_exception import ProviderException
+from .base_repository_provider import BaseRepositoryProvider
+from .provider_exception import (
+    GitProviderDownloadInfoException,
+    GitProviderUserInfoException,
+    ProviderException,
+)
 
 
-class GitLabUserProvider:
+class GitLabUserProvider(BaseRepositoryProvider):
     """
     Allows using a GitLab user/organization as the source for multiple packages,
     or in Package Control terminology, a 'repository'.
 
-    :param repo:
+    :param repo_url:
         The public web URL to the GitLab user/org. Should be in the format
         `https://gitlab.com/user`.
 
@@ -30,59 +33,21 @@ class GitLabUserProvider:
           `http_basic_auth`
     """
 
-    def __init__(self, repo, settings):
-        self.cache = {}
-        self.repo = repo
-        self.settings = settings
-        self.failed_sources = {}
-
     @classmethod
-    def match_url(cls, repo):
-        """
-        Indicates if this provider can handle the provided repo
-        """
-
-        return re.search('^https?://gitlab.com/[^/]+/?$', repo) is not None
-
-    def prefetch(self):
-        """
-        Go out and perform HTTP operations, caching the result
-        """
-
-        [name for name, info in self.get_packages()]
-
-    def get_failed_sources(self):
+    def match_url(cls, repo_url):
         """
-        List of any URLs that could not be accessed while accessing this repository
+        Indicates if this provider can handle the provided repo_url
 
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
+        :param repo_url:
+            The URL to the repository, in one of the forms:
+                https://gitlab.com/{user}
+                https://gitlab.com/{user}/
 
         :return:
-            A generator of ('https://gitlab.com/user/repo', Exception()) tuples
+            True if repo_url matches a supported scheme.
         """
-
-        return self.failed_sources.items()
-
-    def get_broken_packages(self):
-        """
-        For API-compatibility with RepositoryProvider
-        """
-
-        return {}.items()
-
-    def get_broken_dependencies(self):
-        """
-        For API-compatibility with RepositoryProvider
-        """
-
-        return {}.items()
-
-    def get_dependencies(self, ):
-        '''For API-compatibility with RepositoryProvider'''
-
-        return {}.items()
+        user, repo, _ = GitLabClient.user_repo_branch(repo_url)
+        return bool(user and not repo)
 
     def get_packages(self, invalid_sources=None):
         """
@@ -91,10 +56,6 @@ def get_packages(self, invalid_sources=None):
         :param invalid_sources:
             A list of URLs that should be ignored
 
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
-
         :return:
             A generator of
             (
@@ -126,71 +87,62 @@ def get_packages(self, invalid_sources=None):
             tuples
         """
 
-        if 'get_packages' in self.cache:
-            for key, value in self.cache['get_packages'].items():
+        if self.packages is not None:
+            for key, value in self.packages.items():
                 yield (key, value)
             return
 
-        client = GitLabClient(self.settings)
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
 
-        if invalid_sources is not None and self.repo in invalid_sources:
-            raise StopIteration()
+        client = GitLabClient(self.settings)
 
         try:
-            user_repos = client.user_info(self.repo)
+            user_repos = client.user_info(self.repo_url)
+            if not user_repos:
+                raise GitProviderUserInfoException(self)
         except (DownloaderException, ClientException, ProviderException) as e:
-            self.failed_sources = [self.repo]
-            self.cache['get_packages'] = e
-            raise e
+            self.failed_sources[self.repo_url] = e
+            self.packages = {}
+            return
 
         output = {}
         for repo_info in user_repos:
+            author = repo_info['author']
+            name = repo_info['name']
+            repo_url = client.repo_url(author, name)
+
+            if invalid_sources is not None and repo_url in invalid_sources:
+                continue
+
             try:
-                name = repo_info['name']
-                repo_url = 'https://gitlab.com/%s/%s' % (repo_info['author'],
-                                                         name)
+                downloads = client.download_info_from_branch(repo_url, repo_info['default_branch'])
+                if not downloads:
+                    raise GitProviderDownloadInfoException(self)
 
-                releases = []
-                for download in client.download_info(repo_url):
+                for download in downloads:
                     download['sublime_text'] = '*'
                     download['platforms'] = ['*']
-                    releases.append(download)
 
                 details = {
                     'name': name,
                     'description': repo_info['description'],
                     'homepage': repo_info['homepage'],
-                    'author': repo_info['author'],
-                    'last_modified': releases[0].get('date'),
-                    'releases': releases,
+                    'author': author,
+                    'last_modified': downloads[0].get('date'),
+                    'releases': downloads,
                     'previous_names': [],
                     'labels': [],
-                    'sources': [self.repo],
+                    'sources': [self.repo_url],
                     'readme': repo_info['readme'],
                     'issues': repo_info['issues'],
                     'donate': repo_info['donate'],
-                    'buy': None,
+                    'buy': None
                 }
                 output[name] = details
                 yield (name, details)
 
-            except (DownloaderException, ClientException,
-                    ProviderException) as e:
+            except (DownloaderException, ClientException, ProviderException) as e:
                 self.failed_sources[repo_url] = e
 
-        self.cache['get_packages'] = output
-
-    def get_sources(self):
-        """
-        Return a list of current URLs that are directly referenced by the repo
-
-        :return:
-            A list of URLs
-        """
-
-        return [self.repo]
-
-    def get_renamed_packages(self):
-        """For API-compatibility with RepositoryProvider"""
-
-        return {}
+        self.packages = output
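
Note the changed error contract: `get_packages()` no longer raises from inside the generator (the removed `raise StopIteration()` has been a `RuntimeError` in generators since PEP 479); failures are recorded in `failed_sources` and the generator simply ends. A hedged usage sketch, with the settings dict trimmed to the keys the docstring requires and an illustrative import path:

```python
from package_control.providers.gitlab_user_provider import GitLabUserProvider

settings = {
    'cache_length': 300,  # seconds
    'debug': False,
    'timeout': 30,
    'user_agent': 'Package Control Example',
}
provider = GitLabUserProvider('https://gitlab.com/someuser', settings)

packages = dict(provider.get_packages())  # draining the generator performs the HTTP work
for url, error in provider.failed_sources.items():
    print('failed source:', url, '-', error)
```
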
diff --git a/app/lib/package_control/providers/json_repository_provider.py b/app/lib/package_control/providers/json_repository_provider.py
new file mode 100644
index 0000000..30c517f
--- /dev/null
+++ b/app/lib/package_control/providers/json_repository_provider.py
@@ -0,0 +1,931 @@
+import json
+import re
+import os
+from itertools import chain
+from urllib.parse import urlparse
+
+from ..clients.bitbucket_client import BitBucketClient
+from ..clients.client_exception import ClientException
+from ..clients.github_client import GitHubClient
+from ..clients.gitlab_client import GitLabClient
+from ..clients.pypi_client import PyPiClient
+from ..console_write import console_write
+from ..download_manager import http_get, resolve_url, resolve_urls, update_url
+from ..downloaders.downloader_exception import DownloaderException
+from ..package_version import version_sort
+from .base_repository_provider import BaseRepositoryProvider
+from .provider_exception import ProviderException
+from .schema_version import SchemaVersion
+
+try:
+    # running within ST
+    from ..selectors import is_compatible_platform, is_compatible_version
+    IS_ST = True
+except ImportError:
+    # running on CLI or server
+    IS_ST = False
+
+
+class InvalidRepoFileException(ProviderException):
+    def __init__(self, repo, reason_message):
+        super().__init__(
+            'Repository {} does not appear to be a valid repository file because'
+            ' {}'.format(repo.repo_url, reason_message))
+
+
+class InvalidLibraryReleaseKeyError(ProviderException):
+    def __init__(self, repo, name, key):
+        super().__init__(
+            'Invalid or missing release-level key "{}" in library "{}"'
+            ' in repository "{}".'.format(key, name, repo))
+
+
+class InvalidPackageReleaseKeyError(ProviderException):
+    def __init__(self, repo, name, key):
+        super().__init__(
+            'Invalid or missing release-level key "{}" in package "{}"'
+            ' in repository "{}".'.format(key, name, repo))
+
+
+class JsonRepositoryProvider(BaseRepositoryProvider):
+    """
+    Generic repository downloader that fetches package info
+
+    With the current channel/repository architecture where the channel file
+    caches info from all included repositories, these package providers just
+    serve the purpose of downloading packages not in the default channel.
+
+    The structure of the JSON a repository should contain is located in
+    example-packages.json.
+
+    :param repo_url:
+        The URL of the package repository
+
+    :param settings:
+        A dict containing at least the following fields:
+          `cache_length`,
+          `debug`,
+          `timeout`,
+          `user_agent`
+        Optional fields:
+          `http_proxy`,
+          `https_proxy`,
+          `proxy_username`,
+          `proxy_password`,
+          `query_string_params`,
+          `http_basic_auth`
+    """
+
+    def __init__(self, repo_url, settings):
+        super().__init__(repo_url, settings)
+        self.included_urls = set()
+        self.repo_info = None
+        self.schema_version = None
+
+    def fetch(self):
+        """
+        Retrieves and loads the JSON for other methods to use
+
+        :return:
+            True, if repository info was fetched and parsed successfully,
+            otherwise False. Errors are recorded in ``self.failed_sources``.
+        """
+
+        if self.repo_info is not None:
+            return True
+
+        if self.repo_url in self.failed_sources:
+            return False
+
+        try:
+            self.repo_info = self.fetch_repo(self.repo_url)
+            self.schema_version = self.repo_info['schema_version']
+        except (DownloaderException, ClientException, ProviderException) as e:
+            self.failed_sources[self.repo_url] = e
+            self.libraries = {}
+            self.packages = {}
+            return False
+
+        return True
+
+    def fetch_repo(self, location):
+        """
+        Fetches the contents of a URL or file path
+
+        :param location:
+            The URL or file path
+
+        :raises:
+            ProviderException: when an error occurs trying to open a file
+            DownloaderException: when an error occurs trying to open a URL
+
+        :return:
+            A dict of the parsed JSON
+        """
+
+        # Prevent circular includes
+        if location in self.included_urls:
+            raise ProviderException('Error, repository "%s" already included.' % location)
+
+        self.included_urls.add(location)
+
+        if re.match(r'https?://', location, re.I):
+            json_string = http_get(location, self.settings, 'Error downloading repository.')
+
+        # Anything that is not a URL is expected to be a filesystem path
+        else:
+            if not os.path.exists(location):
+                raise ProviderException('Error, file %s does not exist' % location)
+
+            if self.settings.get('debug'):
+                console_write(
+                    '''
+                    Loading %s as a repository
+                    ''',
+                    location
+                )
+
+            # We open as binary so we get bytes like the DownloadManager
+            with open(location, 'rb') as f:
+                json_string = f.read()
+
+        try:
+            repo_info = json.loads(json_string.decode('utf-8'))
+        except ValueError:
+            raise InvalidRepoFileException(self, 'parsing JSON failed.')
+
+        try:
+            schema_version = repo_info['schema_version'] = SchemaVersion(repo_info['schema_version'])
+        except KeyError:
+            raise InvalidRepoFileException(
+                self, 'the "schema_version" JSON key is missing.')
+        except ValueError as e:
+            raise InvalidRepoFileException(self, e)
+
+        # Main keys depending on schema version
+        if schema_version.major < 4:
+            repo_keys = {'packages', 'dependencies', 'includes'}
+        else:
+            repo_keys = {'packages', 'libraries', 'includes'}
+
+        # Check existence of at least one required main key
+        if not set(repo_info.keys()) & repo_keys:
+            raise InvalidRepoFileException(self, 'it doesn\'t look like a repository.')
+
+        # Check type of existing main keys
+        for key in repo_keys:
+            if key in repo_info and not isinstance(repo_info[key], list):
+                raise InvalidRepoFileException(self, 'the "%s" key is not an array.' % key)
+
+        # Migrate dependencies to libraries
+        # The 4.0.0 repository schema renamed dependencies key to libraries.
+        if schema_version.major < 4:
+            repo_info['libraries'] = repo_info.pop('dependencies', [])
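+            # e.g. a schema 3.0.0 file {"schema_version": "3.0.0",
+            # "dependencies": [...]} is exposed as repo_info['libraries']
+            # from here on, so the rest of the provider deals with one key name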
+
+        # Allow repositories to include other repositories, recursively
+        includes = repo_info.pop('includes', None)
+        if includes:
+            for include in resolve_urls(self.repo_url, includes):
+                try:
+                    include_info = self.fetch_repo(include)
+                except (DownloaderException, ClientException, ProviderException) as e:
+                    self.failed_sources[include] = e
+                else:
+                    include_version = include_info['schema_version']
+                    if include_version != schema_version:
+                        raise ProviderException(
+                            'Schema version of included repository %s doesn\'t match its parent.' % include)
+
+                    repo_info['packages'].extend(include_info.get('packages', []))
+                    repo_info['libraries'].extend(include_info.get('libraries', []))
+
+        return repo_info
+
+    def get_libraries(self, invalid_sources=None):
+        """
+        Provides access to the libraries in this repository
+
+        :param invalid_sources:
+            A list of URLs that should be ignored
+
+        :return:
+            A generator of
+            (
+                'Library Name',
+                {
+                    'name': name,
+                    'description': description,
+                    'author': author,
+                    'issues': URL,
+                    'releases': [
+                        {
+                            'sublime_text': compatible version,
+                            'platforms': [platform name, ...],
+                            'python_versions': ['3.3', '3.8'],
+                            'url': url,
+                            'version': version,
+                            'sha256': hex hash
+                        }, ...
+                    ],
+                    'sources': [url, ...]
+                }
+            )
+            tuples
+        """
+
+        if self.libraries is not None:
+            for key, value in self.libraries.items():
+                yield (key, value)
+            return
+
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
+
+        if not self.fetch():
+            return
+
+        if not self.repo_info:
+            return
+
+        if self.schema_version.major >= 4:
+            allowed_library_keys = {
+                'name', 'description', 'author', 'homepage', 'issues', 'releases'
+            }
+            allowed_release_keys = {  # todo: remove 'branch'
+                'base', 'version', 'sublime_text', 'platforms', 'python_versions',
+                'branch', 'tags', 'asset', 'url', 'date', 'sha256'
+            }
+        else:
+            allowed_library_keys = {
+                'name', 'description', 'author', 'issues', 'load_order', 'releases'
+            }
+            allowed_release_keys = {
+                'base', 'version', 'sublime_text', 'platforms',
+                'branch', 'tags', 'url', 'date', 'sha256'
+            }
+
+        copied_library_keys = ('name', 'description', 'author', 'homepage', 'issues')
+        copied_release_keys = ('date', 'version', 'sha256')
+        default_platforms = ['*']
+        default_python_versions = ['3.3']
+        default_sublime_text = '*'
+
+        debug = self.settings.get('debug')
+
+        clients = [
+            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient, PyPiClient)
+        ]
+
+        output = {}
+        for library in self.repo_info.get('libraries', []):
+            info = {
+                'releases': [],
+                'sources': [self.repo_url]
+            }
+
+            for field in copied_library_keys:
+                field_value = library.get(field)
+                if field_value:
+                    info[field] = field_value
+
+            if 'name' not in info:
+                self.failed_sources[self.repo_url] = ProviderException(
+                    'No "name" value for one of libraries'
+                    ' in repository "{}".'.format(self.repo_url)
+                )
+                continue
+
+            try:
+                unknown_keys = set(library) - allowed_library_keys
+                if unknown_keys:
+                    raise ProviderException(
+                        'The "{}" key(s) in library "{}" in repository {} are not supported.'.format(
+                            '", "'.join(sorted(unknown_keys)), info['name'],
+                            self.repo_url
+                        )
+                    )
+
+                releases = library.get('releases', [])
+                if releases and not isinstance(releases, list):
+                    raise ProviderException(
+                        'The "releases" value is not an array for library "{}"'
+                        ' in repository {}.'.format(info['name'], self.repo_url)
+                    )
+
+                staged_releases = {}
+
+                for release in releases:
+                    download_info = {}
+
+                    unknown_keys = set(release) - allowed_release_keys
+                    if unknown_keys:
+                        raise ProviderException(
+                            'The "{}" key(s) in one of the releases of library "{}"'
+                            ' in repository {} are not supported.'.format(
+                                '", "'.join(sorted(unknown_keys)), info['name'], self.repo_url
+                            )
+                        )
+
+                    # Validate libraries
+                    # the key can be used by upstream repositories to specify dependencies
+                    key = 'libraries' if self.schema_version.major >= 4 else 'dependencies'
+                    value = release.get(key, [])
+                    if value:
+                        if not isinstance(value, list):
+                            raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                        download_info['libraries'] = value
+
+                    # Validate supported platforms
+                    key = 'platforms'
+                    value = release.get(key, default_platforms)
+                    if isinstance(value, str):
+                        value = [value]
+                    elif not isinstance(value, list):
+                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                    # ignore incompatible release (avoid downloading/evaluating further information)
+                    if IS_ST and not is_compatible_platform(value):
+                        continue
+                    download_info[key] = value
+
+                    # Validate supported python_versions
+                    key = 'python_versions'
+                    value = release.get(key, default_python_versions)
+                    if isinstance(value, str):
+                        value = [value]
+                    elif not isinstance(value, list):
+                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                    download_info[key] = value
+
+                    # Validate supported ST version
+                    key = 'sublime_text'
+                    value = release.get(key, default_sublime_text)
+                    if not isinstance(value, str):
+                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                    # ignore incompatible release (avoid downloading/evaluating further information)
+                    if IS_ST and not is_compatible_version(value):
+                        continue
+                    download_info[key] = value
+
+                    # Validate url
+                    # if present, it is an explicit or resolved release
+                    url = release.get('url')
+                    if url:
+                        for key in copied_release_keys:
+                            if key in release:
+                                value = release[key]
+                                if not value or not isinstance(value, str):
+                                    raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], key)
+                                download_info[key] = value
+
+                        if 'version' not in download_info:
+                            raise ProviderException(
+                                'Missing "version" key in release with explicit "url" of library "{}"'
+                                ' in repository "{}".'.format(info['name'], self.repo_url)
+                            )
+
+                        download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
+                        is_http = urlparse(download_info['url']).scheme == 'http'
+                        if is_http and 'sha256' not in download_info:
+                            raise ProviderException(
+                                'No "sha256" key for the non-secure "url" value in one of the releases'
+                                ' of the library "{}" in repository {}.'.format(info['name'], self.repo_url)
+                            )
+
+                        info['releases'].append(download_info)
+                        continue
+
+                    # Resolve release template using `base` and `branch` or `tags` keys
+
+                    base = release.get('base')
+                    if not base:
+                        raise InvalidLibraryReleaseKeyError(self.repo_url, info['name'], 'base')
+
+                    base_url = resolve_url(self.repo_url, base)
+                    downloads = None
+
+                    # Evaluate and resolve "tags" and "branch" release templates
+                    asset = release.get('asset')
+                    branch = release.get('branch')
+                    tags = release.get('tags')
+                    extra = None if tags is True else tags
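+                    # "tags": true matches all semver tags; a string value is
+                    # treated as a tag prefix (e.g. "st4-") and passed on as extra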
+
+                    if asset:
+                        if branch:
+                            raise ProviderException(
+                                'Illegal "asset" key "{}" for branch based release of library "{}"'
+                                ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                            )
+                        # group releases with assets by base_url and tag-prefix
+                        # to prepare gathering download_info with a single API call
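+                        # e.g. staged_releases[(base_url, 'st4-')] ==
+                        #      [(asset_pattern, download_info), ...]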
+                        staged_releases.setdefault((base_url, extra), []).append((asset, download_info))
+                        continue
+
+                    elif tags:
+                        for client in clients:
+                            downloads = client.download_info_from_tags(base_url, extra)
+                            if downloads is not None:
+                                break
+
+                    elif branch:
+                        for client in clients:
+                            downloads = client.download_info_from_branch(base_url, branch)
+                            if downloads is not None:
+                                break
+                    else:
+                        raise ProviderException(
+                            'Missing "branch", "tags" or "url" key in release of library "{}"'
+                            ' in repository "{}".'.format(info['name'], self.repo_url)
+                        )
+
+                    if downloads is None:
+                        raise ProviderException(
+                            'Invalid "base" value "{}" for one of the releases of library "{}"'
+                            ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                        )
+
+                    if downloads is False:
+                        raise ProviderException(
+                            'No valid semver tags found at "{}" for library "{}"'
+                            ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                        )
+
+                    for download in downloads:
+                        download.update(download_info)
+                        info['releases'].append(download)
+
+                # gather download_info from staged releases with assets
+                for (base_url, extra), asset_templates in staged_releases.items():
+                    for client in clients:
+                        downloads = client.download_info_from_releases(base_url, asset_templates, extra)
+                        if downloads is not None:
+                            info['releases'].extend(downloads)
+                            break
+
+                # check required library keys
+                for key in ('description', 'author', 'issues'):
+                    if not info.get(key):
+                        raise ProviderException(
+                            'Missing or invalid "{}" key for library "{}"'
+                            ' in repository "{}".'.format(key, info['name'], self.repo_url)
+                        )
+
+                # Empty releases means the library is unavailable on the current platform or ST version
+                if not info['releases']:
+                    continue
+
+                info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
+
+                output[info['name']] = info
+                yield (info['name'], info)
+
+            except (DownloaderException, ClientException, ProviderException) as e:
+                self.broken_libriaries[info['name']] = e
+
+        self.libraries = output
+
+    def get_packages(self, invalid_sources=None):
+        """
+        Provides access to the packages in this repository
+
+        :param invalid_sources:
+            A list of URLs that should be ignored
+
+        :return:
+            A generator of
+            (
+                'Package Name',
+                {
+                    'name': name,
+                    'description': description,
+                    'author': author,
+                    'homepage': homepage,
+                    'previous_names': [old_name, ...],
+                    'labels': [label, ...],
+                    'sources': [url, ...],
+                    'readme': url,
+                    'issues': url,
+                    'donate': url,
+                    'buy': url,
+                    'last_modified': last modified date,
+                    'releases': [
+                        {
+                            'sublime_text': compatible version,
+                            'platforms': [platform name, ...],
+                            'url': url,
+                            'date': date,
+                            'version': version,
+                            'libraries': [library name, ...]
+                        }, ...
+                    ]
+                }
+            )
+            tuples
+        """
+
+        if self.packages is not None:
+            for key, value in self.packages.items():
+                yield (key, value)
+            return
+
+        if invalid_sources is not None and self.repo_url in invalid_sources:
+            return
+
+        if not self.fetch():
+            return
+
+        if not self.repo_info:
+            return
+
+        copied_package_keys = (
+            'name',
+            'description',
+            'author',
+            'last_modified',
+            'previous_names',
+            'labels',
+            'homepage',
+            'readme',
+            'issues',
+            'donate',
+            'buy'
+        )
+        copied_release_keys = ('date', 'version')
+        default_platforms = ['*']
+        default_sublime_text = '*'
+
+        debug = self.settings.get('debug')
+
+        clients = [
+            Client(self.settings) for Client in (GitHubClient, GitLabClient, BitBucketClient)
+        ]
+
+        output = {}
+        for package in self.repo_info.get('packages', []):
+            info = {
+                'releases': [],
+                'sources': [self.repo_url]
+            }
+
+            for field in copied_package_keys:
+                if package.get(field):
+                    info[field] = package.get(field)
+
+            # Try to grab package-level details from GitHub, GitLab or BitBucket
+            details = package.get('details')
+            if details:
+                details = resolve_url(self.repo_url, details)
+
+                if invalid_sources is not None and details in invalid_sources:
+                    continue
+
+                if details not in info['sources']:
+                    info['sources'].append(details)
+
+                try:
+                    repo_info = None
+
+                    for client in clients:
+                        repo_info = client.repo_info(details)
+                        if repo_info:
+                            break
+                    else:
+                        raise ProviderException(
+                            'Invalid "details" value "{}" for one of the packages'
+                            ' in the repository {}.'.format(details, self.repo_url)
+                        )
+
+                    del repo_info['default_branch']
+
+                    # When grabbing details, prefer explicit field values over the values
+                    # from the GitHub, GitLab or BitBucket API
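+                    # e.g. dict(chain({'author': 'api'}.items(), {'author': 'json'}.items()))
+                    # == {'author': 'json'}, because later items win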
+                    info = dict(chain(repo_info.items(), info.items()))
+
+                except (DownloaderException, ClientException, ProviderException) as e:
+                    if 'name' in info:
+                        self.broken_packages[info['name']] = e
+                    self.failed_sources[details] = e
+                    continue
+
+            if 'name' not in info:
+                self.failed_sources[self.repo_url] = ProviderException(
+                    'No "name" value for one of the packages'
+                    ' in the repository {}.'.format(self.repo_url)
+                )
+                continue
+
+            try:
+                if not info.get('author'):
+                    raise ProviderException(
+                        'Missing or invalid "author" key for package "{}"'
+                        ' in repository "{}".'.format(info['name'], self.repo_url)
+                    )
+
+                # evaluate releases
+
+                releases = package.get('releases')
+
+                # If no releases info was specified, also grab the download info from GH or BB
+                if self.schema_version.major == 2 and not releases and details:
+                    releases = [{'details': details}]
+
+                if not releases:
+                    raise ProviderException(
+                        'No "releases" value for the package "{}"'
+                        ' in the repository {}.'.format(info['name'], self.repo_url)
+                    )
+
+                if not isinstance(releases, list):
+                    raise ProviderException(
+                        'The "releases" value is not an array for the package "{}"'
+                        ' in the repository {}.'.format(info['name'], self.repo_url)
+                    )
+
+                staged_releases = {}
+
+                # This allows developers to specify a GH or BB location to get releases from,
+                # especially tags URLs (https://github.com/user/repo/tags or
+                # https://bitbucket.org/user/repo#tags)
+                for release in releases:
+                    download_info = {}
+
+                    # Validate libraries
+                    # the key can be used by upstream repositories to specify dependencies
+                    key = 'libraries' if self.schema_version.major >= 4 else 'dependencies'
+                    value = release.get(key, [])
+                    if value:
+                        if not isinstance(value, list):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        download_info['libraries'] = value
+
+                    # Validate supported platforms
+                    key = 'platforms'
+                    value = release.get(key, default_platforms)
+                    if isinstance(value, str):
+                        value = [value]
+                    elif not isinstance(value, list):
+                        raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                    # ignore incompatible release (avoid downloading/evaluating further information)
+                    if IS_ST and not is_compatible_platform(value):
+                        continue
+                    download_info[key] = value
+
+                    # Validate supported python_versions (requires schema 4.0.0!)
+                    key = 'python_versions'
+                    value = release.get(key)
+                    if value:
+                        # Package releases may optionally contain a `python_versions` list to tell
+                        # which python versions they are compatible with.
+                        # The main purpose is to be able to opt in unmaintained packages to python 3.8
+                        # if they are known not to cause trouble.
+                        if isinstance(value, str):
+                            value = [value]
+                        elif not isinstance(value, list):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        download_info[key] = value
+
+                    if self.schema_version.major >= 3:
+                        # Validate supported ST version
+                        # missing key indicates any ST3+ build is supported
+                        key = 'sublime_text'
+                        value = release.get(key, default_sublime_text)
+                        if not isinstance(value, str):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        # ignore incompatible release (avoid downloading/evaluating further information)
+                        if IS_ST and not is_compatible_version(value):
+                            continue
+                        download_info[key] = value
+
+                        # Validate url
+                        # if present, it is an explicit or resolved release
+                        url = release.get('url')
+                        if url:
+                            # Validate date and version
+                            for key in copied_release_keys:
+                                if key in release:
+                                    value = release[key]
+                                    if not value or not isinstance(value, str):
+                                        raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                                    download_info[key] = value
+
+                            if 'version' not in download_info:
+                                raise ProviderException(
+                                    'Missing "version" key in release with explicit "url" of package "{}"'
+                                    ' in repository "{}".'.format(info['name'], self.repo_url)
+                                )
+
+                            download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
+                            info['releases'].append(download_info)
+                            continue
+
+                        # Resolve release template using `base` and `branch` or `tags` keys
+
+                        base = release.get('base')
+                        if not base:
+                            base = details
+                        if not base:
+                            raise ProviderException(
+                                'Missing root-level "details" key, or release-level "base" key'
+                                ' for one of the releases of package "{}"'
+                                ' in repository {}.'.format(info['name'], self.repo_url)
+                            )
+
+                        base_url = resolve_url(self.repo_url, base)
+                        downloads = None
+
+                        asset = release.get('asset')
+                        branch = release.get('branch')
+                        tags = release.get('tags')
+                        extra = None if tags is True else tags
+
+                        if asset:
+                            if branch:
+                                raise ProviderException(
+                                    'Illegal "asset" key "{}" for branch based release of library "{}"'
+                                    ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                                )
+                            # group releases with assets by base_url and tag-prefix
+                            # to prepare gathering download_info with a single API call
+                            staged_releases.setdefault((base_url, extra), []).append((asset, download_info))
+                            continue
+
+                        elif tags:
+                            for client in clients:
+                                downloads = client.download_info_from_tags(base_url, extra)
+                                if downloads is not None:
+                                    break
+
+                        elif branch:
+                            for client in clients:
+                                downloads = client.download_info_from_branch(base_url, branch)
+                                if downloads is not None:
+                                    break
+                        else:
+                            raise ProviderException(
+                                'Missing "branch", "tags" or "url" key in release of package "{}"'
+                                ' in repository "{}".'.format(info['name'], self.repo_url)
+                            )
+
+                        if downloads is None:
+                            raise ProviderException(
+                                'Invalid "base" value "{}" for one of the releases of package "{}"'
+                                ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                            )
+
+                        if downloads is False:
+                            raise ProviderException(
+                                'No valid semver tags found at "{}" for package "{}"'
+                                ' in repository "{}".'.format(base, info['name'], self.repo_url)
+                            )
+
+                        for download in downloads:
+                            download.update(download_info)
+                            info['releases'].append(download)
+
+                    elif self.schema_version.major == 2:
+                        # missing key indicates ST2 release; no longer supported
+                        key = 'sublime_text'
+                        value = release.get(key)
+                        if not value:
+                            continue
+                        if not isinstance(value, str):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                        # ignore incompatible release (avoid downloading/evaluating further information)
+                        if IS_ST and not is_compatible_version(value):
+                            continue
+                        download_info[key] = value
+
+                        # Validate url
+                        # if present, it is an explicit or resolved release
+                        url = release.get('url')
+                        if url:
+                            for key in copied_release_keys:
+                                if key in release:
+                                    value = release[key]
+                                    if not value or not isinstance(value, str):
+                                        raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], key)
+                                    download_info[key] = value
+
+                            if 'version' not in download_info:
+                                raise ProviderException(
+                                    'Missing "version" key in release with explicit "url" of package "{}"'
+                                    ' in repository "{}".'.format(info['name'], self.repo_url)
+                                )
+
+                            download_info['url'] = update_url(resolve_url(self.repo_url, url), debug)
+                            info['releases'].append(download_info)
+                            continue
+
+                        # Evaluate and resolve "tags" and "branch" release templates
+
+                        download_details = release.get('details')
+                        if not download_details or not isinstance(download_details, str):
+                            raise InvalidPackageReleaseKeyError(self.repo_url, info['name'], 'details')
+
+                        download_details = resolve_url(self.repo_url, release['details'])
+
+                        downloads = None
+
+                        for client in clients:
+                            downloads = client.download_info(download_details)
+                            if downloads is not None:
+                                break
+
+                        if downloads is None:
+                            raise ProviderException(
+                                'Invalid "details" value "{}" for one of the releases of package "{}"'
+                                ' in repository "{}".'.format(download_details, info['name'], self.repo_url)
+                            )
+
+                        if downloads is False:
+                            raise ProviderException(
+                                'No valid semver tags found at "{}" for package "{}"'
+                                ' in repository "{}".'.format(download_details, info['name'], self.repo_url)
+                            )
+
+                        for download in downloads:
+                            download.update(download_info)
+                            info['releases'].append(download)
+
+                # gather download_info from staged releases with assets
+                for (base_url, extra), asset_templates in staged_releases.items():
+                    for client in clients:
+                        downloads = client.download_info_from_releases(base_url, asset_templates, extra)
+                        if downloads is not None:
+                            info['releases'].extend(downloads)
+                            break
+
+                # Empty releases means the package is unavailable on the current platform or ST version
+                if not info['releases']:
+                    continue
+
+                info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
+
+                for field in ('previous_names', 'labels'):
+                    if field not in info:
+                        info[field] = []
+
+                if 'readme' in info:
+                    info['readme'] = update_url(resolve_url(self.repo_url, info['readme']), debug)
+
+                for field in ('description', 'readme', 'issues', 'donate', 'buy'):
+                    if field not in info:
+                        info[field] = None
+
+                if 'homepage' not in info:
+                    info['homepage'] = details if details else self.repo_url
+
+                if 'last_modified' not in info:
+                    # Extract a date from the newest release
+                    date = '1970-01-01 00:00:00'
+                    for release in info['releases']:
+                        release_date = release.get('date')
+                        if release_date and isinstance(release_date, str) and release_date > date:
+                            date = release_date
+                    info['last_modified'] = date
+
+                output[info['name']] = info
+                yield (info['name'], info)
+
+            except (DownloaderException, ClientException, ProviderException) as e:
+                self.broken_packages[info['name']] = e
+
+        self.packages = output
+
+    def get_sources(self):
+        """
+        Return a list of current URLs that are directly referenced by the repo
+
+        :return:
+            A list of URLs and/or file paths
+        """
+
+        if not self.fetch():
+            return []
+
+        output = [self.repo_url]
+        for package in self.repo_info.get('packages', []):
+            details = package.get('details')
+            if details:
+                output.append(details)
+        return output
+
+    def get_renamed_packages(self):
+        """:return: A dict of the packages that have been renamed"""
+
+        if not self.fetch():
+            return {}
+
+        output = {}
+        for package in self.repo_info.get('packages', []):
+            if 'previous_names' not in package:
+                continue
+
+            previous_names = package['previous_names']
+            if not isinstance(previous_names, list):
+                previous_names = [previous_names]
+
+            for previous_name in previous_names:
+                output[previous_name] = package['name']
+
+        return output
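
End to end, the new provider is driven purely through its generators; `fetch()` runs lazily on first access and failures surface through the bookkeeping dicts rather than exceptions. A hedged sketch against a local repository file (the path, settings values and import path are illustrative):

```python
from package_control.providers.json_repository_provider import JsonRepositoryProvider

settings = {'cache_length': 300, 'debug': True, 'timeout': 30, 'user_agent': 'Package Control Example'}
provider = JsonRepositoryProvider('/tmp/repository.json', settings)

for name, info in provider.get_packages():
    newest = info['releases'][0]  # releases come back version-sorted, newest first
    print(name, newest['version'], newest['url'])

print('renamed:', provider.get_renamed_packages())
print('failed sources:', dict(provider.failed_sources))
print('broken packages:', dict(provider.broken_packages))
```
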
diff --git a/app/lib/package_control/providers/provider_exception.py b/app/lib/package_control/providers/provider_exception.py
index bb23d1e..4964782 100644
--- a/app/lib/package_control/providers/provider_exception.py
+++ b/app/lib/package_control/providers/provider_exception.py
@@ -1,17 +1,51 @@
-import sys
-
-
 class ProviderException(Exception):
 
     """If a provider could not return information"""
 
-    def __unicode__(self):
-        return self.args[0]
+
+class GitProviderUserInfoException(ProviderException):
+    """
+    Exception for signalling a user information download error.
+
+    The exception is used to indicate that a given URL is not in the form
+    the provider expects for downloading user info.
+    """
+
+    def __init__(self, provider):
+        self.provider_name = provider.__class__.__name__
+        self.url = provider.repo_url
+
+    def __str__(self):
+        return '%s unable to fetch user information from "%s".' % (self.provider_name, self.url)
+
+
+class GitProviderRepoInfoException(ProviderException):
+    """
+    Exception for signalling a repository information download error.
+
+    The exception is used to indicate that a given URL is not in the form
+    the provider expects for downloading repo info.
+    """
+
+    def __init__(self, provider):
+        self.provider_name = provider.__class__.__name__
+        self.url = provider.repo_url
 
     def __str__(self):
-        if sys.version_info < (3,):
-            return self.__bytes__()
-        return self.__unicode__()
+        return '%s unable to fetch repo information from "%s".' % (self.provider_name, self.url)
+
 
-    def __bytes__(self):
-        return self.__unicode__().encode('utf-8')
+class GitProviderDownloadInfoException(ProviderException):
+    """
+    Exception for signalling a download information error.
+
+    The exception is used to indicate that a given URL is not in the form
+    the provider expects for downloading release information.
+    """
+
+    def __init__(self, provider, url=None):
+        self.provider_name = provider.__class__.__name__
+        self.url = url or provider.repo_url
+
+    def __str__(self):
+        return '%s unable to fetch download information from "%s".' % (self.provider_name, self.url)
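
The new exception classes build their message from the provider instance at format time. A minimal sketch with a stand-in provider object (only `repo_url` and the class name matter; the import path is illustrative):

```python
from package_control.providers.provider_exception import GitProviderDownloadInfoException

class FakeProvider:
    repo_url = 'https://gitlab.com/user/repo'

print(GitProviderDownloadInfoException(FakeProvider()))
# FakeProvider unable to fetch download information from "https://gitlab.com/user/repo".
```
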
diff --git a/app/lib/package_control/providers/release_selector.py b/app/lib/package_control/providers/release_selector.py
deleted file mode 100644
index 672825b..0000000
--- a/app/lib/package_control/providers/release_selector.py
+++ /dev/null
@@ -1,94 +0,0 @@
-import re
-import sublime
-
-from ..versions import version_exclude_prerelease
-
-
-def filter_releases(package, settings, releases):
-    """
-    Returns all releases in the list of releases that are compatible with
-    the current platform and version of Sublime Text
-
-    :param package:
-        The name of the package
-
-    :param settings:
-        A dict optionally containing the `install_prereleases` key
-
-    :param releases:
-        A list of release dicts
-
-    :return:
-        A list of release dicts
-    """
-
-    platform_selectors = [
-        sublime.platform() + '-' + sublime.arch(),
-        sublime.platform(),
-        '*'
-    ]
-
-    install_prereleases = settings.get('install_prereleases')
-    allow_prereleases = install_prereleases is True
-    if not allow_prereleases and isinstance(install_prereleases, list) and package in install_prereleases:
-        allow_prereleases = True
-
-    if not allow_prereleases:
-        releases = version_exclude_prerelease(releases)
-
-    output = []
-    for release in releases:
-        platforms = release.get('platforms', '*')
-        if not isinstance(platforms, list):
-            platforms = [platforms]
-
-        matched = False
-        for selector in platform_selectors:
-            if selector in platforms:
-                matched = True
-                break
-        if not matched:
-            continue
-
-        # Default to '*' (for legacy reasons), see #604
-        if not is_compatible_version(release.get('sublime_text', '*')):
-            continue
-
-        output.append(release)
-
-    return output
-
-
-def is_compatible_version(version_range):
-    min_version = float("-inf")
-    max_version = float("inf")
-
-    if version_range == '*':
-        return True
-
-    gt_match = re.match(r'>(\d+)$', version_range)
-    ge_match = re.match(r'>=(\d+)$', version_range)
-    lt_match = re.match(r'<(\d+)$', version_range)
-    le_match = re.match(r'<=(\d+)$', version_range)
-    range_match = re.match(r'(\d+) - (\d+)$', version_range)
-
-    if gt_match:
-        min_version = int(gt_match.group(1)) + 1
-    elif ge_match:
-        min_version = int(ge_match.group(1))
-    elif lt_match:
-        max_version = int(lt_match.group(1)) - 1
-    elif le_match:
-        max_version = int(le_match.group(1))
-    elif range_match:
-        min_version = int(range_match.group(1))
-        max_version = int(range_match.group(2))
-    else:
-        return None
-
-    if min_version > int(sublime.version()):
-        return False
-    if max_version < int(sublime.version()):
-        return False
-
-    return True
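
For reference, the version-range grammar the removed helper parsed (this role is now filled by `..selectors.is_compatible_version`, imported by json_repository_provider.py above) behaves as follows; results are illustrative, assuming `sublime.version()` returns '4143':

```python
# Behaviour of the removed is_compatible_version(), assuming build 4143:
#   '*'            -> True   (any build)
#   '>=4000'       -> True   (4143 >= 4000)
#   '<4000'        -> False  (ceiling is build 3999)
#   '3000 - 4200'  -> True   (inclusive range)
#   '~4.0'         -> None   (unrecognized selector)
```
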
diff --git a/app/lib/package_control/providers/repository_provider.py b/app/lib/package_control/providers/repository_provider.py
deleted file mode 100644
index 05fd797..0000000
--- a/app/lib/package_control/providers/repository_provider.py
+++ /dev/null
@@ -1,940 +0,0 @@
-import json
-import re
-import os
-from itertools import chain
-
-try:
-    # Python 3
-    from urllib.parse import urljoin, urlparse
-    str_cls = str
-except (ImportError):
-    # Python 2
-    from urlparse import urljoin, urlparse
-    str_cls = unicode  # noqa
-
-from .. import text
-from ..console_write import console_write
-from .provider_exception import ProviderException
-from .schema_compat import platforms_to_releases
-from ..downloaders.downloader_exception import DownloaderException
-from ..clients.client_exception import ClientException
-from ..clients.github_client import GitHubClient
-from ..clients.gitlab_client import GitLabClient
-from ..clients.bitbucket_client import BitBucketClient
-from ..download_manager import downloader, update_url
-from ..versions import version_sort
-
-
-class RepositoryProvider():
-
-    """
-    Generic repository downloader that fetches package info
-
-    With the current channel/repository architecture where the channel file
-    caches info from all includes repositories, these package providers just
-    serve the purpose of downloading packages not in the default channel.
-
-    The structure of the JSON a repository should contain is located in
-    example-packages.json.
-
-    :param repo:
-        The URL of the package repository
-
-    :param settings:
-        A dict containing at least the following fields:
-          `cache_length`,
-          `debug`,
-          `timeout`,
-          `user_agent`
-        Optional fields:
-          `http_proxy`,
-          `https_proxy`,
-          `proxy_username`,
-          `proxy_password`,
-          `query_string_params`,
-          `http_basic_auth`
-    """
-
-    def __init__(self, repo, settings):
-        self.cache = {}
-        self.repo_info = None
-        self.schema_version = '0.0'
-        self.schema_major_version = 0
-        self.repo = repo
-        self.settings = settings
-        self.failed_sources = {}
-        self.broken_packages = {}
-        self.broken_dependencies = {}
-
-    @classmethod
-    def match_url(cls, repo):
-        """Indicates if this provider can handle the provided repo"""
-
-        return True
-
-    def prefetch(self):
-        """
-        Go out and perform HTTP operations, caching the result
-
-        :raises:
-            DownloaderException: when there is an issue download package info
-            ClientException: when there is an issue parsing package info
-        """
-
-        [name for name, info in self.get_packages()]
-
-    def get_failed_sources(self):
-        """
-        List of any URLs that could not be accessed while fetching this repository
-
-        :return:
-            A generator of ("https://example.com", Exception()) tuples
-        """
-
-        return self.failed_sources.items()
-
-    def get_broken_packages(self):
-        """
-        List of package names for packages that are missing information
-
-        :return:
-            A generator of ("Package Name", Exception()) tuples
-        """
-
-        return self.broken_packages.items()
-
-    def get_broken_dependencies(self):
-        """
-        List of dependency names for dependencies that are missing information
-
-        :return:
-            A generator of ("Dependency Name", Exception()) tuples
-        """
-
-        return self.broken_dependencies.items()
-
-    def fetch(self):
-        """
-        Retrieves and loads the JSON for other methods to use
-
-        :raises:
-            ProviderException: when an error occurs trying to open a file
-            DownloaderException: when an error occurs trying to open a URL
-        """
-
-        if self.repo_info is not None:
-            return
-
-        self.repo_info = self.fetch_location(self.repo)
-        for key in ['packages', 'dependencies']:
-            if key not in self.repo_info:
-                self.repo_info[key] = []
-
-        if 'includes' not in self.repo_info:
-            return
-
-        # Allow repositories to include other repositories
-        scheme_match = re.match('(https?:)//', self.repo, re.I)
-        if scheme_match is None:
-            relative_base = os.path.dirname(self.repo)
-            is_http = False
-        else:
-            is_http = True
-
-        includes = self.repo_info.get('includes', [])
-        del self.repo_info['includes']
-        for include in includes:
-            if include.startswith('//'):
-                if scheme_match is not None:
-                    include = scheme_match.group(1) + include
-                else:
-                    include = 'https:' + include
-            elif include.startswith('/'):
-                # We don't allow absolute includes
-                continue
-            elif include.startswith('./') or include.startswith('../'):
-                if is_http:
-                    include = urljoin(self.repo, include)
-                else:
-                    include = os.path.join(relative_base, include)
-                    include = os.path.normpath(include)
-            include_info = self.fetch_location(include)
-            included_packages = include_info.get('packages', [])
-            self.repo_info['packages'].extend(included_packages)
-            included_dependencies = include_info.get('dependencies', [])
-            self.repo_info['dependencies'].extend(included_dependencies)
-
-    def fetch_and_validate(self):
-        """
-        Fetches the repository and validates that it is parseable
-
-        :return:
-            Boolean if the repo was fetched and validated
-        """
-
-        if self.repo in self.failed_sources:
-            return False
-
-        if self.repo_info is not None:
-            return True
-
-        try:
-            self.fetch()
-        except (DownloaderException, ProviderException) as e:
-            self.failed_sources[self.repo] = e
-            self.cache['get_packages'] = {}
-            return False
-
-        def fail(message):
-            exception = ProviderException(message)
-            self.failed_sources[self.repo] = exception
-            self.cache['get_packages'] = {}
-            return
-        schema_error = u'Repository %s does not appear to be a valid repository file because ' % self.repo
-
-        if 'schema_version' not in self.repo_info:
-            error_string = u'%s the "schema_version" JSON key is missing.' % schema_error
-            fail(error_string)
-            return False
-
-        try:
-            self.schema_version = self.repo_info.get('schema_version')
-            if isinstance(self.schema_version, int):
-                self.schema_version = float(self.schema_version)
-            if isinstance(self.schema_version, float):
-                self.schema_version = str_cls(self.schema_version)
-        except (ValueError):
-            error_string = u'%s the "schema_version" is not a valid number.' % schema_error
-            fail(error_string)
-            return False
-
-        if self.schema_version not in ['1.0', '1.1', '1.2', '2.0', '3.0.0']:
-            fail(text.format(
-                u'''
-                %s the "schema_version" is not recognized. Must be one of: 1.0, 1.1, 1.2, 2.0 or 3.0.0.
-                ''',
-                schema_error
-            ))
-            return False
-
-        version_parts = self.schema_version.split('.')
-        self.schema_major_version = int(version_parts[0])
-
-        if 'packages' not in self.repo_info:
-            error_string = u'%s the "packages" JSON key is missing.' % schema_error
-            fail(error_string)
-            return False
-
-        if isinstance(self.repo_info['packages'], dict):
-            fail(text.format(
-                u'''
-                %s the "packages" key is an object, not an array. This indicates it is a channel not a repository.
-                ''',
-                schema_error
-            ))
-            return False
-
-        return True
-
-    def fetch_location(self, location):
-        """
-        Fetches the contents of a URL or file path
-
-        :param location:
-            The URL or file path
-
-        :raises:
-            ProviderException: when an error occurs trying to open a file
-            DownloaderException: when an error occurs trying to open a URL
-
-        :return:
-            A dict of the parsed JSON
-        """
-
-        if re.match('https?://', location, re.I):
-            with downloader(location, self.settings) as manager:
-                json_string = manager.fetch(location, 'Error downloading repository.')
-
-        # Anything that is not a URL is expected to be a filesystem path
-        else:
-            if not os.path.exists(location):
-                raise ProviderException(u'Error, file %s does not exist' % location)
-
-            if self.settings.get('debug'):
-                console_write(
-                    u'''
-                    Loading %s as a repository
-                    ''',
-                    location
-                )
-
-            # We open as binary so we get bytes like the DownloadManager
-            with open(location, 'rb') as f:
-                json_string = f.read()
-
-        try:
-            return json.loads(json_string.decode('utf-8'))
-        except (ValueError):
-            raise ProviderException(u'Error parsing JSON from repository %s.' % location)
-
-    def get_dependencies(self, invalid_sources=None):
-        """
-        Provides access to the dependencies in this repository
-
-        :param invalid_sources:
-            A list of URLs that should not be fetched from
-
-        :raises:
-            ProviderException: when an error occurs trying to open a file
-            DownloaderException: when there is an issue downloading package info
-            ClientException: when there is an issue parsing package info
-
-        :return:
-            A generator of
-            (
-                'Dependency Name',
-                {
-                    'name': name,
-                    'load_order': two digit string,
-                    'description': description,
-                    'author': author,
-                    'issues': URL,
-                    'releases': [
-                        {
-                            'sublime_text': compatible version,
-                            'platforms': [platform name, ...],
-                            'url': url,
-                            'version': version,
-                            'sha256': hex hash
-                        }, ...
-                    ],
-                    'sources': [url, ...]
-                }
-            )
-            tuples
-        """
-
-        if 'get_dependencies' in self.cache:
-            for key, value in self.cache['get_dependencies'].items():
-                yield (key, value)
-            return
-
-        if invalid_sources is not None and self.repo in invalid_sources:
-            return
-
-        if not self.fetch_and_validate():
-            return
-
-        debug = self.settings.get('debug')
-
-        github_client = GitHubClient(self.settings)
-        gitlab_client = GitLabClient(self.settings)
-        bitbucket_client = BitBucketClient(self.settings)
-
-        if self.schema_major_version < 3:
-            self.repo_info['dependencies'] = []
-
-        output = {}
-        for dependency in self.repo_info['dependencies']:
-            info = {
-                'sources': [self.repo]
-            }
-
-            for field in ['name', 'description', 'author', 'issues', 'load_order']:
-                if dependency.get(field):
-                    info[field] = dependency.get(field)
-
-            if 'name' not in info:
-                self.failed_sources[self.repo] = ProviderException(text.format(
-                    u'''
-                    No "name" value for one of the dependencies in the repository %s.
-                    ''',
-                    self.repo
-                ))
-                continue
-
-            releases = dependency.get('releases', [])
-
-            if releases and not isinstance(releases, list):
-                self.broken_dependencies[info['name']] = ProviderException(text.format(
-                    u'''
-                    The "releases" value is not an array for the dependency "%s" in the repository %s.
-                    ''',
-                    (info['name'], self.repo)
-                ))
-                continue
-
-            for release in releases:
-                if 'releases' not in info:
-                    info['releases'] = []
-
-                download_info = {}
-
-                # Make sure that explicit fields are copied over
-                for field in ['platforms', 'sublime_text', 'version', 'url', 'sha256']:
-                    if field in release:
-                        value = release[field]
-                        if field == 'url':
-                            value = update_url(value, debug)
-                        if field == 'platforms' and not isinstance(release['platforms'], list):
-                            value = [value]
-                        download_info[field] = value
-
-                if 'platforms' not in download_info:
-                    download_info['platforms'] = ['*']
-
-                tags = release.get('tags')
-                branch = release.get('branch')
-
-                if tags or branch:
-                    try:
-                        base = None
-                        if 'base' in release:
-                            base = release['base']
-
-                        if not base:
-                            raise ProviderException(text.format(
-                                u'''
-                                Missing release-level "base" key for one of the releases of the
-                                dependency "%s" in the repository %s.
-                                ''',
-                                (info['name'], self.repo)
-                            ))
-
-                        github_url = False
-                        gitlab_url = False
-                        bitbucket_url = False
-                        extra = None
-
-                        if tags:
-                            github_url = github_client.make_tags_url(base)
-                            gitlab_url = gitlab_client.make_tags_url(base)
-                            bitbucket_url = bitbucket_client.make_tags_url(base)
-                            if tags is not True:
-                                extra = tags
-
-                        if branch:
-                            github_url = github_client.make_branch_url(base, branch)
-                            gitlab_url = gitlab_client.make_branch_url(base, branch)
-                            bitbucket_url = bitbucket_client.make_branch_url(base, branch)
-
-                        if github_url:
-                            downloads = github_client.download_info(github_url, extra)
-                            url = github_url
-                        elif gitlab_url:
-                            downloads = gitlab_client.download_info(gitlab_url, extra)
-                            url = gitlab_url
-                        elif bitbucket_url:
-                            downloads = bitbucket_client.download_info(bitbucket_url, extra)
-                            url = bitbucket_url
-                        else:
-                            raise ProviderException(text.format(
-                                u'''
-                                Invalid "base" value "%s" for one of the releases of the
-                                dependency "%s" in the repository %s.
-                                ''',
-                                (base, info['name'], self.repo)
-                            ))
-
-                        if downloads is False:
-                            raise ProviderException(text.format(
-                                u'''
-                                No valid semver tags found at %s for the dependency
-                                "%s" in the repository %s.
-                                ''',
-                                (url, info['name'], self.repo)
-                            ))
-
-                        for download in downloads:
-                            del download['date']
-                            new_download = download_info.copy()
-                            new_download.update(download)
-                            info['releases'].append(new_download)
-
-                    except (DownloaderException, ClientException, ProviderException) as e:
-                        self.broken_dependencies[info['name']] = e
-                        continue
-
-                elif download_info:
-                    if 'url' in download_info:
-                        is_http = urlparse(download_info['url']).scheme == 'http'
-                        if is_http and 'sha256' not in download_info:
-                            self.broken_dependencies[info['name']] = ProviderException(text.format(
-                                u'''
-                                No "sha256" key for the non-secure "url" value in one of the
-                                releases of the dependency "%s" in the repository %s.
-                                ''',
-                                (info['name'], self.repo)
-                            ))
-                            continue
-
-                    info['releases'].append(download_info)
-
-            if info['name'] in self.broken_dependencies:
-                continue
-
-            # Make sure the dependency has the appropriate keys. We use a
-            # function here so that we can break out of multiple loops.
-            def is_missing_keys():
-                for key in ['author', 'releases', 'issues', 'description', 'load_order']:
-                    if key not in info:
-                        self.broken_dependencies[info['name']] = ProviderException(text.format(
-                            u'''
-                            No "%s" key for the dependency "%s" in the repository %s.
-                            ''',
-                            (key, info['name'], self.repo)
-                        ))
-                        return True
-                for release in info.get('releases', []):
-                    for key in ['version', 'url', 'sublime_text', 'platforms']:
-                        if key not in release:
-                            self.broken_dependencies[info['name']] = ProviderException(text.format(
-                                u'''
-                                Missing "%s" key for one of the releases of the dependency "%s" in the repository %s.
-                                ''',
-                                (key, info['name'], self.repo)
-                            ))
-                            return True
-                return False
-
-            if is_missing_keys():
-                continue
-
-            info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
-
-            output[info['name']] = info
-            yield (info['name'], info)
-
-        self.cache['get_dependencies'] = output
-
-    def get_packages(self, invalid_sources=None):
-        """
-        Provides access to the packages in this repository
-
-        :param invalid_sources:
-            A list of URLs that should not be fetched from
-
-        :raises:
-            ProviderException: when an error occurs trying to open a file
-            DownloaderException: when there is an issue downloading package info
-            ClientException: when there is an issue parsing package info
-
-        :return:
-            A generator of
-            (
-                'Package Name',
-                {
-                    'name': name,
-                    'description': description,
-                    'author': author,
-                    'homepage': homepage,
-                    'last_modified': last modified date,
-                    'releases': [
-                        {
-                            'sublime_text': compatible version,
-                            'platforms': [platform name, ...],
-                            'url': url,
-                            'date': date,
-                            'version': version,
-                            'dependencies': [dependency name, ...]
-                        }, ...
-                    ]
-                    'previous_names': [old_name, ...],
-                    'labels': [label, ...],
-                    'sources': [url, ...],
-                    'readme': url,
-                    'issues': url,
-                    'donate': url,
-                    'buy': url
-                }
-            )
-            tuples
-        """
-
-        if 'get_packages' in self.cache:
-            for key, value in self.cache['get_packages'].items():
-                yield (key, value)
-            return
-
-        if invalid_sources is not None and self.repo in invalid_sources:
-            return
-
-        if not self.fetch_and_validate():
-            return
-
-        debug = self.settings.get('debug')
-
-        github_client = GitHubClient(self.settings)
-        gitlab_client = GitLabClient(self.settings)
-        bitbucket_client = BitBucketClient(self.settings)
-
-        # Backfill the "previous_names" keys for old schemas
-        previous_names = {}
-        if self.schema_major_version < 2:
-            renamed = self.get_renamed_packages()
-            for old_name in renamed:
-                new_name = renamed[old_name]
-                if new_name not in previous_names:
-                    previous_names[new_name] = []
-                previous_names[new_name].append(old_name)
-
-        output = {}
-        for package in self.repo_info['packages']:
-            info = {
-                'sources': [self.repo]
-            }
-
-            copy_fields = [
-                'name',
-                'description',
-                'author',
-                'last_modified',
-                'previous_names',
-                'labels',
-                'homepage',
-                'readme',
-                'issues',
-                'donate',
-                'buy'
-            ]
-            for field in copy_fields:
-                if package.get(field):
-                    info[field] = package.get(field)
-
-            # Schema version 2.0 allows for grabbing details about a package, or its
-            # download from "details" urls. See the GitHubClient, GitLabClient
-            # and BitBucketClient classes for valid URLs.
-            if self.schema_major_version >= 2:
-                details = package.get('details')
-                releases = package.get('releases')
-
-                # Try to grab package-level details from GitHub, GitLab or BitBucket
-                if details:
-                    if invalid_sources is not None and details in invalid_sources:
-                        continue
-
-                    info['sources'].append(details)
-
-                    try:
-                        github_repo_info = github_client.repo_info(details)
-                        gitlab_repo_info = gitlab_client.repo_info(details)
-                        bitbucket_repo_info = bitbucket_client.repo_info(details)
-
-                        # When grabbing details, prefer explicit field values over the values
-                        # from the GitHub, GitLab or BitBucket API
-                        if github_repo_info:
-                            info = dict(chain(github_repo_info.items(), info.items()))
-                        elif gitlab_repo_info:
-                            info = dict(chain(gitlab_repo_info.items(), info.items()))
-                        elif bitbucket_repo_info:
-                            info = dict(chain(bitbucket_repo_info.items(), info.items()))
-                        else:
-                            raise ProviderException(text.format(
-                                u'''
-                                Invalid "details" value "%s" for one of the packages in the repository %s.
-                                ''',
-                                (details, self.repo)
-                            ))
-
-                    except (DownloaderException, ClientException, ProviderException) as e:
-                        if 'name' in info:
-                            self.broken_packages[info['name']] = e
-                        self.failed_sources[details] = e
-                        continue
-
-            if 'name' not in info:
-                self.failed_sources[self.repo] = ProviderException(text.format(
-                    u'''
-                    No "name" value for one of the packages in the repository %s.
-                    ''',
-                    self.repo
-                ))
-                continue
-
-            info['releases'] = []
-            if self.schema_major_version == 2:
-                # If no releases info was specified, also grab the download info from GH or BB
-                if not releases and details:
-                    releases = [{'details': details}]
-
-            if self.schema_major_version >= 2:
-                if not releases:
-                    e = ProviderException(text.format(
-                        u'''
-                        No "releases" value for the package "%s" in the repository %s.
-                        ''',
-                        (info['name'], self.repo)
-                    ))
-                    self.broken_packages[info['name']] = e
-                    continue
-
-                if not isinstance(releases, list):
-                    e = ProviderException(text.format(
-                        u'''
-                        The "releases" value is not an array or the package "%s" in the repository %s.
-                        ''',
-                        (info['name'], self.repo)
-                    ))
-                    self.broken_packages[info['name']] = e
-                    continue
-
-                # This allows developers to specify a GH or BB location to get releases from,
-                # especially tags URLs (https://github.com/user/repo/tags or
-                # https://bitbucket.org/user/repo#tags)
-                for release in releases:
-                    download_details = None
-                    download_info = {}
-
-                    # Make sure that explicit fields are copied over
-                    for field in ['platforms', 'sublime_text', 'version', 'url', 'date', 'dependencies']:
-                        if field in release:
-                            value = release[field]
-                            if field == 'url':
-                                value = update_url(value, debug)
-                            if field == 'platforms' and not isinstance(release['platforms'], list):
-                                value = [value]
-                            download_info[field] = value
-
-                    if 'platforms' not in download_info:
-                        download_info['platforms'] = ['*']
-
-                    if self.schema_major_version == 2:
-                        if 'sublime_text' not in download_info:
-                            download_info['sublime_text'] = '<3000'
-
-                        if 'details' in release:
-                            download_details = release['details']
-
-                            try:
-                                github_downloads = github_client.download_info(download_details)
-                                gitlab_downloads = gitlab_client.download_info(download_details)
-                                bitbucket_downloads = bitbucket_client.download_info(download_details)
-
-                                if github_downloads is False or gitlab_downloads is False \
-                                        or bitbucket_downloads is False:
-                                    raise ProviderException(text.format(
-                                        u'''
-                                        No valid semver tags found at %s for the package "%s" in the repository %s.
-                                        ''',
-                                        (download_details, info['name'], self.repo)
-                                    ))
-
-                                if github_downloads:
-                                    downloads = github_downloads
-                                elif gitlab_downloads:
-                                    downloads = gitlab_downloads
-                                elif bitbucket_downloads:
-                                    downloads = bitbucket_downloads
-                                else:
-                                    raise ProviderException(text.format(
-                                        u'''
-                                        Invalid "details" value "%s" under the "releases" key
-                                        for the package "%s" in the repository %s.
-                                        ''',
-                                        (download_details, info['name'], self.repo)
-                                    ))
-
-                                for download in downloads:
-                                    new_download = download_info.copy()
-                                    new_download.update(download)
-                                    info['releases'].append(new_download)
-
-                            except (DownloaderException, ClientException, ProviderException) as e:
-                                self.broken_packages[info['name']] = e
-
-                        elif download_info:
-                            info['releases'].append(download_info)
-
-                    elif self.schema_major_version == 3:
-                        tags = release.get('tags')
-                        branch = release.get('branch')
-
-                        if tags or branch:
-                            try:
-                                base = None
-                                if 'base' in release:
-                                    base = release['base']
-                                elif details:
-                                    base = details
-
-                                if not base:
-                                    raise ProviderException(text.format(
-                                        u'''
-                                        Missing root-level "details" key, or release-level "base" key
-                                        for one of the releases of the package "%s" in the repository %s.
-                                        ''',
-                                        (info['name'], self.repo)
-                                    ))
-
-                                github_url = False
-                                gitlab_url = False
-                                bitbucket_url = False
-                                extra = None
-
-                                if tags:
-                                    github_url = github_client.make_tags_url(base)
-                                    gitlab_url = gitlab_client.make_tags_url(base)
-                                    bitbucket_url = bitbucket_client.make_tags_url(base)
-                                    if tags is not True:
-                                        extra = tags
-
-                                if branch:
-                                    github_url = github_client.make_branch_url(base, branch)
-                                    gitlab_url = gitlab_client.make_branch_url(base, branch)
-                                    bitbucket_url = bitbucket_client.make_branch_url(base, branch)
-
-                                if github_url:
-                                    downloads = github_client.download_info(github_url, extra)
-                                    url = github_url
-                                elif gitlab_url:
-                                    downloads = gitlab_client.download_info(gitlab_url, extra)
-                                    url = gitlab_url
-                                elif bitbucket_url:
-                                    downloads = bitbucket_client.download_info(bitbucket_url, extra)
-                                    url = bitbucket_url
-                                else:
-                                    raise ProviderException(text.format(
-                                        u'''
-                                        Invalid "base" value "%s" for one of the releases of the
-                                        package "%s" in the repository %s.
-                                        ''',
-                                        (base, info['name'], self.repo)
-                                    ))
-
-                                if downloads is False:
-                                    raise ProviderException(text.format(
-                                        u'''
-                                        No valid semver tags found at %s for the
-                                        package "%s" in the repository %s.
-                                        ''',
-                                        (url, info['name'], self.repo)
-                                    ))
-
-                                for download in downloads:
-                                    new_download = download_info.copy()
-                                    new_download.update(download)
-                                    info['releases'].append(new_download)
-
-                            except (DownloaderException, ClientException, ProviderException) as e:
-                                self.broken_packages[info['name']] = e
-                                continue
-                        elif download_info:
-                            info['releases'].append(download_info)
-
-            # Schema version 1.0, 1.1 and 1.2 just require that all values be
-            # explicitly specified in the package JSON
-            else:
-                info['releases'] = platforms_to_releases(package, debug)
-
-            info['releases'] = version_sort(info['releases'], 'platforms', reverse=True)
-
-            if info['name'] in self.broken_packages:
-                continue
-
-            if 'author' not in info:
-                self.broken_packages[info['name']] = ProviderException(text.format(
-                    u'''
-                    No "author" key for the package "%s" in the repository %s.
-                    ''',
-                    (info['name'], self.repo)
-                ))
-                continue
-
-            if 'releases' not in info:
-                self.broken_packages[info['name']] = ProviderException(text.format(
-                    u'''
-                    No "releases" key for the package "%s" in the repository %s.
-                    ''',
-                    (info['name'], self.repo)
-                ))
-                continue
-
-            # Make sure all releases have the appropriate keys. We use a
-            # function here so that we can break out of multiple loops.
-            def has_broken_release():
-                for release in info.get('releases', []):
-                    for key in ['version', 'date', 'url', 'sublime_text', 'platforms']:
-                        if key not in release:
-                            self.broken_packages[info['name']] = ProviderException(text.format(
-                                u'''
-                                Missing "%s" key for one of the releases of the package "%s" in the repository %s.
-                                ''',
-                                (key, info['name'], self.repo)
-                            ))
-                            return True
-                return False
-
-            if has_broken_release():
-                continue
-
-            for field in ['previous_names', 'labels']:
-                if field not in info:
-                    info[field] = []
-
-            if 'readme' in info:
-                info['readme'] = update_url(info['readme'], debug)
-
-            for field in ['description', 'readme', 'issues', 'donate', 'buy']:
-                if field not in info:
-                    info[field] = None
-
-            if 'homepage' not in info:
-                info['homepage'] = self.repo
-
-            if 'releases' in info and 'last_modified' not in info:
-                # Extract a date from the newest release
-                date = '1970-01-01 00:00:00'
-                for release in info['releases']:
-                    if 'date' in release and release['date'] > date:
-                        date = release['date']
-                info['last_modified'] = date
-
-            if info['name'] in previous_names:
-                info['previous_names'].extend(previous_names[info['name']])
-
-            output[info['name']] = info
-            yield (info['name'], info)
-
-        self.cache['get_packages'] = output
-
-    def get_sources(self):
-        """
-        Return a list of current URLs that are directly referenced by the repo
-
-        :return:
-            A list of URLs and/or file paths
-        """
-
-        if not self.fetch_and_validate():
-            return []
-
-        output = [self.repo]
-        if self.schema_major_version >= 2:
-            for package in self.repo_info['packages']:
-                details = package.get('details')
-                if details:
-                    output.append(details)
-        return output
-
-    def get_renamed_packages(self):
-        """:return: A dict of the packages that have been renamed"""
-
-        if not self.fetch_and_validate():
-            return {}
-
-        if self.schema_major_version < 2:
-            return self.repo_info.get('renamed_packages', {})
-
-        output = {}
-        for package in self.repo_info['packages']:
-            if 'previous_names' not in package:
-                continue
-
-            previous_names = package['previous_names']
-            if not isinstance(previous_names, list):
-                previous_names = [previous_names]
-
-            for previous_name in previous_names:
-                output[previous_name] = package['name']
-
-        return output
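
Although the class is gone, a short sketch of how it was consumed may help when
reviewing the replacement providers (the URL and settings values below are
illustrative only):

    settings = {
        'cache_length': 300,
        'debug': False,
        'timeout': 30,
        'user_agent': 'Package Control',
    }
    provider = RepositoryProvider('https://example.com/packages.json', settings)

    # get_packages() lazily fetches, validates and yields (name, info) tuples
    for name, info in provider.get_packages():
        print(name, info['releases'][0]['version'])

    # per-source and per-package errors are collected rather than raised
    for source, error in provider.get_failed_sources():
        print('failed source:', source, error)
    for package, error in provider.get_broken_packages():
        print('broken package:', package, error)
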
diff --git a/app/lib/package_control/providers/schema_compat.py b/app/lib/package_control/providers/schema_compat.py
deleted file mode 100644
index 0f648fe..0000000
--- a/app/lib/package_control/providers/schema_compat.py
+++ /dev/null
@@ -1,47 +0,0 @@
-from ..download_manager import update_url
-
-
-def platforms_to_releases(info, debug):
-    """
-    Accepts a dict from a schema version 1.0, 1.1 or 1.2 package containing
-    a "platforms" key and converts it to a list of releases compatible with'
-    schema version 2.0.
-
-    :param info:
-        The dict of package info
-
-    :param debug:
-        If debug information should be shown
-
-    :return:
-        A list of release dicts
-    """
-
-    output = []
-
-    temp_releases = {}
-    platforms = info.get('platforms')
-
-    for platform in platforms:
-        for release in platforms[platform]:
-            key = '%s-%s' % (release['version'], release['url'])
-            if key not in temp_releases:
-                temp_releases[key] = {
-                    'sublime_text': '<3000',
-                    'version': release['version'],
-                    'date': info.get('last_modified', '2011-08-01 00:00:00'),
-                    'url': update_url(release['url'], debug),
-                    'platforms': []
-                }
-            if platform == '*':
-                temp_releases[key]['platforms'] = ['*']
-            elif temp_releases[key]['platforms'] != ['*']:
-                temp_releases[key]['platforms'].append(platform)
-
-    for key in temp_releases:
-        release = temp_releases[key]
-        if release['platforms'] == ['windows', 'linux', 'osx']:
-            release['platforms'] = ['*']
-        output.append(release)
-
-    return output
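
To make the removed conversion concrete: a schema 1.x "platforms" dict collapses
into schema 2.0 release dicts, merging platforms that share a version and URL
(sample data; update_url() is assumed to pass this ordinary URL through
unchanged):

    package_info = {
        'last_modified': '2013-01-01 00:00:00',
        'platforms': {
            'windows': [{'version': '1.0.0', 'url': 'https://example.com/pkg.zip'}],
            'linux': [{'version': '1.0.0', 'url': 'https://example.com/pkg.zip'}],
            'osx': [{'version': '1.0.0', 'url': 'https://example.com/pkg.zip'}],
        },
    }
    platforms_to_releases(package_info, debug=False)
    # [{'sublime_text': '<3000', 'version': '1.0.0',
    #   'date': '2013-01-01 00:00:00',
    #   'url': 'https://example.com/pkg.zip', 'platforms': ['*']}]
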
diff --git a/app/lib/package_control/providers/schema_version.py b/app/lib/package_control/providers/schema_version.py
new file mode 100644
index 0000000..3e5efbb
--- /dev/null
+++ b/app/lib/package_control/providers/schema_version.py
@@ -0,0 +1,33 @@
+from ..pep440 import PEP440Version
+
+
+class SchemaVersion(PEP440Version):
+    supported_versions = ('2.0', '3.0.0', '4.0.0')
+
+    def __init__(self, ver):
+        """
+        Custom version string parsing to maintain backward compatibility.
+
+        SemVer requires all of the major, minor and patch parts to be present in `ver`.
+
+        :param ver:
+            An integer, float or string containing a version string.
+
+        :raises:
+            ValueError if the version is not a valid or supported schema version.
+        """
+        try:
+            if isinstance(ver, int):
+                ver = float(ver)
+            if isinstance(ver, float):
+                ver = str(ver)
+        except ValueError:
+            raise ValueError('the "schema_version" is not a valid number.')
+
+        if ver not in self.supported_versions:
+            raise ValueError(
+                'the "schema_version" is not recognized. Must be one of: %s or %s.'
+                % (', '.join(self.supported_versions[:-1]), self.supported_versions[-1])
+            )
+
+        super().__init__(ver)
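
A quick sketch of how the new class behaves, as implied by the code above
(ordering semantics are inherited from PEP440Version and assumed here):

    SchemaVersion('3.0.0')                         # parses fine
    SchemaVersion(2.0)                             # coerced to '2.0', also fine
    SchemaVersion('2.0') < SchemaVersion('4.0.0')  # True, assuming PEP 440 ordering

    try:
        SchemaVersion('1.2')
    except ValueError as e:
        print(e)  # ... is not recognized. Must be one of: 2.0, 3.0.0 or 4.0.0.
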
diff --git a/app/lib/package_control/semver.py b/app/lib/package_control/semver.py
deleted file mode 100644
index 20baf16..0000000
--- a/app/lib/package_control/semver.py
+++ /dev/null
@@ -1,848 +0,0 @@
-"""pysemver: Semantic Version comparing for Python.
-
-Provides comparison of semantic versions via SemVer objects with rich comparisons, plus the
-ability to match a selector string against versions. Useful for version dependencies.
-Versions look like: "1.7.12+b.133"
-Selectors look like: ">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113"
-
-Example usages:
-    >>> SemVer(1, 2, 3, build=13)
-    SemVer("1.2.3+13")
-    >>> SemVer.valid("1.2.3.4")
-    False
-    >>> SemVer.clean("this is unimportant text 1.2.3-2 and will be stripped")
-    "1.2.3-2"
-    >>> SemVer("1.7.12+b.133").satisfies(">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113")
-    True
-    >>> SemSel(">1.7.0 || 1.6.9+b.111 - 1.6.9+b.113").matches(SemVer("1.7.12+b.133"),
-    ... SemVer("1.6.9+b.112"), SemVer("1.6.10"))
-    [SemVer("1.7.12+b.133"), SemVer("1.6.9+b.112")]
-    >>> min(_)
-    SemVer("1.6.9+b.112")
-    >>> _.patch
-    9
-
-Exported classes:
-    * SemVer(collections.namedtuple())
-        Parses semantic versions and defines methods for them. Supports rich comparisons.
-    * SemSel(tuple)
-        Parses semantic version selector strings and defines methods for them.
-    * SelParseError(Exception)
-        An error among others raised when parsing a semantic version selector failed.
-
-Other classes:
-    * SemComparator(object)
-    * SemSelAndChunk(list)
-    * SemSelOrChunk(list)
-
-Functions/Variables/Constants:
-    none
-
-
-Copyright (c) 2013 Zachary King, FichteFoll
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
-associated documentation files (the "Software"), to deal in the Software without restriction,
-including without limitation the rights to use, copy, modify, merge, publish, distribute,
-sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions: The above copyright notice and this
-permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
-NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES
-OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
-
-import re
-import sys
-from collections import namedtuple  # Python >=2.6
-
-
-__all__ = ('SemVer', 'SemSel', 'SelParseError')
-
-
-if sys.version_info[0] == 3:
-    basestring = str
-
-    def cmp(a, b):
-        return (a > b) - (a < b)
-
-
-# @functools.total_ordering would be nice here but was added in 2.7, __cmp__ is not Py3
-class SemVer(namedtuple("_SemVer", 'major, minor, patch, prerelease, build')):
-
-    """Semantic Version, consists of 3 to 5 components defining the version's adicity.
-
-    See http://semver.org/ (2.0.0-rc.1) for the standard mainly used for this implementation, few
-    changes have been made.
-
-    Information on this particular class and their instances:
-        - Immutable and hashable.
-        - Subclasses `collections.namedtuple`.
-        - Always `True` in boolean context.
-        - len() returns an int between 3 and 5; 4 when a pre-release is set and 5 when a build is
-          set. Note: Still returns 5 when build is set but not pre-release.
-        - Parts of the semantic version can be accessed by integer indexing, key (string) indexing,
-          slicing and getting an attribute. Returned slices are tuples. The leading '-' and '+' of
-          optional components are stripped from the stored values. Supported keys/attributes:
-          major, minor, patch, prerelease, build.
-
-          Examples:
-            s = SemVer("1.2.3-4.5+6")
-            s[2] == 3
-            s[:3] == (1, 2, 3)
-            s.prerelease == '4.5'
-            s.major == 1
-
-    Short information on semantic version structure:
-
-    Semantic versions consist of:
-        * a major component (numeric)
-        * a minor component (numeric)
-        * a patch component (numeric)
-        * a pre-release component [optional]
-        * a build component [optional]
-
-    The pre-release component is indicated by a hyphen '-' and followed by alphanumeric[1] sequences
-    separated by dots '.'. Sequences are compared numerically if applicable (both sequences of two
-    versions are numeric) or lexicographically. May also include hyphens. The existence of a
-    pre-release component lowers the actual version; the shorter pre-release component is considered
-    lower. An 'empty' pre-release component is considered to be the least version for this
-    major-minor-patch combination (e.g. "1.0.0-").
-
-    The build component may follow the optional pre-release component and is indicated by a plus '+'
-    followed by sequences, just as the pre-release component. Comparing works similarly. However the
-    existence of a build component raises the actual version and may also raise a pre-release. An
-    'empty' build component is considered to be the highest version for this
-    major-minor-patch-prerelease combination (e.g. "1.2.3+").
-
-
-    [1]: Regexp for a sequence: r'[0-9A-Za-z-]+'.
-    """
-
-    # Static class variables
-    _base_regex = r'''(?x)
-        (?P<major>[0-9]+)
-        \.(?P<minor>[0-9]+)
-        \.(?P<patch>[0-9]+)
-        (?:\-(?P<prerelease>(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?))?
-        (?:\+(?P<build>(?:[0-9A-Za-z-]+(?:\.[0-9A-Za-z-]+)*)?))?'''
-    _search_regex = re.compile(_base_regex)
-    _match_regex = re.compile('^%s$' % _base_regex)  # required because of $ anchor
-
-    # "Constructor"
-    def __new__(cls, *args, **kwargs):
-        """There are two different constructor styles that are allowed:
-        - Option 1 allows specification of a semantic version as a string and the option to "clean"
-          the string before parsing it.
-        - Option 2 allows specification of each component separately as one parameter.
-
-        Note that all the parameters specified in the following sections can be passed either as
-        positional or as named parameters while considering the usual Python rules for this. As
-        such, `SemVer(1, 2, minor=1)` will result in an exception and not in `SemVer("1.1.2")`.
-
-        Option 1:
-            Constructor examples:
-                SemVer("1.0.1")
-                SemVer("this version 1.0.1-pre.1 here", True)
-                SemVer(ver="0.0.9-pre-alpha+34", clean=False)
-
-            Parameters:
-                * ver (str)
-                    The string containing the version.
-                * clean = `False` (bool; optional)
-                    If this is true in boolean context, `SemVer.clean(ver)` is called before
-                    parsing.
-
-        Option 2:
-            Constructor examples:
-                SemVer(1, 0, 1)
-                SemVer(1, '0', prerelease='pre-alpha', patch=1, build=34)
-                SemVer(**dict(minor=2, major=1, patch=3))
-
-            Parameters:
-                * major (int, str, float ...)
-                * minor (...)
-                * patch (...)
-                    Major to patch components must be an integer or convertible to an int (e.g. a
-                    string or another number type).
-
-                * prerelease = `None` (str, int, float ...; optional)
-                * build = `None` (...; optional)
-                    Pre-release and build components should be a string (or number) type.
-                    Will be passed to `str()` if not already a string but the final string must
-                    match '^[0-9A-Za-z.-]*$'
-
-        Raises:
-            * TypeError
-                Invalid parameter type(s) or combination (e.g. option 1 and 2).
-            * ValueError
-                Invalid semantic version or option 2 parameters unconvertible.
-        """
-        ver, clean, comps = None, False, None
-        kw, length = kwargs.copy(), len(args) + len(kwargs)
-
-        def inv():
-            raise TypeError("Invalid parameter combination: args=%s; kwargs=%s" % (args, kwargs))
-
-        # Do validation and parse the parameters
-        if length == 0 or length > 5:
-            raise TypeError("SemVer accepts at least 1 and at most 5 arguments (%d given)" % length)
-
-        elif length < 3:
-            if len(args) == 2:
-                ver, clean = args
-            else:
-                ver = args[0] if args else kw.pop('ver', None)
-                clean = kw.pop('clean', clean)
-                if kw:
-                    inv()
-
-        else:
-            comps = list(args) + [kw.pop(cls._fields[k], None) for k in range(len(args), 5)]
-            if kw or any(comps[i] is None for i in range(3)):
-                inv()
-
-            typecheck = (int,) * 3 + (basestring,) * 2
-            for i, (v, t) in enumerate(zip(comps, typecheck)):
-                if v is None:
-                    continue
-                elif not isinstance(v, t):
-                    try:
-                        if i < 3:
-                            v = typecheck[i](v)
-                        else:  # The real `basestring` cannot be instantiated (Py2)
-                            v = str(v)
-                    except ValueError as e:
-                        # Modify the exception message. I can't believe this actually works
-                        e.args = ("Parameter #%d must be of type %s or convertable"
-                                  % (i, t.__name__),)
-                        raise
-                    else:
-                        comps[i] = v
-                if t is basestring and not re.match(r"^[0-9A-Za-z.-]*$", v):
-                    raise ValueError("Build and pre-release strings must match '^[0-9A-Za-z.-]*$'")
-
-        # Final adjustments
-        if not comps:
-            if ver is None or clean is None:
-                inv()
-            ver = clean and cls.clean(ver) or ver
-            comps = cls._parse(ver)
-
-        # Create the obj
-        return super(SemVer, cls).__new__(cls, *comps)
-
-    # Magic methods
-    def __str__(self):
-        return ('.'.join(map(str, self[:3]))
-                + ('-' + self.prerelease if self.prerelease is not None else '')
-                + ('+' + self.build if self.build is not None else ''))
-
-    def __repr__(self):
-        # Use the shortest representation - what would you prefer?
-        return 'SemVer("%s")' % str(self)
-        # return 'SemVer(%s)' % ', '.join('%s=%r' % (k, getattr(self, k)) for k in self._fields)
-
-    def __len__(self):
-        return 3 + (self.build is not None and 2 or self.prerelease is not None)
-
-    # Magic rich comparing methods
-    def __gt__(self, other):
-        return self._compare(other) == 1 if isinstance(other, SemVer) else NotImplemented
-
-    def __eq__(self, other):
-        return self._compare(other) == 0 if isinstance(other, SemVer) else NotImplemented
-
-    def __lt__(self, other):
-        return not (self > other or self == other)
-
-    def __ge__(self, other):
-        return not (self < other)
-
-    def __le__(self, other):
-        return not (self > other)
-
-    def __ne__(self, other):
-        return not (self == other)
-
-    # Utility (class-)methods
-    def satisfies(self, sel):
-        """Alias for `bool(sel.matches(self))` or `bool(SemSel(sel).matches(self))`.
-
-        See `SemSel.__init__()` and `SemSel.matches(*vers)` for possible exceptions.
-
-        Returns:
-            * bool: `True` if the version matches the passed selector, `False` otherwise.
-        """
-        if not isinstance(sel, SemSel):
-            sel = SemSel(sel)  # just "re-raise" exceptions
-
-        return bool(sel.matches(self))
-
-    @classmethod
-    def valid(cls, ver):
-        """Check if `ver` is a valid semantic version. Classmethod.
-
-        Parameters:
-            * ver (str)
-                The string that should be stripped.
-
-        Raises:
-            * TypeError
-                Invalid parameter type.
-
-        Returns:
-            * bool: `True` if it is valid, `False` otherwise.
-        """
-        if not isinstance(ver, basestring):
-            raise TypeError("%r is not a string" % ver)
-
-        if cls._match_regex.match(ver):
-            return True
-        else:
-            return False
-
-    @classmethod
-    def clean(cls, vers):
-        """Remove everything before and after a valid version string. Classmethod.
-
-        Parameters:
-            * vers (str)
-                The string that should be stripped.
-
-        Raises:
-            * TypeError
-                Invalid parameter type.
-
-        Returns:
-            * str:  The stripped version string. Only the first version is matched.
-            * None: No version found in the string.
-        """
-        if not isinstance(vers, basestring):
-            raise TypeError("%r is not a string" % vers)
-        m = cls._search_regex.search(vers)
-        if m:
-            return vers[m.start():m.end()]
-        else:
-            return None
-
-    # Private (class-)methods
-    @classmethod
-    def _parse(cls, ver):
-        """Private. Do not touch. Classmethod.
-        """
-        if not isinstance(ver, basestring):
-            raise TypeError("%r is not a string" % ver)
-
-        match = cls._match_regex.match(ver)
-
-        if match is None:
-            raise ValueError("'%s' is not a valid SemVer string" % ver)
-
-        g = list(match.groups())
-        for i in range(3):
-            g[i] = int(g[i])
-
-        return g  # Will be passed as namedtuple(...)(*g)
-
-    def _compare(self, other):
-        """Private. Do not touch.
-        self > other: 1
-        self = other: 0
-        self < other: -1
-        """
-        # Shorthand lambdas
-        def cp_len(t, i=0):
-            return cmp(len(t[i]), len(t[not i]))
-
-        for i, (x1, x2) in enumerate(zip(self, other)):
-            if i > 2:
-                if x1 is None and x2 is None:
-                    continue
-
-                # self is greater when other has a prerelease but self doesn't
-                # self is less    when other has a build      but self doesn't
-                if x1 is None or x2 is None:
-                    return int(2 * (i - 3.5)) * (1 - 2 * (x1 is None))
-
-                # self is less when other's build is empty
-                if i == 4 and (not x1 or not x2) and x1 != x2:
-                    return 1 - 2 * bool(x1)
-
-                # Split by '.' and use numeric comp or lexicographical order
-                t2 = [x1.split('.'), x2.split('.')]
-                for y1, y2 in zip(*t2):
-                    if y1.isdigit() and y2.isdigit():
-                        y1 = int(y1)
-                        y2 = int(y2)
-                    if y1 > y2:
-                        return 1
-                    elif y1 < y2:
-                        return -1
-
-                # The "longer" sub-version is greater
-                d = cp_len(t2)
-                if d:
-                    return d
-            else:
-                if x1 > x2:
-                    return 1
-                elif x1 < x2:
-                    return -1
-
-        # The versions equal
-        return 0
-
-
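
Beyond the doctests in the module docstring, the ordering rules implemented by
_compare() above boil down to cases like these:

    SemVer("1.2.3") < SemVer("1.2.10")      # True: numeric, not lexicographic
    SemVer("1.0.0-rc.1") < SemVer("1.0.0")  # True: a pre-release lowers a version
    SemVer("1.0.0+5") > SemVer("1.0.0")     # True: a build raises it
    SemVer.valid("1.2.3.4")                 # False
    SemVer.clean("some text 1.2.3-2 here")  # "1.2.3-2"
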
-class SemComparator(object):
-
-    """Holds a SemVer object and a comparing operator and can match these against a given version.
-
-    Constructor: SemComparator('<=', SemVer("1.2.3"))
-
-    Methods:
-        * matches(ver)
-    """
-    # Private properties
-    _ops = {
-        '>=': '__ge__',
-        '<=': '__le__',
-        '>':  '__gt__',
-        '<':  '__lt__',
-        '=':  '__eq__',
-        '!=': '__ne__'
-    }
-    _ops_satisfy = ('~', '!')
-
-    # Constructor
-    def __init__(self, op, ver):
-        """Constructor examples:
-        SemComparator('<=', SemVer("1.2.3"))
-        SemComparator('!=', SemVer("2.3.4"))
-
-        Parameters:
-            * op (str, False, None)
-                One of [>=, <=, >, <, =, !=, !, ~] or evaluates to `False` which defaults to '~'.
-                '~' means a "satisfy" operation where pre-releases and builds are ignored.
-                '!' is a negative "~".
-            * ver (SemVer)
-                Holds the version to compare with.
-
-        Raises:
-            * ValueError
-                Invalid `op` parameter.
-            * TypeError
-                Invalid `ver` parameter.
-        """
-        super(SemComparator, self).__init__()
-
-        if op and op not in self._ops_satisfy and op not in self._ops:
-            raise ValueError("Invalid value for `op` parameter.")
-        if not isinstance(ver, SemVer):
-            raise TypeError("`ver` parameter is not instance of SemVer.")
-
-        # Default to '~' for versions with no build or pre-release
-        op = op or '~'
-        # Fallback to '=' and '!=' if len > 3
-        if len(ver) != 3:
-            if op == '~':
-                op = '='
-            if op == '!':
-                op = '!='
-
-        self.op = op
-        self.ver = ver
-
-    # Magic methods
-    def __str__(self):
-        return (self.op or "") + str(self.ver)
-
-    # Utility methods
-    def matches(self, ver):
-        """Match the internal version (constructor) against `ver`.
-
-        Parameters:
-            * ver (SemVer)
-
-        Raises:
-            * TypeError
-                Could not compare `ver` against the version passed in the constructor with the
-                passed operator.
-
-        Returns:
-            * bool
-                `True` if the version matched the specified operator and internal version, `False`
-                otherwise.
-        """
-        if self.op in self._ops_satisfy:
-            # Compare only the first three parts (which are tuples) and directly
-            return bool((self.ver[:3] == ver[:3]) + (self.op == '!') * -1)
-        ret = getattr(ver, self._ops[self.op])(self.ver)
-        if ret == NotImplemented:
-            raise TypeError("Unable to compare %r with operator '%s'" % (ver, self.op))
-        return ret
-
-
-class SemSelAndChunk(list):
-
-    """Extends list and defines a few methods used for matching versions.
-
-    New elements should be added by calling `.add_child(op, ver)` which creates a SemComparator
-    instance and adds that to itself.
-
-    Methods:
-        * matches(ver)
-        * add_child(op, ver)
-    """
-
-    # Magic methods
-
-    def __str__(self):
-        return ' '.join(map(str, self))
-
-    # Utility methods
-
-    def matches(self, ver):
-        """Match all of the added children against `ver`.
-
-        Parameters:
-            * ver (SemVer)
-
-        Raises:
-            * TypeError
-                Invalid `ver` parameter.
-
-        Returns:
-            * bool:
-                `True` if *all* of the SemComparator children match `ver`, `False` otherwise.
-        """
-        if not isinstance(ver, SemVer):
-            raise TypeError("`ver` parameter is not instance of SemVer.")
-        return all(cp.matches(ver) for cp in self)
-
-    def add_child(self, op, ver):
-        """Create a SemComparator instance with the given parameters and appends that to self.
-
-        Parameters:
-            * op (str)
-            * ver (SemVer)
-        Both parameters are forwarded to `SemComparator.__init__`, see there for a more detailed
-        description.
-
-        Raises:
-            Exceptions raised by `SemComparator.__init__`.
-        """
-        self.append(SemComparator(op, SemVer(ver)))
-
-
-class SemSelOrChunk(list):
-
-    """Extends list and defines a few methods used for matching versions.
-
-    New elements should be added by calling `.new_child()` which returns a SemSelAndChunk
-    instance.
-
-    Methods:
-        * matches(ver)
-        * new_child()
-    """
-
-    # Magic methods
-
-    def __str__(self):
-        return ' || '.join(map(str, self))
-
-    # Utility methods
-
-    def matches(self, ver):
-        """Match all of the added children against `ver`.
-
-        Parameters:
-            * ver (SemVer)
-
-        Raises:
-            * TypeError
-                Invalid `ver` parameter.
-
-        Returns:
-            * bool
-                `True` if *any* of the SemSelAndChunk children matches `ver`.
-                `False` otherwise.
-        """
-        if not isinstance(ver, SemVer):
-            raise TypeError("`ver` parameter is not instance of SemVer.")
-        return any(ch.matches(ver) for ch in self)
-
-    def new_child(self):
-        """Creates a new SemSelAndChunk instance, appends it to self and returns it.
-
-        Returns:
-            * SemSelAndChunk: An empty instance.
-        """
-        ch = SemSelAndChunk()
-        self.append(ch)
-        return ch
-
-
-class SelParseError(Exception):
-
-    """An Exception raised when parsing a semantic selector failed.
-    """
-    pass
-
-
-# Subclass `tuple` because this is a somewhat simple method to make this immutable
-class SemSel(tuple):
-
-    """A Semantic Version Selector, holds a selector and can match it against semantic versions.
-
-    Information on this particular class and their instances:
-        - Immutable but not hashable because the content within might have changed.
-        - Subclasses `tuple` but does not behave like one.
-        - Always `True` in boolean context.
-        - len() returns the number of containing *and chunks* (see below).
-        - Iterable, iterates over containing *and chunks*.
-
-    When talking about "versions" it refers to a semantic version (SemVer). For information on how
-    versions compare to one another, see SemVer's doc string.
-
-    List for **comparators**:
-        "1.0.0"            matches the version 1.0.0 and all its pre-release and build variants
-        "!1.0.0"           matches any version that is not 1.0.0 or any of its variants
-        "=1.0.0"           matches only the version 1.0.0
-        "!=1.0.0"          matches any version that is not 1.0.0
-        ">=1.0.0"          matches versions greater than or equal 1.0.0
-        "<1.0.0"           matches versions smaller than 1.0.0
-        "1.0.0 - 1.0.3"    matches versions greater than or equal 1.0.0 thru 1.0.3
-        "~1.0"             matches versions greater than or equal 1.0.0 thru 1.0.9999 (and more)
-        "~1", "1.x", "1.*" match versions greater than or equal 1.0.0 thru 1.9999.9999 (and more)
-        "~1.1.2"           matches versions greater than or equal 1.1.2 thru 1.1.9999 (and more)
-        "~1.1.2+any"       matches versions greater than or equal 1.1.2+any thru 1.1.9999 (and more)
-        "*", "~", "~x"     match any version
-
-    Multiple comparators can be combined by using ' ' spaces and every comparator must match to make
-    the **and chunk** match a version.
-    Multiple and chunks can be combined to **or chunks** using ' || ' and match if any of the and
-    chunks split by these matches.
-
-    A complete example would look like:
-        ~1 || 0.0.3 || <0.0.2 >0.0.1+b.1337 || 2.0.x || 2.1.0 - 2.1.0+b.12 !=2.1.0+b.9
-
-    Methods:
-        * matches(*vers)
-    """
-    # Private properties
-    _fuzzy_regex = re.compile(r'''(?x)^
-        (?P<op>[<>]=?|~>?=?)?
-        (?:(?P<major>\d+)
-         (?:\.(?P<minor>\d+)
-          (?:\.(?P<patch>\d+)
-           (?P<other>[-+][a-zA-Z0-9-+.]*)?
-          )?
-         )?
-        )?$''')
-    _xrange_regex = re.compile(r'''(?x)^
-        (?P<op>[<>]=?|~>?=?)?
-        (?:(?P<major>\d+|[xX*])
-         (?:\.(?P<minor>\d+|[xX*])
-          (?:\.(?P<patch>\d+|[xX*]))?
-         )?
-        )
-        (?P<other>.*)$''')
-    _split_op_regex = re.compile(r'^(?P<op>=|[<>!]=?)?(?P<ver>.*)$')
-
-    # "Constructor"
-    def __new__(cls, sel):
-        """Constructor examples:
-            SemSel(">1.0.0")
-            SemSel("~1.2.9 !=1.2.12")
-
-        Parameters:
-            * sel (str)
-                A version selector string.
-
-        Raises:
-            * TypeError
-                `sel` parameter is not a string.
-            * ValueError
-                A version in the selector could not be matched as a SemVer.
-            * SelParseError
-                The version selector's syntax is unparsable; invalid ranges (fuzzy, xrange or
-                explicit range) or invalid '||'
-        """
-        chunk = cls._parse(sel)
-        return super(SemSel, cls).__new__(cls, (chunk,))
-
-    # Magic methods
-    def __str__(self):
-        return str(self._chunk)
-
-    def __repr__(self):
-        return 'SemSel("%s")' % self._chunk
-
-    def __len__(self):
-        # What would you expect?
-        return len(self._chunk)
-
-    def __iter__(self):
-        return iter(self._chunk)
-
-    # Read-only (private) attributes
-    @property
-    def _chunk(self):
-        return self[0]
-
-    # Utility methods
-    def matches(self, *vers):
-        """Match the selector against a selection of versions.
-
-        Parameters:
-            * *vers (str, SemVer)
-                Versions can be passed as strings and SemVer objects will be created with them.
-                May also be a mixed list.
-
-        Raises:
-            * TypeError
-                A version is not an instance of str (basestring) or SemVer.
-            * ValueError
-                A string version could not be parsed as a SemVer.
-
-        Returns:
-            * list
-                A list with all the versions that matched, may be empty. Use `max()` to determine
-                the highest matching version, or `min()` for the lowest.
-        """
-        ret = []
-        for v in vers:
-            if isinstance(v, str):
-                t = self._chunk.matches(SemVer(v))
-            elif isinstance(v, SemVer):
-                t = self._chunk.matches(v)
-            else:
-                raise TypeError("Invalid parameter type '%s': %s" % (v, type(v)))
-            if t:
-                ret.append(v)
-
-        return ret
-
-    # Private methods
-    @classmethod
-    def _parse(cls, sel):
-        """Private. Do not touch.
-
-        1. split by whitespace into tokens
-            a. start new and_chunk on ' || '
-            b. parse " - " ranges
-            c. replace "xX*" ranges with "~" equivalent
-            d. parse "~" ranges
-            e. parse unmatched token as comparator
-            ~. append to current and_chunk
-        2. return SemSelOrChunk
-
-        Raises TypeError, ValueError or SelParseError.
-        """
-        if not isinstance(sel, basestring):
-            raise TypeError("Selector must be a string")
-        if not sel:
-            raise ValueError("String must not be empty")
-
-        # Split selector by spaces and crawl the tokens
-        tokens = sel.split()
-        i = -1
-        or_chunk = SemSelOrChunk()
-        and_chunk = or_chunk.new_child()
-
-        while i + 1 < len(tokens):
-            i += 1
-            t = tokens[i]
-
-            # Replace x ranges with ~ selector
-            m = cls._xrange_regex.match(t)
-            m = m and m.groups('')
-            if m and any(not x.isdigit() for x in m[1:4]) and not m[0].startswith('>'):
-                # (do not match '>1.0' or '>*')
-                if m[4]:
-                    raise SelParseError("XRanges do not allow pre-release or build components")
-
-                # Only use digit parts and fail if digit found after non-digit
-                mm, xran = [], False
-                for x in m[1:4]:
-                    if x.isdigit():
-                        if xran:
-                            raise SelParseError("Invalid fuzzy range or XRange '%s'" % tokens[i])
-                        mm.append(x)
-                    else:
-                        xran = True
-                t = m[0] + '.'.join(mm)  # x for x in m[1:4] if x.isdigit())
-                # Append "~" if not already present
-                if not t.startswith('~'):
-                    t = '~' + t
-
-            # switch t:
-            if t == '||':
-                if i == 0 or tokens[i - 1] == '||' or i + 1 == len(tokens):
-                    raise SelParseError("OR range must not be empty")
-                # Start a new and_chunk
-                and_chunk = or_chunk.new_child()
-
-            elif t == '-':
-                # ' - ' range
-                i += 1
-                invalid = False
-                try:
-                    # If these result in exceptions, you know you're doing it wrong
-                    t = tokens[i]
-                    c = and_chunk[-1]
-                except (Exception):
-                    raise SelParseError("Invalid ' - ' range position")
-
-                # If there is an op in front of one of the bound versions
-                invalid = (c.op not in ('=', '~')
-                           or cls._split_op_regex.match(t).group(1) not in (None, '='))
-                if invalid:
-                    raise SelParseError("Invalid ' - ' range '%s - %s'"
-                                        % (tokens[i - 2], tokens[i]))
-
-                c.op = ">="
-                and_chunk.add_child('<=', t)
-
-            elif t == '':
-                # Multiple spaces
-                pass
-
-            elif t.startswith('~'):
-                m = cls._fuzzy_regex.match(t)
-                if not m:
-                    raise SelParseError("Invalid fuzzy range or XRange '%s'" % tokens[i])
-
-                mm, m = m.groups('')[1:4], m.groupdict('')  # mm: major to patch
-
-                # Minimum requirement
-                min_ver = ('.'.join(x or '0' for x in mm) + '-'
-                           if not m['other']
-                           else cls._split_op_regex.match(t[1:]).group('ver'))
-                and_chunk.add_child('>=', min_ver)
-
-                if m['major']:
-                    # Increase version before none (or second to last if '~1.2.3')
-                    e = [0, 0, 0]
-                    for j, d in enumerate(mm):
-                        if not d or j == len(mm) - 1:
-                            e[j - 1] = e[j - 1] + 1
-                            break
-                        e[j] = int(d)
-
-                    and_chunk.add_child('<', '.'.join(str(x) for x in e) + '-')
-
-                # else: just plain '~' or '*', or '~>X' which are already handled
-
-            else:
-                # A normal comparator
-                m = cls._split_op_regex.match(t).groupdict()  # this regex can't fail
-                and_chunk.add_child(**m)
-
-        # Finally return the or_chunk
-        return or_chunk
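
For reference, a minimal sketch of the selector API removed above (import path assumed; the module shipped as app/lib/package_control/semver.py). `SemSel.matches()` returns the subset of the supplied versions that satisfy the selector:

```
from semver import SemVer, SemSel

sel = SemSel('~1.2 || >=2.0.0 <3.0.0')
print(sel.matches('1.2.9', '1.3.0', '2.5.1'))  # ['1.2.9', '2.5.1']

# Passing SemVer objects lets max() pick the highest match correctly
print(max(sel.matches(SemVer('1.2.3'), SemVer('2.0.0'))))  # 2.0.0
```
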
diff --git a/app/lib/package_control/sys_path.py b/app/lib/package_control/sys_path.py
index 16b96bc..d182aef 100644
--- a/app/lib/package_control/sys_path.py
+++ b/app/lib/package_control/sys_path.py
@@ -2,9 +2,16 @@
 
 import os
 
+__cache_path = os.path.join(os.path.expanduser('~'), '.package_control')
+
+
+def set_cache_dir(cache_path):
+    global __cache_path
+    __cache_path = cache_path
+
 
 def pc_cache_dir():
-    return os.path.join(os.path.expanduser('~'), '.package_control')
+    return __cache_path
 
 
 def user_config_dir():
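
With the cache directory now injectable, a host application can point Package Control's caches at a writable location before any CA bundles are generated there. A sketch (path illustrative):

```
from app.lib.package_control import sys_path

sys_path.set_cache_dir('/var/cache/package_control')
assert sys_path.pc_cache_dir() == '/var/cache/package_control'
```
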
diff --git a/app/lib/package_control/unicode.py b/app/lib/package_control/unicode.py
deleted file mode 100644
index 4e98c9c..0000000
--- a/app/lib/package_control/unicode.py
+++ /dev/null
@@ -1,148 +0,0 @@
-import locale
-import sys
-import tempfile
-import os
-
-if sys.platform == 'win32':
-    import ctypes
-
-try:
-    str_cls = unicode
-except (NameError):
-    str_cls = str
-
-# Sublime Text on OS X does not seem to report the correct encoding
-# so we hard-code that to UTF-8
-_encoding = 'utf-8' if sys.platform == 'darwin' else locale.getpreferredencoding()
-
-_fallback_encodings = ['utf-8', 'cp1252']
-
-
-def unicode_from_os(e):
-    """
-    This is needed as some exceptions coming from the OS are
-    already encoded and so just calling unicode(e) will result
-    in an UnicodeDecodeError as the string isn't in ascii form.
-
-    :param e:
-        The exception to get the value of
-
-    :return:
-        The unicode version of the exception message
-    """
-
-    if sys.version_info >= (3,):
-        return str(e)
-
-    try:
-        if isinstance(e, Exception):
-            e = e.args[0]
-
-        if isinstance(e, str_cls):
-            return e
-
-        if isinstance(e, int):
-            e = str(e)
-
-        return str_cls(e, _encoding)
-
-    # If the "correct" encoding did not work, try some defaults, and then just
-    # obliterate characters that we can't seem to decode properly
-    except UnicodeDecodeError:
-        for encoding in _fallback_encodings:
-            try:
-                return str_cls(e, encoding, errors='strict')
-            except (Exception):
-                pass
-    return str_cls(e, errors='replace')
-
-
-def tempfile_unicode_patch():
-    """
-    This function monkey-patches the tempfile module in ST2 on Windows to
-    properly handle non-ASCII paths from environmental variables being
-    used as the basis for a temp directory.
-    """
-
-    if sys.version_info >= (3,):
-        return
-
-    if sys.platform != 'win32':
-        return
-
-    if hasattr(tempfile._candidate_tempdir_list, 'patched'):
-        return
-
-    unicode_error = False
-    for var in ['TMPDIR', 'TEMP', 'TMP']:
-        dir_ = os.getenv(var)
-        if not dir_:
-            continue
-        # If the path contains non-unicode chars that are also
-        # non-ASCII, then this will fail
-        try:
-            dir_ + u''
-        except (UnicodeDecodeError):
-            unicode_error = True
-            break
-        # Windows paths cannot contain a ?, so this is evidence
-        # that a unicode decoding issue happened
-        if dir_.find('?') != -1:
-            unicode_error = True
-            break
-
-    if not unicode_error:
-        return
-
-    kernel32 = ctypes.windll.kernel32
-
-    kernel32.GetEnvironmentStringsW.argtypes = []
-    kernel32.GetEnvironmentStringsW.restype = ctypes.c_void_p
-
-    str_pointer = kernel32.GetEnvironmentStringsW()
-    string = ctypes.wstring_at(str_pointer)
-
-    env_vars = {}
-    while string != '':
-        if string[0].isalpha():
-            name, value = string.split(u'=', 1)
-            env_vars[name.encode('ascii')] = value
-        # Include the trailing null byte, and measure each
-        # char as 2 bytes since Windows uses UTF-16 for
-        # wide chars
-        str_pointer += (len(string) + 1) * 2
-
-        string = ctypes.wstring_at(str_pointer)
-
-    # This is pulled from tempfile.py in Python 2.6 and patched to grab the
-    # temp path environmental variables as unicode from the call to
-    # GetEnvironmentStringsW()
-    def _candidate_tempdir_list():
-        dirlist = []
-
-        # First, try the environment.
-        for envname in 'TMPDIR', 'TEMP', 'TMP':
-            dirname = env_vars.get(envname)
-            if dirname:
-                dirlist.append(dirname)
-
-        # Failing that, try OS-specific locations.
-        if os.name == 'riscos':
-            dirname = os.getenv('Wimp$ScrapDir')
-            if dirname:
-                dirlist.append(dirname)
-        elif os.name == 'nt':
-            dirlist.extend([r'c:\temp', r'c:\tmp', r'\temp', r'\tmp'])
-        else:
-            dirlist.extend(['/tmp', '/var/tmp', '/usr/tmp'])
-
-        # As a last resort, the current directory.
-        try:
-            dirlist.append(os.getcwd())
-        except (AttributeError, os.error):
-            dirlist.append(os.curdir)
-
-        return dirlist
-
-    tempfile._candidate_tempdir_list = _candidate_tempdir_list
-    setattr(tempfile._candidate_tempdir_list, 'patched', True)
diff --git a/app/lib/package_control/versions.py b/app/lib/package_control/versions.py
deleted file mode 100644
index f00a2ac..0000000
--- a/app/lib/package_control/versions.py
+++ /dev/null
@@ -1,157 +0,0 @@
-import re
-
-from .semver import SemVer
-from .console_write import console_write
-
-
-def semver_compat(v):
-    """
-    Converts a string version number into SemVer. If the version is based on
-    a date, converts to 0.0.1+yyyy.mm.dd.hh.mm.ss.
-
-    :param v:
-        A string, dict with 'version' key, or a SemVer object
-
-    :return:
-        A string that is a valid semantic version number
-    """
-
-    if isinstance(v, SemVer):
-        # SemVer only defined __str__, not __unicode__, so we always use str()
-        return str(v)
-
-    # Allowing passing in a dict containing info about a package
-    if isinstance(v, dict):
-        if 'version' not in v:
-            return '0'
-        v = v['version']
-
-    # Trim v off of the front
-    v = re.sub('^v', '', v)
-
-    # We map all date-based version numbers below any explicit version
-    # so that developers may switch to explicit versioning from
-    # GitHub/BitBucket versioning based on commit dates.
-    #
-    # When translating dates into semver, the release segment is pinned
-    # to 0.0.1 and the full timestamp becomes a six-part numeric build
-    # suffix. The result looks like:
-    # 0.0.1+2012.11.10.23.59.59
-    date_match = re.match(r'(\d{4})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})\.(\d{2})$', v)
-    if date_match:
-        v = '0.0.1+%s.%s.%s.%s.%s.%s' % date_match.groups()
-
-    # This handles versions that were valid pre-semver with 4+ dotted
-    # groups, such as 1.6.9.0
-    four_plus_match = re.match(r'(\d+\.\d+\.\d+)[T\.](\d+(\.\d+)*)$', v)
-    if four_plus_match:
-        v = '%s+%s' % (four_plus_match.group(1), four_plus_match.group(2))
-
-    # Semver must have major, minor, patch
-    elif re.match(r'^\d+$', v):
-        v += '.0.0'
-    elif re.match(r'^\d+\.\d+$', v):
-        v += '.0'
-    return v
-
-
-def version_comparable(string):
-    return SemVer(semver_compat(string))
-
-
-def version_exclude_prerelease(versions):
-    """
-    Remove prerelease versions for a list of SemVer versions
-
-    :param versions:
-        The list of versions to filter
-
-    :return:
-        The list of versions with pre-releases removed
-    """
-
-    output = []
-    for version in versions:
-        if SemVer(semver_compat(version)).prerelease is not None:
-            continue
-        output.append(version)
-    return output
-
-
-def version_process(versions, filter_prefix):
-    """
-    Filter a list of versions to ones that are valid SemVers. If a prefix
-    is provided, only match versions starting with the prefix and split it
-    from the version.
-
-    :param versions:
-        The list of versions to filter
-
-    :param filter_prefix:
-        Remove this prefix from the version before checking if it is a valid
-        SemVer. If this prefix is not present, skip the version.
-
-    :return:
-        A list of dicts, each of which has the keys "version" and "prefix"
-    """
-
-    output = []
-    for version in versions:
-        prefix = ''
-
-        if filter_prefix:
-            if version[0:len(filter_prefix)] != filter_prefix:
-                continue
-            check_version = version[len(filter_prefix):]
-            prefix = filter_prefix
-
-        else:
-            check_version = re.sub('^v', '', version)
-            if check_version != version:
-                prefix = 'v'
-
-        if not SemVer.valid(check_version):
-            continue
-
-        output.append({'version': check_version, 'prefix': prefix})
-    return output
-
-
-def version_sort(sortable, *fields, **kwargs):
-    """
-    Sorts a list that is a list of versions, or dicts with a 'version' key.
-    Can also secondly sort by another field.
-
-    :param sortable:
-        The list to sort
-
-    :param *fields:
-        If sortable is a list of dicts, perform secondary sort via these fields,
-        in order
-
-    :param **kwargs:
-        Keyword args to pass on to sorted()
-
-    :return:
-        A copy of sortable that is sorted according to SemVer rules
-    """
-
-    def _version_sort_key(item):
-        result = SemVer(semver_compat(item))
-        if fields:
-            values = [result]
-            for field in fields:
-                values.append(item[field])
-            result = tuple(values)
-        return result
-
-    try:
-        return sorted(sortable, key=_version_sort_key, **kwargs)
-    except (ValueError) as e:
-        console_write(
-            u'''
-            Error sorting versions - %s
-            ''',
-            e
-        )
-        return []
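
The observable behavior of the removed helpers, traced from the code above (a sketch):

```
semver_compat('v1.6')                 # '1.6.0' - 'v' stripped, padded to 3 parts
semver_compat('2020.01.15.10.30.00')  # '0.0.1+2020.01.15.10.30.00' (date-based)
semver_compat('1.6.9.0')              # '1.6.9+0' - 4th group becomes build metadata
version_sort(['1.0.0', '0.9.0', '1.0.0-beta'])
# ['0.9.0', '1.0.0-beta', '1.0.0'] - a pre-release sorts below its release
```
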
diff --git a/app/lib/readme_images.py b/app/lib/readme_images.py
index a2ea0bc..a054a4c 100644
--- a/app/lib/readme_images.py
+++ b/app/lib/readme_images.py
@@ -3,7 +3,7 @@
 import hashlib
 from urllib.parse import urlparse
 
-from .package_control.download_manager import downloader
+from .package_control.download_manager import http_get
 from .package_control.downloaders.downloader_exception import DownloaderException
 
 
@@ -14,39 +14,38 @@
 def cache(settings, rendered_html):
     urls = re.findall('<img [^>]*src="([^"]+)"', rendered_html)
 
     for url in urls:
         ext = os.path.splitext(urlparse(url).path)[1]
 
         try:
-            with downloader(url, settings) as manager:
-                data = manager.fetch(url, 'fetching readme image')
-
-                # Detect file extension by file contents
-                if ext == '':
-                    length = len(data)
-
-                    if data[0:8] == b'\x89PNG\r\n\x1A\n':
-                        ext = '.png'
-                    elif data[0:6] == b'GIF87a' or data[0:6] == b'GIF89a':
-                        ext = '.gif'
-                    elif (length > 10 and data[6:10] in [b'JFIF', b'Exif']) or (length > 24 and data[0:4] == b'\xFF\xD8\xFF\xED' and data[20:24] == b'8BIM'):
-                        ext = '.jpg'
-                    elif data[0:128].find(b'<svg') != -1:
-                        ext = '.svg'
-                    else:
-                        continue
-        except (DownloaderException):
-                regex = '<img [^>]*src="' + re.escape(url) + '"[^>]+>'
-                rendered_html = re.sub(regex, '', rendered_html)
+            data = http_get(url, settings, 'fetching readme image')
+
+            # Detect file extension by file contents
+            if ext == '':
+                length = len(data)
+
+                if data[0:8] == b'\x89PNG\r\n\x1A\n':
+                    ext = '.png'
+                elif data[0:6] == b'GIF87a' or data[0:6] == b'GIF89a':
+                    ext = '.gif'
+                elif (length > 10 and data[6:10] in [b'JFIF', b'Exif']) or (length > 24 and data[0:4] == b'\xFF\xD8\xFF\xED' and data[20:24] == b'8BIM'):
+                    ext = '.jpg'
+                elif data[0:128].find(b'<svg') != -1:
+                    ext = '.svg'
+                else:
+                    continue
+        except DownloaderException:
+            regex = '<img [^>]*src="' + re.escape(url) + '"[^>]+>'
+            rendered_html = re.sub(regex, '', rendered_html)
 
     return rendered_html
 
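The extension sniffing above keys off well-known magic bytes. A standalone sketch of the same checks:

```
def sniff_image_ext(data):
    """Best-effort image extension detection from file contents."""
    if data[0:8] == b'\x89PNG\r\n\x1A\n':
        return '.png'
    if data[0:6] in (b'GIF87a', b'GIF89a'):
        return '.gif'
    if len(data) > 10 and data[6:10] in (b'JFIF', b'Exif'):
        return '.jpg'
    if b'<svg' in data[0:128]:
        return '.svg'
    return ''
```
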
diff --git a/app/lib/refresh_packages.py b/app/lib/refresh_packages.py
index cc0e656..6bf6101 100644
--- a/app/lib/refresh_packages.py
+++ b/app/lib/refresh_packages.py
@@ -4,23 +4,24 @@
 import traceback
 
 from .package_control.providers import REPOSITORY_PROVIDERS, CHANNEL_PROVIDERS
-from .package_control.download_manager import downloader, close_all_connections
+from .package_control.downloaders.rate_limit_exception import RateLimitException, RateLimitSkipException
+from .package_control.download_manager import close_all_connections
 from .package_control.clients.readme_client import ReadmeClient
 from .. import config
-from ..models import package, dependency
+from ..models import package, library
 from .readme_renderer import render
 from .readme_images import cache
 
 
-def refresh_packages(invalid_sources=None, invalid_dependency_sources=None):
+def refresh_packages(invalid_package_sources=None, invalid_library_sources=None):
     """
     Refresh the package information in the database
 
-    :param invalid_sources:
+    :param invalid_package_sources:
         A list of source URLs to ignore
 
-    :param invalid_dependency_sources:
-        A list of dependency source URLs to ignore
+    :param invalid_library_sources:
+        A list of library source URLs to ignore
 
     :return:
         A list of the names of all of the packages that were refreshed
@@ -83,43 +84,44 @@ def resolve_path(path):
 
     readme_client = ReadmeClient(settings)
 
-    if invalid_sources:
+    if invalid_package_sources:
         if search and replace:
             mapped_invalid_sources = []
-            for source in invalid_sources:
+            for source in invalid_package_sources:
                 if source not in ignore:
                     source = source.replace(replace, search)
                 mapped_invalid_sources.append(source)
-            invalid_sources = mapped_invalid_sources
+            invalid_package_sources = mapped_invalid_sources
 
-    if invalid_dependency_sources:
+    if invalid_library_sources:
         if search and replace:
-            mapped_invalid_dependency_sources = []
-            for source in invalid_dependency_sources:
+            mapped_invalid_library_sources = []
+            for source in invalid_library_sources:
                 if source not in ignore:
                     source = source.replace(replace, search)
-                mapped_invalid_dependency_sources.append(source)
-            invalid_dependency_sources = mapped_invalid_dependency_sources
+                mapped_invalid_library_sources.append(source)
+            invalid_library_sources = mapped_invalid_library_sources
 
-    if not invalid_dependency_sources:
-        invalid_dependency_sources = None
+    if not invalid_library_sources:
+        invalid_library_sources = None
 
+    repositories = []
     for provider_cls in CHANNEL_PROVIDERS:
-        if not provider_cls.match_url(channel):
-            continue
-        provider = provider_cls(channel, settings)
-        repositories = provider.get_repositories()
-        break
+        if provider_cls.match_url(channel):
+            repositories = provider_cls(channel, settings).get_repositories()
+            break
+
+    accepted_errors = (RateLimitException, RateLimitSkipException)
 
     affected_packages = []
-    affected_dependencies = []
+    affected_libraries = []
     for repository in repositories:
         for provider_cls in REPOSITORY_PROVIDERS:
             if not provider_cls.match_url(repository):
                 continue
 
             provider = provider_cls(repository, settings)
-            for name, info in provider.get_packages(invalid_sources):
+            for name, info in provider.get_packages(invalid_package_sources):
                 try:
                     if search and replace:
                         mapped_sources = []
@@ -155,7 +157,7 @@ def resolve_path(path):
                     traceback.print_exc(file=sys.stderr)
                     print('-' * 60, file=sys.stderr)
 
-            for name, info in provider.get_dependencies(invalid_dependency_sources):
+            for name, info in provider.get_libraries(invalid_library_sources):
                 try:
                     if search and replace:
                         mapped_sources = []
@@ -163,27 +165,33 @@ def resolve_path(path):
                             mapped_sources.append(source.replace(search, replace))
                         info['sources'] = mapped_sources
 
-                    dependency.mark_found(name)
-                    dependency.store(info)
-                    affected_dependencies.append(name)
+                    library.mark_found(name)
+                    library.store(info)
+                    affected_libraries.append(name)
 
                 except (Exception) as e:
-                    print('Exception processing dependency "%s":' % name, file=sys.stderr)
+                    print('Exception processing library "%s":' % name, file=sys.stderr)
                     print('-' * 60, file=sys.stderr)
                     traceback.print_exc(file=sys.stderr)
                     print('-' * 60, file=sys.stderr)
 
             for source, exception in provider.get_failed_sources():
+                if isinstance(exception, accepted_errors):
+                    continue
                 package.modify.mark_missing(source, clean_url(exception), needs_review(exception))
-                dependency.mark_missing(source, clean_url(exception), needs_review(exception))
+                library.mark_missing(source, clean_url(exception), needs_review(exception))
 
             for package_name, exception in provider.get_broken_packages():
+                if isinstance(exception, accepted_errors):
+                    continue
                 package.modify.mark_missing_by_name(package_name, clean_url(exception), needs_review(exception))
 
-            for dependency_name, exception in provider.get_broken_dependencies():
-                dependency.mark_missing_by_name(dependency_name, clean_url(exception), needs_review(exception))
+            for library_name, exception in provider.get_broken_libraries():
+                if isinstance(exception, accepted_errors):
+                    continue
+                library.mark_missing_by_name(library_name, clean_url(exception), needs_review(exception))
 
             break
 
     close_all_connections()
-    return (affected_packages, affected_dependencies)
+    return (affected_packages, affected_libraries)
diff --git a/app/lib/removed_packages.py b/app/lib/removed_packages.py
index 9574c87..9c72e59 100644
--- a/app/lib/removed_packages.py
+++ b/app/lib/removed_packages.py
@@ -3,8 +3,7 @@
 
 from .package_control.providers import REPOSITORY_PROVIDERS, CHANNEL_PROVIDERS
 from .. import config
-from .connection import connection
-from ..models import package, dependency
+from ..models import package, library
 
 
 
@@ -41,7 +40,7 @@ def mark():
             package.modify.mark_removed(info['name'])
             print('Package "%s" marked as removed' % info['name'])
 
-    for info in dependency.old():
+    for info in library.old():
         mark_removed = False
 
         for source in info['sources']:
@@ -53,8 +52,8 @@ def mark():
             mark_removed = True
 
         if mark_removed:
-            dependency.mark_removed(info['name'])
-            print('Dependency "%s" marked as removed' % info['name'])
+            library.mark_removed(info['name'])
+            print('Library "%s" marked as removed' % info['name'])
 
 
 def find_active_sources():
diff --git a/app/lib/run_repo_tests.py b/app/lib/run_repo_tests.py
index b74f6a5..00d0360 100644
--- a/app/lib/run_repo_tests.py
+++ b/app/lib/run_repo_tests.py
@@ -11,8 +11,9 @@
 from urllib.error import URLError
 import imp
 
-from .package_control.providers import RepositoryProvider
-from .package_control.download_manager import downloader, close_all_connections
+from .package_control.providers import JsonRepositoryProvider
+from .package_control.providers.schema_version import SchemaVersion
+from .package_control.download_manager import close_all_connections, http_get
 from .package_control.downloaders.downloader_exception import DownloaderException
 from .. import config
 from .st_package_reviewer.check import file as file_checkers
@@ -137,9 +138,9 @@ def run_tests(spec):
         tmp_package_path = os.path.join(tmpdir, '%s.sublime-package' % name)
         tmp_package_dir = os.path.join(tmpdir, name)
         os.mkdir(tmp_package_dir)
-        with open(tmp_package_path, 'wb') as package_file, downloader(url, settings) as manager:
+        with open(tmp_package_path, 'wb') as package_file:
             try:
-                package_file.write(manager.fetch(url, 'fetching package'))
+                package_file.write(http_get(url, settings, 'fetching package'))
             except DownloaderException as e:
                 errors.append(format_report(str(e)))
                 return build_result(errors, warnings)
@@ -246,10 +247,9 @@ def clean_message(exception):
             error = re.sub(regex, '', error)
         return error.replace(' in the repository https://example.com', '')
 
-    provider = RepositoryProvider('https://example.com', settings)
-    provider.schema_version = '3.0.0'
-    provider.schema_major_version = 3
-    provider.repo_info = {'schema_version': '3.0.0', 'packages': [spec], 'dependencies': []}
+    provider = JsonRepositoryProvider('https://example.com', settings)
+    provider.schema_version = SchemaVersion('4.0.0')
+    provider.repo_info = {'schema_version': '4.0.0', 'packages': [spec], 'libraries': []}
 
     try:
         for name, info in provider.get_packages():
@@ -509,13 +509,12 @@ def test_pull_request(pr):
                     output.append('  - ERROR: External repositories added to the default channel must be served over HTTPS')
                     # Continue with testing regardless
 
-                with downloader(repo, settings) as manager:
-                    try:
-                        raw_data = manager.fetch(repo, 'fetching repository')
-                    except DownloaderException as e:
-                        errors = True
-                        output.append('  - ERROR: %s' % str(e))
-                        continue
+                try:
+                    raw_data = http_get(repo, settings, 'fetching repository')
+                except DownloaderException as e:
+                    errors = True
+                    output.append('  - ERROR: %s' % str(e))
+                    continue
 
                 try:
                     raw_data = raw_data.decode('utf-8')
@@ -541,9 +540,9 @@ def test_pull_request(pr):
                     errors = True
                     continue
 
-                if repo_json['schema_version'] != '3.0.0':
+                if repo_json['schema_version'] not in ('3.0.0', '4.0.0'):
                     errors = True
-                    output.append('  - ERROR: "schema_version" must be "3.0.0"')
+                    output.append('  - ERROR: "schema_version" must be "3.0.0" or "4.0.0"')
                     continue
 
                 num_pkgs = 0
diff --git a/app/lib/st_package_reviewer/check/file/check_messages.py b/app/lib/st_package_reviewer/check/file/check_messages.py
index 45be76f..5ea9b31 100644
--- a/app/lib/st_package_reviewer/check/file/check_messages.py
+++ b/app/lib/st_package_reviewer/check/file/check_messages.py
@@ -1,10 +1,40 @@
 import json
 import re
 
-from ...lib.semver import SemVer
-
 from . import FileChecker
 
+_semver_regex = re.compile(
+    r"""
+    ^\s*
+    v?
+    (?P<release>[0-9]+(?:\.[0-9]+){2})                # semver release segment
+    (?P<pre>                                          # pre-release
+        [-_.]?
+        (?P<pre_l>alpha|a|beta|b|prerelease|preview|pre|c|rc)
+        [-_.]?
+        (?P<pre_n>[0-9]+)?
+    )?
+    (?P<post>                                         # post release
+        (?:-(?P<post_n1>[0-9]+))
+        |
+        (?:
+            [-_.]?
+            (?P<post_l>patch|post|rev|r)
+            [-_.]?
+            (?P<post_n2>[0-9]+)?
+        )
+    )?
+    (?P<dev>                                          # dev release
+        [-_.]?
+        (?P<dev_l>development|develop|devel|dev)
+        [-_.]?
+        (?P<dev_n>[0-9]+)?
+    )?
+    \s*$
+    """,
+    re.VERBOSE,
+)
+
 
 class CheckMessages(FileChecker):
 
@@ -42,7 +72,7 @@ def check(self):
             for key, rel_path in data.items():
                 if key == "install":
                     pass
-                elif SemVer.valid(re.sub(prefix_regex, '', key)):
+                elif _semver_regex.match(re.sub(prefix_regex, '', key)):
                     pass
                 else:
                     self.fail("Key {!r} is not 'install' or a valid semantic version"
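
A few message-key versions the inlined regex is expected to accept or reject (assertions assume the group names reconstructed above):

```
assert _semver_regex.match('1.2.3')
assert _semver_regex.match('v2.0.0-beta2')  # pre-release suffix
assert _semver_regex.match('1.0.0.dev1')    # dev-release suffix
assert not _semver_regex.match('1.2')       # release segment needs three parts
```
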
diff --git a/app/lib/store_asset.py b/app/lib/store_asset.py
new file mode 100644
index 0000000..a97b4cd
--- /dev/null
+++ b/app/lib/store_asset.py
@@ -0,0 +1,50 @@
+import bz2
+import gzip
+import hashlib
+import os
+
+def store_asset(filename, content):
+    """
+    Stores an asset uncompressed and as gzip and bzip2 archives.
+
+    :param filename:
+        The filename
+    :param content:
+        The content
+    """
+    new_filename     = filename + '-new'
+    new_filename_gz  = filename + '.gz-new'
+    new_filename_bz2 = filename + '.bz2-new'
+    filename_gz      = filename + '.gz'
+    filename_bz2     = filename + '.bz2'
+    filename_sha512  = filename + '.sha512'
+
+    encoded_content = content.encode('utf-8')
+    content_hash = hashlib.sha512(encoded_content).hexdigest().encode('utf-8')
+
+    # Abort if the content hasn't changed, so the http server continues to
+    # return 304 for clients that already have a locally cached copy.
+    try:
+        with open(filename_sha512, 'rb') as f:
+            if f.read().strip() == content_hash:
+                return
+    except FileNotFoundError:
+        pass
+
+    with open(new_filename, 'wb') as f:
+        f.write(encoded_content)
+
+    os.rename(new_filename, filename)
+
+    with gzip.open(new_filename_gz, 'w') as f:
+        f.write(encoded_content)
+
+    os.rename(new_filename_gz, filename_gz)
+
+    with bz2.open(new_filename_bz2, 'w') as f:
+        f.write(encoded_content)
+
+    os.rename(new_filename_bz2, filename_bz2)
+
+    with open(filename_sha512, 'wb') as f:
+        f.write(content_hash)
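
Typical use when publishing a channel document (a sketch; the path and payload are illustrative):

```
import json
from app.lib.store_asset import store_asset

content = json.dumps({'schema_version': '4.0.0', 'packages_cache': {}})
store_asset('/var/www/assets/channel_v4.json', content)
# Writes channel_v4.json plus .gz, .bz2 and .sha512 siblings, and is a
# no-op when the SHA-512 of the content is unchanged.
```
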
diff --git a/app/models/dependency.py b/app/models/library.py
similarity index 75%
rename from app/models/dependency.py
rename to app/models/library.py
index ebe78e6..180bcfb 100644
--- a/app/models/dependency.py
+++ b/app/models/library.py
@@ -4,16 +4,15 @@
 from ..lib.connection import connection
 
 
-def all(limit_one_per_dependency=False):
+def all(limit_one_per_library=False):
     """
-    Fetches info about all dependencies for the purpose of writing JSON files
+    Fetches info about all libraries for the purpose of writing JSON files
 
     :return:
         A dict in the form:
         {
-            'Dependency Name': {
+            'Library Name': {
                 'name': 'Package Name',
-                'load_order': '01',
                 'authors': ['author', 'names'],
                 'description': 'Package description',
                 'issues': 'http://example.com/issues',
@@ -22,7 +21,8 @@ def all(limit_one_per_dependency=False):
                         'version': '1.0.0',
                         'url': 'https://example.com/download',
                         'sublime_text': '*',
-                        'platforms': ['*']
+                        'platforms': ['*'],
+                        'python_versions': ['3.3', '3.8']
                     }
                 ]
             }
@@ -35,12 +35,11 @@ def all(limit_one_per_dependency=False):
             SELECT
                 sources[1] AS repository,
                 name,
-                load_order,
                 authors,
                 description,
                 issues
             FROM
-                dependencies
+                libraries
             WHERE
                 is_missing != TRUE AND
                 removed != TRUE AND
@@ -53,7 +52,6 @@ def all(limit_one_per_dependency=False):
             output[row['name']] = {
                 'repository':     row['repository'],
                 'name':           row['name'],
-                'load_order':     row['load_order'],
                 'authors':        row['authors'],
                 'description':    row['description'],
                 'issues':         row['issues'],
@@ -62,8 +60,9 @@ def all(limit_one_per_dependency=False):
 
         cursor.execute("""
             SELECT
-                dr.dependency,
+                dr.library,
                 dr.platforms,
+                dr.python_versions,
                 dr.sublime_text,
                 dr.version,
                 dr.url,
@@ -76,8 +75,8 @@ def all(limit_one_per_dependency=False):
                     ELSE 0
                 END AS semver_variant
             FROM
-                dependency_releases AS dr INNER JOIN
-                dependencies AS d ON dr.dependency = d.name
+                library_releases AS dr INNER JOIN
+                libraries AS d ON dr.library = d.name
             WHERE
                 d.is_missing != TRUE AND
                 d.removed != TRUE AND
@@ -103,45 +102,46 @@ def all(limit_one_per_dependency=False):
                 END DESC
         """)
 
-        dependencies_found = {}
+        libraries_found = {}
 
         for row in cursor.fetchall():
-            dependency = row['dependency']
-            # Skip pre-releases for dependencies
+            library = row['library']
+            # Skip pre-releases for libraries
             if row['semver_variant'] == -1:
                 continue
 
-            key = '%s-%s-%s' % (dependency, row['sublime_text'], ','.join(row['platforms']))
-            if limit_one_per_dependency:
-                if key in dependencies_found:
+            key = '%s-%s-%s' % (library, row['sublime_text'], ','.join(row['platforms']))
+            if limit_one_per_library:
+                if key in libraries_found:
                     continue
 
             release = {
-                'platforms':    row['platforms'],
-                'sublime_text': row['sublime_text'],
-                'version':      row['version'],
-                'url':          row['url']
+                'platforms':       row['platforms'],
+                'python_versions': row['python_versions'],
+                'sublime_text':    row['sublime_text'],
+                'version':         row['version'],
+                'url':             row['url']
             }
             if row['sha256']:
                 release['sha256'] = row['sha256']
 
-            output[dependency]['releases'].append(release)
+            output[library]['releases'].append(release)
 
-            if limit_one_per_dependency:
-                dependencies_found[key] = True
+            if limit_one_per_library:
+                libraries_found[key] = True
 
     return output
 
 
 def dependent_sources(source):
     """
-    Fetches a list of sources needed to fully refresh all dependencies from the specified source
+    Fetches a list of sources needed to fully refresh all libraries from the specified source
 
     :param source:
-        The string source (URL) to find the dependencies of
+        The string source (URL) to find the libraries of
 
     :return:
-        A list of sources (URLs) for dependencies to be refreshed
+        A list of sources (URLs) for libraries to be refreshed
     """
 
     with connection() as cursor:
@@ -149,7 +149,7 @@ def dependent_sources(source):
             SELECT
                 DISTINCT unnest(sources) AS source
             FROM
-                dependencies
+                libraries
             WHERE
                 sources @> ARRAY[%s]::varchar[]
         """, [source])
@@ -158,13 +158,13 @@ def dependent_sources(source):
 
 def outdated_sources(minutes, limit):
     """
-    Fetches a list of outdated dependency sources in the DB
+    Fetches a list of outdated library sources in the DB
 
     :param minutes:
         The int number of minutes to be considered "outdated"
 
     :return:
-        A list of sources (URLs) for dependencies that need to be refreshed
+        A list of sources (URLs) for libraries that need to be refreshed
     """
 
     outdated_date = datetime.utcnow() - timedelta(minutes=minutes)
@@ -178,7 +178,7 @@ def outdated_sources(minutes, limit):
                     SELECT
                         sources
                     FROM
-                        dependencies
+                        libraries
                     WHERE
                         last_seen <= %s
                     ORDER BY
@@ -198,7 +198,7 @@ def invalid_sources(valid_sources):
         The list of sources that are valid
 
     :return:
-        A list of sources (URLs) for dependencies that should be ignored
+        A list of sources (URLs) for libraries that should be ignored
     """
 
     with connection() as cursor:
@@ -206,7 +206,7 @@ def invalid_sources(valid_sources):
             SELECT
                 DISTINCT unnest(sources) AS source
             FROM
-                dependencies
+                libraries
         """)
         all_sources = [row['source'] for row in cursor]
 
@@ -215,7 +215,7 @@ def invalid_sources(valid_sources):
 
 def old():
     """
-    Finds all dependencies that haven't been seen in at least two hours
+    Finds all libraries that haven't been seen in at least two hours
 
     :return:
         A list of dict objects containing the keys:
@@ -231,7 +231,7 @@ def old():
                 sources,
                 is_missing
             FROM
-                dependencies
+                libraries
             WHERE
                 last_seen < CURRENT_TIMESTAMP - INTERVAL '2 hours' AND
                 removed != TRUE AND
@@ -241,30 +241,30 @@ def old():
         return cursor.fetchall()
 
 
-def mark_found(dependencies):
+def mark_found(libraries):
     """
-    Marks a dependencies as no longer missing
+    Marks libraries as no longer missing
 
-    :param dependencies:
-        The name of the dependencies
+    :param libraries:
+        The name of the libraries
     """
 
     with connection() as cursor:
         cursor.execute("""
             UPDATE
-                dependencies
+                libraries
             SET
                 is_missing = FALSE,
                 missing_error = '',
                 removed = FALSE
             WHERE
                 name = %s
-        """, [dependencies])
+        """, [libraries])
 
 
 def mark_missing(source, error, needs_review):
     """
-    Marks all dependencies from a source as currently missing
+    Marks all libraries from a source as currently missing
 
     :param source:
         The URL of the source that could not be contacted
@@ -273,13 +273,13 @@ def mark_missing(source, error, needs_review):
         A unicode string of the error
 
     :param needs_review:
-        A bool if the dependency needs to be reviewed
+        A bool if the library needs to be reviewed
     """
 
     with connection() as cursor:
         cursor.execute("""
             UPDATE
-                dependencies
+                libraries
             SET
                 is_missing = TRUE,
                 missing_error = %s,
@@ -289,45 +289,45 @@ def mark_missing(source, error, needs_review):
         """, [error, needs_review, source])
 
 
-def mark_missing_by_name(dependency, error, needs_review):
+def mark_missing_by_name(library, error, needs_review):
     """
-    Marks a dependency as missing
+    Marks a library as missing
 
-    :param dependency:
-        The name of the dependency
+    :param library:
+        The name of the library
 
     :param error:
         A unicode string of the error
 
     :param needs_review:
-        A bool if the dependency needs to be reviewed
+        A bool if the library needs to be reviewed
     """
 
     with connection() as cursor:
         cursor.execute("""
             UPDATE
-                dependencies
+                libraries
             SET
                 is_missing = TRUE,
                 missing_error = %s,
                 needs_review = %s
             WHERE
                 name = %s
-        """, [error, needs_review, dependency])
+        """, [error, needs_review, library])
 
 
-def mark_removed(dependency):
+def mark_removed(library):
     """
-    Marks a dependency as removed
+    Marks a library as removed
 
-    :param dependency:
-        The name of the dependency
+    :param library:
+        The name of the library
     """
 
     with connection() as cursor:
         cursor.execute("""
             UPDATE
-                dependencies
+                libraries
             SET
                 removed = TRUE,
                 is_missing = FALSE,
@@ -335,17 +335,16 @@ def mark_removed(dependency):
                 needs_review = TRUE
             WHERE
                 name = %s
-        """, [dependency])
+        """, [library])
 
 
 def store(values):
     """
-    Stores dependency info in the database
+    Stores library info in the database
 
     :param values:
         A dict containing the following keys:
           `name`
-          `load_order`
           `author`
           `description`
           `issues`
@@ -356,12 +355,11 @@ def store(values):
     name = values['name']
 
     with connection() as cursor:
-        cursor.execute("SELECT name FROM dependencies WHERE name = %s", [name])
+        cursor.execute("SELECT name FROM libraries WHERE name = %s", [name])
 
         if cursor.fetchone() == None:
             sql = """
-                INSERT INTO dependencies (
-                    load_order,
+                INSERT INTO libraries (
                     authors,
                     description,
                     issues,
@@ -372,7 +370,6 @@ def store(values):
                     %s,
                     %s,
                     %s,
-                    %s,
                     CURRENT_TIMESTAMP,
                     %s,
                     %s
@@ -381,9 +378,8 @@ def store(values):
         else:
             sql = """
                 UPDATE
-                    dependencies
+                    libraries
                 SET
-                    load_order = %s,
                     authors = %s,
                     description = %s,
                     issues = %s,
@@ -394,12 +390,11 @@ def store(values):
             """
 
         if not isinstance(values['author'], list):
-            authors = re.split('\s*,\s*', values['author'])
+            authors = re.split(r'\s*,\s*', values['author'])
         else:
             authors = values['author']
 
         cursor.execute(sql, [
-            values['load_order'],
             authors,
             values['description'],
             values['issues'],
@@ -407,13 +402,14 @@ def store(values):
             name
         ])
 
-        cursor.execute("DELETE FROM dependency_releases WHERE dependency = %s", [name])
+        cursor.execute("DELETE FROM library_releases WHERE library = %s", [name])
 
         for release in values['releases']:
             sql = """
-                INSERT INTO dependency_releases (
-                    dependency,
+                INSERT INTO library_releases (
+                    library,
                     platforms,
+                    python_versions,
                     sublime_text,
                     version,
                     url,
@@ -424,6 +420,7 @@ def store(values):
                     %s,
                     %s,
                     %s,
+                    %s,
                     %s
                 )
             """
@@ -445,6 +442,7 @@ def store(values):
             cursor.execute(sql, [
                 name,
                 release['platforms'],
+                release['python_versions'],
                 sublime_text,
                 release['version'],
                 release['url'],
diff --git a/app/models/package/__init__.py b/app/models/package/__init__.py
index dfd50a3..bf262b4 100644
--- a/app/models/package/__init__.py
+++ b/app/models/package/__init__.py
@@ -4,4 +4,4 @@
 from . import stats
 from . import usage
 
-__all__ = [find, modify, sources, stats, usage]
+__all__ = ["find", "modify", "sources", "stats", "usage"]
diff --git a/app/models/package/find.py b/app/models/package/find.py
index ef24558..bc21fb4 100644
--- a/app/models/package/find.py
+++ b/app/models/package/find.py
@@ -41,11 +41,12 @@ def all(limit_one_per_package=False, only_package_control=False):
                 'releases': [
                     {
                         'platforms': ['*'],
+                        'python_versions': ['3.3', '3.8'],
                         'sublime_text': '*',
                         'version': '1.0.0',
                         'url': 'http://example.com/package.zip',
                         'date': '2015-01-01 10:15:00',
-                        'dependencies': []
+                        'libraries': []
                     },
                     ...
                 ]
@@ -107,11 +108,12 @@ def all(limit_one_per_package=False, only_package_control=False):
             SELECT
                 r.package,
                 r.platforms,
+                r.python_versions,
                 r.sublime_text,
                 r.version,
                 r.url,
                 r.date,
-                r.dependencies,
+                r.libraries,
                 CASE
                     WHEN r.version ~ E'^\\\\d+\\\\.\\\\d+\\\\.\\\\d+-'
                         then -1
@@ -199,8 +201,11 @@ def all(limit_one_per_package=False, only_package_control=False):
                 'date':         row['date']
             }
 
-            if row['dependencies']:
-                release['dependencies'] = row['dependencies']
+            if row['python_versions']:
+                release['python_versions'] = row['python_versions']
+
+            if row['libraries']:
+                release['libraries'] = row['libraries']
 
             output[package]['releases'].append(release)
 
@@ -215,7 +220,8 @@ def all(limit_one_per_package=False, only_package_control=False):
                     package_minor_versions[minor_key] = 0
                 package_minor_versions[minor_key] += 1
 
-    return output
+    # return repos with at least one release
+    return {repo: info for repo, info in output.items() if info['releases']}
 
 
 def old():
diff --git a/app/models/package/modify.py b/app/models/package/modify.py
index 6f6fc44..cdd7c19 100644
--- a/app/models/package/modify.py
+++ b/app/models/package/modify.py
@@ -533,7 +533,7 @@ def store(values):
             else:
                 st_versions.extend([2, 3, 4])
 
-        st_versions = sorted(list(set(st_versions)))
+        st_versions = sorted(set(st_versions))
 
         if not isinstance(values['author'], list):
             authors = re.split(r'\s*,\s*', values['author'])
@@ -564,11 +564,12 @@ def store(values):
                 INSERT INTO releases (
                     package,
                     platforms,
+                    python_versions,
                     sublime_text,
                     version,
                     url,
                     date,
-                    dependencies
+                    libraries
                 ) VALUES (
                     %s,
                     %s,
@@ -576,18 +577,20 @@ def store(values):
                     %s,
                     %s,
                     %s,
+                    %s,
                     %s
                 )
             """
 
             cursor.execute(sql, [
                 name,
-                release['platforms'],
-                _normalize_st_version(release['sublime_text']),
+                release.get('platforms', ['*']),
+                release.get('python_versions', []),
+                _normalize_st_version(release.get('sublime_text', '*')),
                 release['version'],
                 release['url'],
                 release['date'],
-                release.get('dependencies', [])
+                release.get('libraries', [])
             ])
 
 
diff --git a/app/models/package/sources.py b/app/models/package/sources.py
index 1adc5ec..510032f 100644
--- a/app/models/package/sources.py
+++ b/app/models/package/sources.py
@@ -8,7 +8,7 @@ def dependent_sources(source):
     Fetches a list of sources needed to fully refresh all packages from the specified source
 
     :param source:
-        The string source (URL) to find the dependencies of
+        The string source (URL) to find the libraries of
 
     :return:
         A list of sources (URLs) for packages to be refreshed
diff --git a/app/tasks/crawl.py b/app/tasks/crawl.py
index 0029668..10665c6 100644
--- a/app/tasks/crawl.py
+++ b/app/tasks/crawl.py
@@ -3,7 +3,7 @@
 import sys
 
 from ..models import package
-from ..models import dependency
+from ..models import library
 from ..lib.refresh_packages import refresh_packages
 
 
@@ -31,18 +31,18 @@
 
 
 if explicit_package:
-    valid_sources = package.sources.sources_for(explicit_package)
-    valid_dependency_sources = []
+    valid_package_sources = package.sources.sources_for(explicit_package)
+    valid_library_sources = []
 else:
-    valid_sources = package.sources.outdated_sources(60, 200)
-    valid_dependency_sources = dependency.outdated_sources(60, 200)
+    valid_package_sources = package.sources.outdated_sources(60, 200)
+    valid_library_sources = library.outdated_sources(60, 200)
 
-invalid_sources = package.sources.invalid_sources(valid_sources)
-invalid_dependency_sources = dependency.invalid_sources(valid_dependency_sources)
+invalid_package_sources = package.sources.invalid_sources(valid_package_sources)
+invalid_library_sources = library.invalid_sources(valid_library_sources)
 
-affected_packages, affected_dependencies = refresh_packages(invalid_sources, invalid_dependency_sources)
+affected_packages, affected_libraries = refresh_packages(invalid_package_sources, invalid_library_sources)
 
 print('AFFECTED PACKAGES')
 print(affected_packages)
-print('\nAFFECTED DEPENDENCIES')
-print(affected_dependencies)
+print('\nAFFECTED LIBRARIES')
+print(affected_libraries)
diff --git a/app/tasks/generate_channel_v3_json.py b/app/tasks/generate_channel_v3_json.py
index 452c4fd..25d110b 100644
--- a/app/tasks/generate_channel_v3_json.py
+++ b/app/tasks/generate_channel_v3_json.py
@@ -1,12 +1,9 @@
-import re
 import json
 import os
-import gzip
-import bz2
-from urllib.parse import urlparse
 
-from ..models import package, dependency
 from ..lib.json_datetime_encoder import JsonDatetimeEncoder
+from ..lib.store_asset import store_asset
+from ..models import package
 
 
 if 'PACKAGE_CONTROL_ASSETS' not in os.environ:
@@ -26,7 +23,7 @@
     'dependencies_cache': {}
 }
 
-package_info = package.find.all()
+package_info = package.find.all(limit_one_per_package=True, only_package_control=True)
 for name, info in package_info.items():
     repo = info['repository']
     del info['repository']
@@ -40,41 +37,14 @@
 
     output['packages_cache'][repo].append(info)
 
-dependency_info = dependency.all(limit_one_per_dependency=True)
-for name, info in dependency_info.items():
-    repo = info['repository']
-    del info['repository']
-
-    if repo not in output['repositories']:
-        output['repositories'].append(repo)
-
-    if repo not in output['dependencies_cache']:
-        output['dependencies_cache'][repo] = []
-
-    output['dependencies_cache'][repo].append(info)
-
 output['repositories'] = sorted(output['repositories'])
 
-new_json_path     = os.path.join(assets_dir, 'channel_v3.json-new')
-new_json_gz_path  = os.path.join(assets_dir, 'channel_v3.json.gz-new')
-new_json_bz2_path = os.path.join(assets_dir, 'channel_v3.json.bz2-new')
-json_path         = os.path.join(assets_dir, 'channel_v3.json')
-json_gz_path      = os.path.join(assets_dir, 'channel_v3.json.gz')
-json_bz2_path     = os.path.join(assets_dir, 'channel_v3.json.bz2')
-
-encoded_json = json.dumps(output, cls=JsonDatetimeEncoder).encode('utf-8')
-
-with open(new_json_path, 'wb') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_path, json_path)
-
-with gzip.open(new_json_gz_path, 'w') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_gz_path, json_gz_path)
-
-with bz2.open(new_json_bz2_path, 'w') as f:
-    f.write(encoded_json)
-
-os.rename(new_json_bz2_path, json_bz2_path)
+store_asset(
+    os.path.join(assets_dir, 'channel_v3.json'),
+    json.dumps(
+        output,
+        cls=JsonDatetimeEncoder,
+        check_circular=False,
+        sort_keys=True
+    )
+)
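
> Editor's note: the inline write-then-rename logic for the plain, gzip and bz2 artifacts moves into `store_asset()`, whose implementation is not part of this diff. A minimal sketch of what such a helper could look like, assuming it preserves the removed behavior (the gzip/bz2 siblings and the `-new` temp-file rename are assumptions carried over from the deleted code):

```python
# Hypothetical store_asset(): write each artifact to a '-new' sibling,
# then rename it into place so readers never observe a partial file.
import bz2
import gzip
import os


def store_asset(path, content):
    data = content.encode('utf-8')
    for target, opener in (
        (path, open),
        (path + '.gz', gzip.open),
        (path + '.bz2', bz2.open),
    ):
        new_path = target + '-new'
        with opener(new_path, 'wb') as f:
            f.write(data)
        os.rename(new_path, target)  # atomic replace on POSIX
```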
diff --git a/app/tasks/generate_channel_v4_json.py b/app/tasks/generate_channel_v4_json.py
new file mode 100644
index 0000000..09c4eac
--- /dev/null
+++ b/app/tasks/generate_channel_v4_json.py
@@ -0,0 +1,63 @@
+import json
+import os
+
+from ..lib.json_datetime_encoder import JsonDatetimeEncoder
+from ..lib.store_asset import store_asset
+from ..models import package, library
+
+
+if 'PACKAGE_CONTROL_ASSETS' not in os.environ:
+    dirname = os.path.dirname(os.path.abspath(__file__))
+    assets_dir = os.path.realpath(os.path.join(dirname, '..', '..', 'assets'))
+else:
+    assets_dir = os.environ['PACKAGE_CONTROL_ASSETS']
+
+if not os.path.exists(assets_dir):
+    raise Exception(('The assets folder, "%s", does not exist. It can be '
+        'overridden by the environment variable PACKAGE_CONTROL_ASSETS.') % assets_dir)
+
+output = {
+    'schema_version': '4.0.0',
+    'repositories': [],
+    'packages_cache': {},
+    'libraries_cache': {}
+}
+
+package_info = package.find.all(limit_one_per_package=True)
+for name, info in package_info.items():
+    repo = info['repository']
+    del info['repository']
+    del info['last_modified']
+
+    if repo not in output['repositories']:
+        output['repositories'].append(repo)
+
+    if repo not in output['packages_cache']:
+        output['packages_cache'][repo] = []
+
+    output['packages_cache'][repo].append(info)
+
+library_info = library.all(limit_one_per_library=True)
+for name, info in library_info.items():
+    repo = info['repository']
+    del info['repository']
+
+    if repo not in output['repositories']:
+        output['repositories'].append(repo)
+
+    if repo not in output['libraries_cache']:
+        output['libraries_cache'][repo] = []
+
+    output['libraries_cache'][repo].append(info)
+
+output['repositories'] = sorted(output['repositories'])
+
+store_asset(
+    os.path.join(assets_dir, 'channel_v4.json'),
+    json.dumps(
+        output,
+        cls=JsonDatetimeEncoder,
+        check_circular=False,
+        sort_keys=True
+    )
+)
diff --git a/app/tasks/update_package_control_lib.py b/app/tasks/update_package_control_lib.py
index ff35b24..421e006 100644
--- a/app/tasks/update_package_control_lib.py
+++ b/app/tasks/update_package_control_lib.py
@@ -1,5 +1,4 @@
 import os
-import re
 import shutil
 import sys
 
@@ -18,23 +17,20 @@
 
 
 REQUIRED_PATHS = [
+    'clients',
+    'deps',
+    'downloaders',
+    'http',
+    'providers',
     '__init__.py',
     'ca_certs.py',
     'cache.py',
-    'clients',
     'cmd.py',
     'console_write.py',
-    'deps',
     'download_manager.py',
-    'downloaders',
-    'file_not_found_error.py',
-    'http',
-    'open_compat.py',
-    'providers',
-    'semver.py',
+    'http_cache.py',
+    'package_versions.py',
     'text.py',
-    'unicode.py',
-    'versions.py',
 ]
 
 PRESERVE_PATHS = [
diff --git a/config/crawler.yml b/config/crawler.yml
index 285d13d..2674df0 100644
--- a/config/crawler.yml
+++ b/config/crawler.yml
@@ -5,6 +5,7 @@ user_agent: Package Control Default Channel Server
 http_cache: true
 http_cache_length: 5184000
 install_prereleases: true
+max_releases: 1
 downloader_precedence: 
     windows: [wininet, oscrypto]
     osx: [oscrypto, urllib, curl]
diff --git a/setup/scripts/extract_package_control.py b/setup/scripts/extract_package_control.py
index 137ebfe..d0cf9bd 100644
--- a/setup/scripts/extract_package_control.py
+++ b/setup/scripts/extract_package_control.py
@@ -94,8 +94,8 @@
     'providers/channel_provider.py',
     'providers/github_repository_provider.py',
     'providers/github_user_provider.py',
+    'providers/json_repository_provider.py',
     'providers/provider_exception.py',
-    'providers/repository_provider.py',
     'providers/schema_compat.py',
 
     '__init__.py',
@@ -106,7 +106,6 @@
     'download_manager.py',
     'file_not_found_error.py',
     # CUSTOM 'http_cache.py',
-    'open_compat.py',
     '../readme.md',
     'semver.py',
     # CUSTOM 'show_error.py',
diff --git a/setup/sql/down.sql b/setup/sql/down.sql
index 486fa0a..e2c7690 100644
--- a/setup/sql/down.sql
+++ b/setup/sql/down.sql
@@ -17,7 +17,8 @@ DROP TABLE system_stats;
 DROP TABLE package_stats;
 DROP TABLE readmes;
 DROP TABLE releases;
-DROP TABLE dependencies;
+DROP TABLE library_releases;
+DROP TABLE libraries;
 DROP TABLE packages;
 DROP TABLE usage;
 DROP TABLE unique_package_installs;
diff --git a/setup/sql/migrations/libraries.sql b/setup/sql/migrations/libraries.sql
new file mode 100644
index 0000000..c9c49a8
--- /dev/null
+++ b/setup/sql/migrations/libraries.sql
@@ -0,0 +1,12 @@
+ALTER TABLE releases ADD COLUMN python_versions varchar[];
+ALTER TABLE releases RENAME dependencies TO libraries;
+
+ALTER TABLE dependencies RENAME TO libraries;
+ALTER TABLE libraries DROP COLUMN load_order;
+
+ALTER TABLE dependency_releases RENAME TO library_releases;
+ALTER TABLE library_releases RENAME dependency TO library;
+
+ALTER TABLE library_releases ADD COLUMN python_versions varchar[];
+UPDATE library_releases SET python_versions = '{3.3}';
+ALTER TABLE library_releases ALTER python_versions SET NOT NULL;
diff --git a/setup/sql/up.sql b/setup/sql/up.sql
index 4019c00..610a7d2 100644
--- a/setup/sql/up.sql
+++ b/setup/sql/up.sql
@@ -84,9 +84,8 @@ CREATE TABLE packages (
 );
 
 
-CREATE TABLE dependencies (
+CREATE TABLE libraries (
     name                     varchar(500)  NOT NULL PRIMARY KEY,
-    load_order               varchar(2)    NOT NULL,
     description              varchar       NOT NULL DEFAULT '',
     authors                  varchar[],
     issues                   varchar       NOT NULL DEFAULT '',
@@ -99,14 +98,15 @@ CREATE TABLE dependencies (
 );
 
 
-CREATE TABLE dependency_releases (
-    dependency               varchar(500)  NOT NULL REFERENCES dependencies(name) ON DELETE CASCADE ON UPDATE CASCADE,
+CREATE TABLE library_releases (
+    library                  varchar(500)  NOT NULL REFERENCES libraries(name) ON DELETE CASCADE ON UPDATE CASCADE,
     platforms                varchar[]     NOT NULL,
+    python_versions          varchar[]     NOT NULL,
     sublime_text             varchar       NOT NULL,
     version                  varchar       NOT NULL,
     url                      varchar       NOT NULL,
     sha256                   varchar,
-    PRIMARY KEY(dependency, platforms, sublime_text, version)
+    PRIMARY KEY(library, platforms, sublime_text, version)
 );
 
 
@@ -114,11 +114,12 @@ CREATE TABLE dependency_releases (
 CREATE TABLE releases (
     package                  varchar(500)  NOT NULL REFERENCES packages(name) ON DELETE CASCADE ON UPDATE CASCADE,
     platforms                varchar[]     NOT NULL,
+    python_versions          varchar[]     NOT NULL,
     sublime_text             varchar       NOT NULL,
     version                  varchar       NOT NULL,
     url                      varchar       NOT NULL,
     date                     timestamp     NOT NULL,
-    dependencies             varchar[],
+    libraries                varchar[],
     PRIMARY KEY(package, platforms, sublime_text, version)
 );
 
@@ -272,7 +273,7 @@ $$ LANGUAGE plpgsql;
 CREATE FUNCTION array_unique(arr anyarray) RETURNS anyarray LANGUAGE sql AS $$
     SELECT array_agg(DISTINCT a)
     FROM (
-        SELECT unnest(arr) a 
+        SELECT unnest(arr) a
         ORDER BY a
     ) sq
 $$;
diff --git a/tasks.md b/tasks.md
index c4a5cf8..359c0fa 100644
--- a/tasks.md
+++ b/tasks.md
@@ -33,6 +33,9 @@ current list of valid tasks:
  - `generate_channel_v3_json` - builds the `channel_v3.json` file that contains
    the `3.0.0` schema version channel info used by Package Control 3.x.
 
+ - `generate_channel_v4_json` - builds the `channel_v4.json` file that contains
+   the `4.0.0` schema version channel info used by Package Control 4.x.
+
  - `generate_legacy_channel_json` - builds the `repositories.json` file that
    contains the `1.2` schema version channel info used by Package Control 1.x.