diff --git a/docs/conf.py b/docs/conf.py index 95b29a34..efe6cb9b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -9,7 +9,6 @@ from packaging.version import Version - # Add the NailGun root directory to the system path. This allows references # such as :mod:`nailgun.client` to be processed correctly. ROOT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)) diff --git a/docs/create_user_nailgun.py b/docs/create_user_nailgun.py index 50972ccc..a94ae953 100755 --- a/docs/create_user_nailgun.py +++ b/docs/create_user_nailgun.py @@ -3,17 +3,14 @@ from pprint import pprint from nailgun.config import ServerConfig -from nailgun.entities import Organization -from nailgun.entities import User +from nailgun.entities import Organization, User def main(): """Create an identical user account on a pair of satellites.""" server_configs = ServerConfig.get('sat1'), ServerConfig.get('sat2') for server_config in server_configs: - org = Organization(server_config).search(query={'search': 'name="Default_Organization"'})[ - 0 - ] + org = Organization(server_config).search(query={'search': 'name="Default_Organization"'})[0] # The LDAP authentication source with an ID of 1 is internal. It is # nearly guaranteed to exist and be functioning. user = User( diff --git a/docs/create_user_plain.py b/docs/create_user_plain.py index ede2efba..88350cc4 100755 --- a/docs/create_user_plain.py +++ b/docs/create_user_plain.py @@ -12,6 +12,7 @@ """ import json from pprint import pprint +import sys import requests @@ -70,7 +71,7 @@ def get_organization_id(server_config, label): f'Expected to find one organization, but instead found {decoded["subtotal"]}. ' f'Search results: {decoded["results"]}' ) - exit(1) + sys.exit(1) return decoded['results'][0]['id'] diff --git a/nailgun/__init__.py b/nailgun/__init__.py index 098cf95d..d1ac40c5 100644 --- a/nailgun/__init__.py +++ b/nailgun/__init__.py @@ -16,5 +16,4 @@ """ from logging import basicConfig - basicConfig() diff --git a/nailgun/client.py b/nailgun/client.py index dd6fcc5a..d34d43a9 100644 --- a/nailgun/client.py +++ b/nailgun/client.py @@ -15,14 +15,13 @@ http://docs.python-requests.org/en/latest/api/#main-interface """ -import logging from json import dumps +import logging from warnings import simplefilter import requests import urllib3 - logger = logging.getLogger(__name__) # The urllib3 module (which requests uses) refuses to make insecure HTTPS @@ -87,7 +86,7 @@ def _set_content_type(kwargs): def _truncate_data(data, max_len=500): """Truncate data to a max length""" - if isinstance(data, (str, bytes)): + if isinstance(data, str | bytes): if len(data) > max_len: return f"{data[:max_len - 3]}..." return data diff --git a/nailgun/config.py b/nailgun/config.py index 00a8d882..ad261d30 100644 --- a/nailgun/config.py +++ b/nailgun/config.py @@ -8,8 +8,7 @@ """ import json -from os.path import isfile -from os.path import join +from os.path import isfile, join from threading import Lock from packaging.version import parse @@ -114,7 +113,7 @@ def __repr__(self): attrs = vars(self).copy() if "version" in attrs: attrs["version"] = str(attrs.pop("version")) - kv_pairs = ", ".join(f"{key}={repr(value)}" for key, value in attrs.items()) + kv_pairs = ", ".join(f"{key}={value!r}" for key, value in attrs.items()) return f"{self.__module__}.{type(self).__name__}({kv_pairs})" @classmethod @@ -201,9 +200,7 @@ def save(self, label='default', path=None): # Where is the file we're writing to? 
if path is None: - path = join( - BaseDirectory.save_config_path(self._xdg_config_dir), self._xdg_config_file - ) + path = join(BaseDirectory.save_config_path(self._xdg_config_dir), self._xdg_config_file) self._file_lock.acquire() try: diff --git a/nailgun/entities.py b/nailgun/entities.py index 61bc7185..1719e59a 100644 --- a/nailgun/entities.py +++ b/nailgun/entities.py @@ -20,30 +20,29 @@ workings of entity classes. """ -import hashlib -import os.path from datetime import datetime from functools import lru_cache -from http.client import ACCEPTED -from http.client import NO_CONTENT -from urllib.parse import urljoin # noqa:F0401,E0611 +import hashlib +from http.client import ACCEPTED, NO_CONTENT +import os.path +from urllib.parse import urljoin -from fauxfactory import gen_alphanumeric -from fauxfactory import gen_choice +from fauxfactory import gen_alphanumeric, gen_choice from packaging.version import Version -from nailgun import client -from nailgun import entity_fields -from nailgun.entity_mixins import _get_entity_ids -from nailgun.entity_mixins import _payload -from nailgun.entity_mixins import _poll_task -from nailgun.entity_mixins import Entity -from nailgun.entity_mixins import EntityCreateMixin -from nailgun.entity_mixins import EntityDeleteMixin -from nailgun.entity_mixins import EntityReadMixin -from nailgun.entity_mixins import EntitySearchMixin -from nailgun.entity_mixins import EntityUpdateMixin -from nailgun.entity_mixins import to_json_serializable # noqa: F401 +from nailgun import client, entity_fields +from nailgun.entity_mixins import ( + Entity, + EntityCreateMixin, + EntityDeleteMixin, + EntityReadMixin, + EntitySearchMixin, + EntityUpdateMixin, + _get_entity_ids, + _payload, + _poll_task, + to_json_serializable, # noqa: F401 +) # The size of this file is a direct reflection of the size of Satellite's API. # This file's size has already been significantly cut down through the use of @@ -195,7 +194,7 @@ def _get_version(server_config): return getattr(server_config, 'version', Version('1!0')) -@lru_cache() +@lru_cache def _feature_list(server_config, smart_proxy_id=1): """Get list of features enabled on capsule""" smart_proxy = SmartProxy(server_config, id=smart_proxy_id).read_json() @@ -1526,7 +1525,10 @@ def read(self, entity=None, attrs=None, ignore=None, params=None): if attr not in ignore: # We cannot call `self.update_json([])`, as an ID might not be # present on self. However, `attrs` is guaranteed to have an ID. 
- attrs[attr] = DiscoveryRule(self._server_config, id=attrs['id'],).update_json( + attrs[attr] = DiscoveryRule( + self._server_config, + id=attrs['id'], + ).update_json( [] )[attr] return super().read(entity, attrs, ignore, params) @@ -1950,7 +1952,7 @@ def read(self, entity=None, attrs=None, ignore=None, params=None): ) for entity_id in _get_entity_ids('template_inputs', attrs) ] - setattr(entity, 'template_inputs', referenced_entities) + entity.template_inputs = referenced_entities return entity @@ -2529,7 +2531,7 @@ def read(self, entity=None, attrs=None, ignore=None, params=None): ignore = set() ignore.add('content_view_filter') ignore.update( - [field_name for field_name in entity.get_fields().keys() if field_name not in attrs] + [field_name for field_name in entity.get_fields() if field_name not in attrs] ) return super().read(entity, attrs, ignore, params) @@ -2899,7 +2901,7 @@ def add(self, synchronous=True, timeout=None, **kwargs): kwargs = kwargs.copy() # shadow the passed-in kwargs if 'data' not in kwargs: # data is required - kwargs['data'] = dict() + kwargs['data'] = {} kwargs.update(self._server_config.get_client_kwargs()) response = client.put(self.path('add'), **kwargs) return _handle_response(response, self._server_config, synchronous, timeout) @@ -2921,7 +2923,7 @@ def remove(self, synchronous=True, timeout=None, **kwargs): kwargs = kwargs.copy() # shadow the passed-in kwargs if 'data' not in kwargs: # data is required - kwargs['data'] = dict() + kwargs['data'] = {} if 'data' in kwargs and 'component_ids' not in kwargs['data']: kwargs['data']['component_ids'] = [self.id] kwargs.update(self._server_config.get_client_kwargs()) @@ -5889,9 +5891,7 @@ def __init__(self, server_config=None, **kwargs): self._fields = { 'match': entity_fields.StringField(required=True), 'value': entity_fields.StringField(required=True), - 'smart_class_parameter': entity_fields.OneToOneField( - SmartClassParameters, parent=True - ), + 'smart_class_parameter': entity_fields.OneToOneField(SmartClassParameters, parent=True), 'omit': entity_fields.BooleanField(), } super().__init__(server_config, **kwargs) @@ -6316,7 +6316,7 @@ def search_normalize(self, results): split by main puppet class. """ flattened_results = [] - for key in results.keys(): + for key in results: for item in results[key]: flattened_results.append(item) return super().search_normalize(flattened_results) @@ -6834,8 +6834,8 @@ def import_uploads( It expects either a list of uploads or upload_ids (but not both). 
- :param content_type: content type (‘deb’, ‘docker_manifest’, ‘file’, ‘ostree’, - ‘rpm’, ‘srpm’) + :param content_type: content type (`deb`, `docker_manifest`, `file`, `ostree`, + `rpm`, `srpm`) :param uploads: Array of uploads to be imported :param upload_ids: Array of upload ids to be imported :param synchronous: What should happen if the server returns an HTTP @@ -6980,7 +6980,7 @@ def available_repositories(self, **kwargs): """ if 'data' not in kwargs: - kwargs['data'] = dict() + kwargs['data'] = {} kwargs['data']['product_id'] = self.product.id kwargs = kwargs.copy() # shadow the passed-in kwargs kwargs.update(self._server_config.get_client_kwargs()) @@ -7004,7 +7004,7 @@ def enable(self, synchronous=True, timeout=None, **kwargs): """ if 'data' not in kwargs: - kwargs['data'] = dict() + kwargs['data'] = {} kwargs['data']['product_id'] = self.product.id kwargs = kwargs.copy() # shadow the passed-in kwargs kwargs.update(self._server_config.get_client_kwargs()) @@ -7026,7 +7026,7 @@ def disable(self, synchronous=True, timeout=None, **kwargs): """ if 'data' not in kwargs: - kwargs['data'] = dict() + kwargs['data'] = {} kwargs['data']['product_id'] = self.product.id kwargs = kwargs.copy() # shadow the passed-in kwargs kwargs.update(self._server_config.get_client_kwargs()) diff --git a/nailgun/entity_fields.py b/nailgun/entity_fields.py index e12c540b..a30ae568 100644 --- a/nailgun/entity_fields.py +++ b/nailgun/entity_fields.py @@ -20,18 +20,20 @@ """ import random -from fauxfactory import gen_alpha -from fauxfactory import gen_boolean -from fauxfactory import gen_choice -from fauxfactory import gen_date -from fauxfactory import gen_datetime -from fauxfactory import gen_email -from fauxfactory import gen_integer -from fauxfactory import gen_ipaddr -from fauxfactory import gen_mac -from fauxfactory import gen_netmask -from fauxfactory import gen_string -from fauxfactory import gen_url +from fauxfactory import ( + gen_alpha, + gen_boolean, + gen_choice, + gen_date, + gen_datetime, + gen_email, + gen_integer, + gen_ipaddr, + gen_mac, + gen_netmask, + gen_string, + gen_url, +) # The classes in this module serve a declarative role. It is OK that they don't # do much. 
@@ -63,9 +65,7 @@ class Field: mapped entity """ - def __init__( - self, required=False, choices=None, default=_SENTINEL, unique=False, parent=False - ): + def __init__(self, required=False, choices=None, default=_SENTINEL, unique=False, parent=False): self.unique = unique self.required = required self.parent = parent diff --git a/nailgun/entity_mixins.py b/nailgun/entity_mixins.py index 2084c5ce..12c631c8 100644 --- a/nailgun/entity_mixins.py +++ b/nailgun/entity_mixins.py @@ -1,23 +1,19 @@ """Defines a set of mixins that provide tools for interacting with entities.""" import _thread as thread +from collections.abc import Iterable +import contextlib +from datetime import date, datetime import http.client as http_client import json as std_json import threading import time -from collections.abc import Iterable -from datetime import date -from datetime import datetime from urllib.parse import urljoin from fauxfactory import gen_choice from inflection import pluralize -from nailgun import client -from nailgun import config -from nailgun.entity_fields import IntegerField -from nailgun.entity_fields import ListField -from nailgun.entity_fields import OneToManyField -from nailgun.entity_fields import OneToOneField +from nailgun import client, config +from nailgun.entity_fields import IntegerField, ListField, OneToManyField, OneToOneField # This module contains very extensive docstrings, so this module is easier to # understand than its size suggests. That said, it could be useful to split @@ -535,7 +531,7 @@ def get_values(self): def __repr__(self): kv_pairs = ", ".join( - f"{key}={repr(value)}" + f"{key}={value!r}" for key, value in self.get_values().items() if not key.startswith("_") ) @@ -713,7 +709,6 @@ def delete(self, synchronous=True, timeout=None): and hasattr(response, 'content') and not response.content.strip() ): - # "The server successfully processed the request, but is not # returning any content. Usually used as a response to a successful # delete request." @@ -1291,20 +1286,17 @@ def search_normalize(self, results): attrs = {} for field_name, field in fields.items(): if isinstance(field, OneToOneField): - try: + with contextlib.suppress(MissingValueError): attrs[field_name] = _get_entity_id(field_name, result) - except MissingValueError: - pass + elif isinstance(field, OneToManyField): - try: + with contextlib.suppress(MissingValueError): attrs[field_name] = _get_entity_ids(field_name, result) - except MissingValueError: - pass + else: - try: + with contextlib.suppress(KeyError): attrs[field_name] = result[field_name] - except KeyError: - pass + normalized.append(attrs) return normalized @@ -1437,7 +1429,7 @@ def search_filter(entities, filters): f'Valid filters are {fields.keys()}, but received {filters.keys()} instead.' ) for field_name in filters: - if isinstance(fields[field_name], (OneToOneField, OneToManyField)): + if isinstance(fields[field_name], OneToOneField | OneToManyField): raise NotImplementedError( 'Search results cannot (yet?) be locally filtered by ' f'`OneToOneField`s and `OneToManyField`s. ' @@ -1447,9 +1439,7 @@ def search_filter(entities, filters): # The arguments are sane. Filter away! 
filtered = [entity.read() for entity in entities] # don't alter inputs for field_name, field_value in filters.items(): - filtered = [ - entity for entity in filtered if getattr(entity, field_name) == field_value - ] + filtered = [entity for entity in filtered if getattr(entity, field_name) == field_value] return filtered @@ -1466,7 +1456,7 @@ def to_json_serializable(obj): if isinstance(obj, dict): return {k: to_json_serializable(v) for k, v in obj.items()} - elif isinstance(obj, (list, tuple)): + elif isinstance(obj, list | tuple): return [to_json_serializable(v) for v in obj] elif isinstance(obj, datetime): return obj.strftime('%Y-%m-%d %H:%M:%S') diff --git a/setup.py b/setup.py index ecca12e3..761efd78 100755 --- a/setup.py +++ b/setup.py @@ -7,9 +7,7 @@ * https://docs.python.org/distutils/sourcedist.html """ -from setuptools import find_packages -from setuptools import setup - +from setuptools import find_packages, setup with open('README.rst') as handle: LONG_DESCRIPTION = handle.read() diff --git a/tests/test_client.py b/tests/test_client.py index d5557afc..0de36914 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -1,10 +1,9 @@ """Unit tests for :mod:`nailgun.client`.""" import inspect -from unittest import mock -from unittest import TestCase +from unittest import TestCase, mock -import requests from fauxfactory import gen_alpha +import requests from nailgun import client diff --git a/tests/test_config.py b/tests/test_config.py index 5fa01b4e..4c5c7b48 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -2,18 +2,16 @@ import builtins import json from unittest import TestCase -from unittest.mock import call -from unittest.mock import mock_open -from unittest.mock import patch +from unittest.mock import call, mock_open, patch -from packaging.version import InvalidVersion -from packaging.version import parse - -from nailgun.config import _get_config_file_path -from nailgun.config import BaseServerConfig -from nailgun.config import ConfigFileError -from nailgun.config import ServerConfig +from packaging.version import InvalidVersion, parse +from nailgun.config import ( + BaseServerConfig, + ConfigFileError, + ServerConfig, + _get_config_file_path, +) FILE_PATH = '/tmp/bogus.json' CONFIGS = { @@ -195,7 +193,7 @@ def test_get(self): Assert that the ``auth`` attribute is a tuple. 
""" - for label in CONFIGS.keys(): + for label in CONFIGS: open_ = mock_open(read_data=json.dumps(CONFIGS)) with patch.object(builtins, 'open', open_): server_config = ServerConfig.get(label, FILE_PATH) diff --git a/tests/test_entities.py b/tests/test_entities.py index d66e1ce4..8cd9fa3f 100644 --- a/tests/test_entities.py +++ b/tests/test_entities.py @@ -1,26 +1,21 @@ """Tests for :mod:`nailgun.entities`.""" +from datetime import date, datetime +from http.client import ACCEPTED, NO_CONTENT import inspect import json import os -from datetime import date -from datetime import datetime -from http.client import ACCEPTED -from http.client import NO_CONTENT -from unittest import mock -from unittest import TestCase - -from fauxfactory import gen_alpha -from fauxfactory import gen_integer -from fauxfactory import gen_string - -from nailgun import client -from nailgun import config -from nailgun import entities -from nailgun.entity_mixins import EntityCreateMixin -from nailgun.entity_mixins import EntityReadMixin -from nailgun.entity_mixins import EntitySearchMixin -from nailgun.entity_mixins import EntityUpdateMixin -from nailgun.entity_mixins import NoSuchPathError +from unittest import TestCase, mock + +from fauxfactory import gen_alpha, gen_integer, gen_string + +from nailgun import client, config, entities +from nailgun.entity_mixins import ( + EntityCreateMixin, + EntityReadMixin, + EntitySearchMixin, + EntityUpdateMixin, + NoSuchPathError, +) _BUILTIN_OPEN = 'builtins.open' # For inspection comparison, a tuple matching the expected func arg spec @@ -243,9 +238,8 @@ def test_required_params(self): entities.SyncPlan, entities.TemplateInput, ): - with self.subTest(): - with self.assertRaises(TypeError): - entity(self.cfg) + with self.subTest(), self.assertRaises(TypeError): + entity(self.cfg) class PathTestCase(TestCase): @@ -429,9 +423,8 @@ def test_no_such_path_error(self): (entities.VirtWhoConfig, 'deploy_script'), (entities.VirtWhoConfig, 'configs'), ): - with self.subTest((entity, which)): - with self.assertRaises(NoSuchPathError): - entity(self.cfg).path(which=which) + with self.subTest((entity, which)), self.assertRaises(NoSuchPathError): + entity(self.cfg).path(which=which) def test_arfreport(self): """Test :meth:`nailgun.entities.ArfReport.path`. @@ -1438,13 +1431,12 @@ def test_snapshot_ignore_arg(self): Assert that entity`s predefined values of ``ignore`` are always correctly passed on. """ - with mock.patch.object(EntityReadMixin, 'read') as read: - with mock.patch.object( - EntityReadMixin, - 'read_json', - return_value={'host': 3}, - ): - entities.Snapshot(self.cfg, id=2, host=3).read() + with mock.patch.object(EntityReadMixin, 'read') as read, mock.patch.object( + EntityReadMixin, + 'read_json', + return_value={'host': 3}, + ): + entities.Snapshot(self.cfg, id=2, host=3).read() # `call_args` is a two-tuple of (positional, keyword) args. 
self.assertEqual({'host'}, read.call_args[0][2]) @@ -1458,22 +1450,20 @@ def test_host_with_interface(self): EntityReadMixin, 'read', return_value=entities.Host(self.cfg, id=2), + ), mock.patch.object( + EntityReadMixin, + 'read_json', + return_value={ + 'interfaces': [{'id': 2}, {'id': 3}], + 'parameters': None, + 'puppet_proxy': None, + }, + ), mock.patch.object( + entities, + '_feature_list', + return_value={'Puppet'}, ): - with mock.patch.object( - EntityReadMixin, - 'read_json', - return_value={ - 'interfaces': [{'id': 2}, {'id': 3}], - 'parameters': None, - 'puppet_proxy': None, - }, - ): - with mock.patch.object( - entities, - '_feature_list', - return_value={'Puppet'}, - ): - host = entities.Host(self.cfg, id=2).read() + host = entities.Host(self.cfg, id=2).read() self.assertTrue(hasattr(host, 'interface')) self.assertTrue(isinstance(host.interface, list)) for interface in host.interface: @@ -1806,7 +1796,6 @@ def test_generic(self): ) for entity in entities_: with self.subTest(entity): - # Call update() with mock.patch.object(entity, 'update_json') as update_json: with mock.patch.object(entity, 'read') as read: @@ -2362,11 +2351,10 @@ def test_bulk_resume(self): {'search': gen_string('alpha')}, {'task_ids': self.foreman_task.id, 'search': gen_string('alpha')}, ): - with self.subTest(kwargs): - with mock.patch.object(client, 'post') as post: - self.foreman_task.bulk_resume(**kwargs) - self.assertEqual(post.call_count, 1) - self.assertEqual(post.mock_calls[2][1][0].ACCEPTED, 202) + with self.subTest(kwargs), mock.patch.object(client, 'post') as post: + self.foreman_task.bulk_resume(**kwargs) + self.assertEqual(post.call_count, 1) + self.assertEqual(post.mock_calls[2][1][0].ACCEPTED, 202) def test_bulk_cancel(self): """Call :meth:`nailgun.entities.ForemanTask.bulk_cancel`.""" @@ -2376,11 +2364,10 @@ def test_bulk_cancel(self): {'search': gen_string('alpha')}, {'task_ids': self.foreman_task.id, 'search': gen_string('alpha')}, ): - with self.subTest(kwargs): - with mock.patch.object(client, 'post') as post: - self.foreman_task.bulk_cancel(**kwargs) - self.assertEqual(post.call_count, 1) - self.assertEqual(post.mock_calls[2][1][0].ACCEPTED, 202) + with self.subTest(kwargs), mock.patch.object(client, 'post') as post: + self.foreman_task.bulk_cancel(**kwargs) + self.assertEqual(post.call_count, 1) + self.assertEqual(post.mock_calls[2][1][0].ACCEPTED, 202) class ContentUploadTestCase(TestCase): @@ -2457,15 +2444,14 @@ def test_content_upload_upload(self): with mock.patch.object( entities.ContentUpload, 'create', - ) as create: - with mock.patch.object( - entities.Repository, - 'import_uploads', - return_value={'status': 'success'}, - ) as import_uploads: - mock_open = mock.mock_open(read_data=gen_string('alpha').encode('ascii')) - with mock.patch(_BUILTIN_OPEN, mock_open, create=True): - response = self.content_upload.upload(filepath, filename) + ) as create, mock.patch.object( + entities.Repository, + 'import_uploads', + return_value={'status': 'success'}, + ) as import_uploads: + mock_open = mock.mock_open(read_data=gen_string('alpha').encode('ascii')) + with mock.patch(_BUILTIN_OPEN, mock_open, create=True): + response = self.content_upload.upload(filepath, filename) self.assertEqual(import_uploads.call_count, 1) self.assertEqual(create.call_count, 1) self.assertEqual(import_uploads.return_value, response) @@ -2483,15 +2469,14 @@ def test_content_upload_no_filename(self): with mock.patch.object( entities.ContentUpload, 'create', - ) as create: - with mock.patch.object( - 
entities.Repository, - 'import_uploads', - return_value={'status': 'success'}, - ) as import_uploads: - mock_open = mock.mock_open(read_data=gen_string('alpha').encode('ascii')) - with mock.patch(_BUILTIN_OPEN, mock_open, create=True): - response = self.content_upload.upload(filepath) + ) as create, mock.patch.object( + entities.Repository, + 'import_uploads', + return_value={'status': 'success'}, + ) as import_uploads: + mock_open = mock.mock_open(read_data=gen_string('alpha').encode('ascii')) + with mock.patch(_BUILTIN_OPEN, mock_open, create=True): + response = self.content_upload.upload(filepath) self.assertEqual(import_uploads.call_count, 1) self.assertEqual(create.call_count, 1) self.assertEqual(import_uploads.return_value, response) @@ -2587,9 +2572,7 @@ def test_search(self): response = self.cv.search() self.assertEqual(handlr.call_count, 1) self.assertEqual(type(response[0]), entities.ContentView) - self.assertEqual( - type(response[0].content_view_component[0]), entities.ContentViewComponent - ) + self.assertEqual(type(response[0].content_view_component[0]), entities.ContentViewComponent) class ContentViewComponentTestCase(TestCase): @@ -2832,19 +2815,19 @@ def setUp(self): self.cfg = config.ServerConfig('some url') self.job_template = entities.JobTemplate(self.cfg, id=2) self.entity = entities.TemplateInput(self.cfg, id=1, template=self.job_template) - self.data = dict( - id=1, - description=None, - fact_name=None, - input_type='user', - name='my new template input', - options=None, - puppet_class_name=None, - puppet_parameter_name=None, - required=False, - template_id=self.job_template.id, - variable_name=None, - ) + self.data = { + 'id': 1, + 'description': None, + 'fact_name': None, + 'input_type': 'user', + 'name': 'my new template input', + 'options': None, + 'puppet_class_name': None, + 'puppet_parameter_name': None, + 'required': False, + 'template_id': self.job_template.id, + 'variable_name': None, + } self.read_json_patcher = mock.patch.object(EntityReadMixin, 'read_json') self.read_json = self.read_json_patcher.start() self.read_json.return_value = self.data.copy() @@ -2856,7 +2839,7 @@ def tearDown(self): def test_read(self): entity = self.entity.read() self.read_json.assert_called_once() - self.assertEqual(self.data, {key: getattr(entity, key) for key in self.data.keys()}) + self.assertEqual(self.data, {key: getattr(entity, key) for key in self.data}) self.assertIsInstance(entity.template, entities.JobTemplate) self.assertEqual(entity.template.id, self.job_template.id) @@ -2869,22 +2852,22 @@ def setUp(self): self.entity = entities.JobTemplate(self.cfg, id=1) self.read_json_patcher = mock.patch.object(EntityReadMixin, 'read_json') self.read_json = self.read_json_patcher.start() - self.template_input_data = dict(id=1, template=1) - self.data = dict( - id=1, - audit_comment=None, - description_format=None, - effective_user=None, - job_category='Commands', - location=[], - locked=False, - name='my new job template', - organization=[], - provider_type=None, - snippet=False, - template='rm -rf /', - template_inputs=[self.template_input_data], - ) + self.template_input_data = {'id': 1, 'template': 1} + self.data = { + 'id': 1, + 'audit_comment': None, + 'description_format': None, + 'effective_user': None, + 'job_category': 'Commands', + 'location': [], + 'locked': False, + 'name': 'my new job template', + 'organization': [], + 'provider_type': None, + 'snippet': False, + 'template': 'rm -rf /', + 'template_inputs': [self.template_input_data], + } 
self.read_json.return_value = self.data.copy() del self.data['template_inputs'] @@ -2894,7 +2877,7 @@ def tearDown(self): def test_read(self): entity = self.entity.read() self.read_json.assert_called_once() - self.assertEqual(self.data, {key: getattr(entity, key) for key in self.data.keys()}) + self.assertEqual(self.data, {key: getattr(entity, key) for key in self.data}) self.assertEqual(len(entity.template_inputs), 1) template_input = entity.template_inputs[0] self.assertIsInstance(template_input, entities.TemplateInput) @@ -3003,7 +2986,7 @@ def test_add_func_with_id(self): entity = self.entity entity.id = 1 func_param_dict = {entity.add_ansible_role: 'ansible_role_id'} - for func in func_param_dict.keys(): + for func in func_param_dict: self.assertEqual(inspect.getfullargspec(func), EXPECTED_ARGSPEC) kwargs = {'kwarg': gen_integer(), 'data': {func_param_dict[func]: gen_integer()}} with mock.patch.object(entities, '_handle_response') as handlr: @@ -3034,7 +3017,7 @@ def test_delete_func_with_id(self): entity.delete_puppetclass: 'puppetclass_id', entity.remove_ansible_role: 'ansible_role_id', } - for func in func_param_dict.keys(): + for func in func_param_dict: self.assertEqual(inspect.getfullargspec(func), EXPECTED_ARGSPEC) kwargs = {'kwarg': gen_integer(), 'data': {func_param_dict[func]: gen_integer()}} with mock.patch.object(entities, '_handle_response') as handlr: @@ -3185,7 +3168,7 @@ def test_add_func_with_id(self): """ entity = entities.Host(self.cfg, id=1) func_param_dict = {entity.add_ansible_role: 'ansible_role_id'} - for func in func_param_dict.keys(): + for func in func_param_dict: self.assertEqual(inspect.getfullargspec(func), EXPECTED_ARGSPEC) kwargs = {'kwarg': gen_integer(), 'data': {func_param_dict[func]: gen_integer()}} with mock.patch.object(entities, '_handle_response') as handlr: @@ -3215,7 +3198,7 @@ def test_delete_func_with_id(self): entity.delete_puppetclass: 'puppetclass_id', entity.remove_ansible_role: 'ansible_role_id', } - for func in func_param_dict.keys(): + for func in func_param_dict: self.assertEqual(inspect.getfullargspec(func), EXPECTED_ARGSPEC) kwargs = {'kwarg': gen_integer(), 'data': {func_param_dict[func]: gen_integer()}} with mock.patch.object(entities, '_handle_response') as handlr: @@ -3302,13 +3285,12 @@ def test_upload_content_v1(self): """ kwargs = {'kwarg': gen_integer()} - with mock.patch.object(client, 'post') as post: - with mock.patch.object( - entities, - '_handle_response', - return_value={'status': 'success'}, - ) as handler: - response = self.repo.upload_content(**kwargs) + with mock.patch.object(client, 'post') as post, mock.patch.object( + entities, + '_handle_response', + return_value={'status': 'success'}, + ) as handler: + response = self.repo.upload_content(**kwargs) self.assertEqual(post.call_count, 1) self.assertEqual(len(post.call_args[0]), 1) self.assertEqual(post.call_args[1], kwargs) @@ -3323,14 +3305,12 @@ def test_upload_content_v2(self): """ kwargs = {'kwarg': gen_integer()} - with mock.patch.object(client, 'post') as post: - with mock.patch.object( - entities, - '_handle_response', - return_value={'status': 'failure'}, - ) as handler: - with self.assertRaises(entities.APIResponseError): - self.repo.upload_content(**kwargs) + with mock.patch.object(client, 'post') as post, mock.patch.object( + entities, + '_handle_response', + return_value={'status': 'failure'}, + ) as handler, self.assertRaises(entities.APIResponseError): + self.repo.upload_content(**kwargs) self.assertEqual(post.call_count, 1) 
self.assertEqual(len(post.call_args[0]), 1) self.assertEqual(post.call_args[1], kwargs) @@ -3354,13 +3334,12 @@ def test_import_uploads_uploads(self): 'checksum': gen_string('numeric'), } ] - with mock.patch.object(client, 'put') as put: - with mock.patch.object( - entities, - '_handle_response', - return_value={'status': 'success'}, - ) as handler: - response = self.repo.import_uploads(uploads=uploads, **kwargs) + with mock.patch.object(client, 'put') as put, mock.patch.object( + entities, + '_handle_response', + return_value={'status': 'success'}, + ) as handler: + response = self.repo.import_uploads(uploads=uploads, **kwargs) self.assertEqual(put.call_count, 1) self.assertEqual(len(put.call_args[0]), 2) self.assertEqual(put.call_args[1], kwargs) @@ -3378,13 +3357,12 @@ def test_import_uploads_upload_ids(self): """ kwargs = {'kwarg': gen_integer()} upload_ids = [gen_string('numeric')] - with mock.patch.object(client, 'put') as put: - with mock.patch.object( - entities, - '_handle_response', - return_value={'status': 'success'}, - ) as handler: - response = self.repo.import_uploads(upload_ids=upload_ids, **kwargs) + with mock.patch.object(client, 'put') as put, mock.patch.object( + entities, + '_handle_response', + return_value={'status': 'success'}, + ) as handler: + response = self.repo.import_uploads(upload_ids=upload_ids, **kwargs) self.assertEqual(put.call_count, 1) self.assertEqual(len(put.call_args[0]), 2) self.assertEqual(put.call_args[1], kwargs) diff --git a/tests/test_entity_fields.py b/tests/test_entity_fields.py index 5f330eea..d0496610 100644 --- a/tests/test_entity_fields.py +++ b/tests/test_entity_fields.py @@ -1,7 +1,7 @@ """Unit tests for :mod:`nailgun.entity_fields`.""" import datetime -import socket from random import randint +import socket from unittest import TestCase from urllib.parse import urlparse diff --git a/tests/test_entity_mixins.py b/tests/test_entity_mixins.py index f92d11f2..4d749ca1 100644 --- a/tests/test_entity_mixins.py +++ b/tests/test_entity_mixins.py @@ -1,19 +1,18 @@ """Tests for :mod:`nailgun.entity_mixins`.""" import http.client as http_client -from unittest import mock -from unittest import TestCase +from unittest import TestCase, mock from fauxfactory import gen_integer from requests.exceptions import HTTPError -from nailgun import client -from nailgun import config -from nailgun import entity_mixins -from nailgun.entity_fields import IntegerField -from nailgun.entity_fields import ListField -from nailgun.entity_fields import OneToManyField -from nailgun.entity_fields import OneToOneField -from nailgun.entity_fields import StringField +from nailgun import client, config, entity_mixins +from nailgun.entity_fields import ( + IntegerField, + ListField, + OneToManyField, + OneToOneField, + StringField, +) # The size of this module is a direct reflection of the size of module # `nailgun.entity_mixins`. 
It would be good to split that module up, then split @@ -155,7 +154,7 @@ def setUp(self): def test_pass_in_emtpy_iterable(self): """Let the ``entity_objs_and_ids`` argument be an empty iterable.""" - for iterable in ([], tuple()): + for iterable in ([], ()): self.assertEqual( entity_mixins._make_entities_from_ids(SampleEntity, iterable, self.cfg), [], @@ -409,7 +408,6 @@ def test_eq(self): # Testing List nested objects mary.list = [alice] self.assertNotEqual(mary, mary_clone) - # noqa mary_clone.list = [alice_clone] self.assertEqual(mary, mary_clone) @@ -958,9 +956,8 @@ def test_delete_v1(self): entity_mixins.EntityDeleteMixin, 'delete_raw', return_value=response, - ): - with self.assertRaises(HTTPError): - self.entity.delete() + ), self.assertRaises(HTTPError): + self.entity.delete() def test_delete_v2(self): """What happens if the server returns an HTTP ACCEPTED status code?""" @@ -971,9 +968,8 @@ def test_delete_v2(self): entity_mixins.EntityDeleteMixin, 'delete_raw', return_value=response, - ) as delete_raw: - with mock.patch.object(entity_mixins, '_poll_task') as poll_task: - self.entity.delete() + ) as delete_raw, mock.patch.object(entity_mixins, '_poll_task') as poll_task: + self.entity.delete() self.assertEqual(delete_raw.call_count, 1) self.assertEqual(poll_task.call_count, 1) self.assertEqual( @@ -989,9 +985,8 @@ def test_delete_v3(self): entity_mixins.EntityDeleteMixin, 'delete_raw', return_value=response, - ): - with mock.patch.object(entity_mixins, '_poll_task') as poll_task: - self.assertEqual(self.entity.delete(), None) + ), mock.patch.object(entity_mixins, '_poll_task') as poll_task: + self.assertEqual(self.entity.delete(), None) self.assertEqual(poll_task.call_count, 0) def test_delete_v4(self): @@ -1016,9 +1011,8 @@ def test_delete_v5(self): entity_mixins.EntityDeleteMixin, 'delete_raw', return_value=response, - ): - with mock.patch.object(entity_mixins, '_poll_task') as poll_task: - self.assertEqual(self.entity.delete(), None) + ), mock.patch.object(entity_mixins, '_poll_task') as poll_task: + self.assertEqual(self.entity.delete(), None) self.assertEqual(poll_task.call_count, 0) def test_delete_v6(self): @@ -1033,9 +1027,8 @@ def test_delete_v6(self): entity_mixins.EntityDeleteMixin, 'delete_raw', return_value=response, - ): - with mock.patch.object(entity_mixins, '_poll_task') as poll_task: - self.assertEqual(self.entity.delete(), None) + ), mock.patch.object(entity_mixins, '_poll_task') as poll_task: + self.assertEqual(self.entity.delete(), None) self.assertEqual(poll_task.call_count, 0) @@ -1227,12 +1220,11 @@ def test_search_filter_v2(self): """ for filter_ in ({'one': 'foo'}, {'many': 'bar'}): - with self.subTest(filter_): - with self.assertRaises(NotImplementedError): - entity_mixins.EntitySearchMixin.search_filter( - [EntityWithSearch2(self.cfg)], - filter_, - ) + with self.subTest(filter_), self.assertRaises(NotImplementedError): + entity_mixins.EntitySearchMixin.search_filter( + [EntityWithSearch2(self.cfg)], + filter_, + ) def test_search_filter_v3(self): """Test :meth:`nailgun.entity_mixins.EntitySearchMixin.search_filter`.