diff --git a/pytest_fixtures/component/satellite_auth.py b/pytest_fixtures/component/satellite_auth.py
index c6df8df1897..9af5a98287a 100644
--- a/pytest_fixtures/component/satellite_auth.py
+++ b/pytest_fixtures/component/satellite_auth.py
@@ -456,10 +456,10 @@ def configure_hammer_no_negotiate(parametrized_enrolled_sat):
 def hammer_logout(parametrized_enrolled_sat):
     """Logout in Hammer."""
     result = parametrized_enrolled_sat.cli.Auth.logout()
-    assert result[0]['message'] == LOGGEDOUT
+    assert result.split("\n")[1] == LOGGEDOUT
     yield
     result = parametrized_enrolled_sat.cli.Auth.logout()
-    assert result[0]['message'] == LOGGEDOUT
+    assert result.split("\n")[1] == LOGGEDOUT
 
 
 @pytest.fixture
diff --git a/pytest_plugins/markers.py b/pytest_plugins/markers.py
index abf54997bd8..b7e0f8f6346 100644
--- a/pytest_plugins/markers.py
+++ b/pytest_plugins/markers.py
@@ -24,6 +24,7 @@ def pytest_configure(config):
         "no_containers: Disable container hosts from being used in favor of VMs",
         "include_capsule: For satellite-maintain tests to run on Satellite and Capsule both",
         "capsule_only: For satellite-maintain tests to run only on Capsules",
+        "manifester: Tests that require manifester",
     ]
     markers.extend(module_markers())
     for marker in markers:
diff --git a/requirements.txt b/requirements.txt
index 5922b56c189..9636539201e 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,7 @@ betelgeuse==1.11.0
 broker[docker]==0.4.9
 cryptography==42.0.5
 deepdiff==6.7.1
-dynaconf[vault]==3.2.4
+dynaconf[vault]==3.2.5
 fauxfactory==3.1.0
 jinja2==3.1.3
 manifester==0.0.14
diff --git a/robottelo/host_helpers/capsule_mixins.py b/robottelo/host_helpers/capsule_mixins.py
index 1f3769e0f22..0e7ffd21065 100644
--- a/robottelo/host_helpers/capsule_mixins.py
+++ b/robottelo/host_helpers/capsule_mixins.py
@@ -1,6 +1,8 @@
-from datetime import datetime
+from datetime import datetime, timedelta
 import time
 
+from dateutil.parser import parse
+
 from robottelo.constants import PUPPET_CAPSULE_INSTALLER, PUPPET_COMMON_INSTALLER_OPTS
 from robottelo.logging import logger
 from robottelo.utils.installer import InstallerCommand
@@ -60,26 +62,79 @@ def wait_for_tasks(
             raise AssertionError(f"No task was found using query '{search_query}'")
         return tasks
 
-    def wait_for_sync(self, timeout=600, start_time=None):
-        """Wait for capsule sync to finish and assert the sync task succeeded"""
-        # Assert that a task to sync lifecycle environment to the capsule
-        # is started (or finished already)
+    def wait_for_sync(self, start_time=None, timeout=600):
+        """Wait for capsule sync to finish and assert success.
+        Assert that a task to sync a lifecycle environment to the
+        capsule has started (or already finished) and succeeded.
+        :raises: ``AssertionError``: If any of the following sync verifications fails:
+
+            - Some active sync task(s) were found for the capsule, or a sync just finished (recent sync time).
+            - All polled active sync task(s) succeeded and the capsule's last_sync_time was updated.
+            - last_sync_time after the final task is on or newer than start_time.
+            - The total sync duration (seconds) is within the timeout and not negative.
+
+        :param start_time: (datetime) UTC time to compare against the capsule's last_sync_time.
+            Default: None (current UTC time).
+        :param timeout: (int) maximum seconds for active task(s) and queries to finish.
+
+        :return:
+            list of polled, finished tasks that were in progress in `active_sync_tasks`.
+        """
+        # Fetch initial capsule sync status
+        logger.info(f"Waiting for capsule {self.hostname} sync to finish ...")
+        sync_status = self.nailgun_capsule.content_get_sync(timeout=timeout, synchronous=True)
+        # Use the current UTC time for start_time, if not provided
         if start_time is None:
             start_time = datetime.utcnow().replace(microsecond=0)
-        logger.info(f"Waiting for capsule {self.hostname} sync to finish ...")
-        sync_status = self.nailgun_capsule.content_get_sync()
-        logger.info(f"Active tasks {sync_status['active_sync_tasks']}")
+        # 1s margin of safety for rounding
+        start_time = (
+            (start_time - timedelta(seconds=1))
+            .replace(microsecond=0)
+            .strftime('%Y-%m-%d %H:%M:%S UTC')
+        )
+        # Assert presence of recent sync activity:
+        # one or more ongoing sync task(s) for the capsule,
+        # or the capsule's last_sync_time is on or after start_time.
+        assert len(sync_status['active_sync_tasks']) or (
+            parse(sync_status['last_sync_time']) >= parse(start_time)
+        ), (
+            f"No active or recent sync found for capsule {self.hostname}."
+            f" `active_sync_tasks` was empty: {sync_status['active_sync_tasks']},"
+            f" and the `last_sync_time`: {sync_status['last_sync_time']},"
+            f" was prior to the `start_time`: {start_time}."
+        )
+        sync_tasks = []
+        # Poll any active sync tasks from the initial status and verify they succeeded.
+        logger.info(f"Active tasks: {sync_status['active_sync_tasks']}")
+        for task in sync_status['active_sync_tasks']:
+            sync_tasks.append(self.satellite.api.ForemanTask(id=task['id']).poll(timeout=timeout))
+            logger.info(f"Active sync task id: {task['id']} succeeded.")
+
+        # Fetch updated capsule status (expect no ongoing sync)
+        logger.info(f"Querying updated sync status from capsule {self.hostname}.")
+        updated_status = self.nailgun_capsule.content_get_sync(timeout=timeout, synchronous=True)
+        # The last sync task's end time must match the capsule's last sync time.
+        assert parse(updated_status['last_sync_time']) == parse(
+            updated_status['last_sync_task']['ended_at']
+        ), f"`last_sync_time` does not match final task's end time. Capsule: {self.hostname}"
+
+        # Total time taken must not be negative (sync prior to start_time)
+        # and must not exceed the timeout.
         assert (
-            len(sync_status['active_sync_tasks'])
-            or datetime.strptime(sync_status['last_sync_time'], '%Y-%m-%d %H:%M:%S UTC')
-            >= start_time
+            timedelta(seconds=0)
+            <= parse(updated_status['last_sync_time']) - parse(start_time)
+            <= timedelta(seconds=timeout)
+        ), (
+            f"No recent sync task(s) were found for capsule: {self.hostname}, or task(s) timed out."
+            f" `last_sync_time`: ({updated_status['last_sync_time']}) was prior to `start_time`: ({start_time})"
+            f" or exceeded timeout ({timeout}s)."
         )
+        # No failed or active tasks remaining
+        assert len(updated_status['last_failed_sync_tasks']) == 0
+        assert len(updated_status['active_sync_tasks']) == 0
 
-        # Wait till capsule sync finishes and assert the sync task succeeded
-        for task in sync_status['active_sync_tasks']:
-            self.satellite.api.ForemanTask(id=task['id']).poll(timeout=timeout)
-        sync_status = self.nailgun_capsule.content_get_sync()
-        assert len(sync_status['last_failed_sync_tasks']) == 0
+        # Return any polled sync tasks that were initially in progress
+        return sync_tasks
 
     def get_published_repo_url(self, org, prod, repo, lce=None, cv=None):
         """Forms url of a repo or CV published on a Satellite or Capsule.
diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py index 99c2bb816fc..fe471df8eed 100644 --- a/tests/foreman/api/test_capsulecontent.py +++ b/tests/foreman/api/test_capsulecontent.py @@ -12,19 +12,35 @@ :CaseImportance: High """ -from datetime import datetime + +from datetime import datetime, timedelta import re from time import sleep from nailgun import client +from nailgun.config import ServerConfig from nailgun.entity_mixins import call_entity_method_with_timeout import pytest +from requests.exceptions import HTTPError -from robottelo import constants from robottelo.config import settings from robottelo.constants import ( + CONTAINER_CLIENTS, CONTAINER_REGISTRY_HUB, CONTAINER_UPSTREAM_NAME, + ENVIRONMENT, + FAKE_1_YUM_REPOS_COUNT, + FAKE_3_YUM_REPO_RPMS, + FAKE_3_YUM_REPOS_COUNT, + FAKE_FILE_LARGE_COUNT, + FAKE_FILE_LARGE_URL, + FAKE_FILE_NEW_NAME, + KICKSTART_CONTENT, + PRDS, + REPOS, + REPOSET, + RH_CONTAINER_REGISTRY_HUB, + RPM_TO_UPLOAD, DataFile, ) from robottelo.constants.repos import ANSIBLE_GALAXY, CUSTOM_FILE_REPO @@ -33,9 +49,25 @@ get_repomd, get_repomd_revision, ) +from robottelo.utils.datafactory import gen_string from robottelo.utils.issue_handlers import is_open +@pytest.fixture +def default_non_admin_user(target_sat, default_org, default_location): + """Non-admin user with no roles assigned in the Default org/loc.""" + password = gen_string('alphanumeric') + user = target_sat.api.User( + login=gen_string('alpha'), + password=password, + organization=[default_org], + location=[default_location], + ).create() + user.password = password + yield user + user.delete() + + @pytest.mark.run_in_one_thread class TestCapsuleContentManagement: """Content Management related tests, which exercise katello with pulp @@ -83,14 +115,14 @@ def test_positive_uploaded_content_library_sync( assert repo.read().content_counts['rpm'] == 1 + timestamp = datetime.utcnow().replace(microsecond=0) # Publish new version of the content view cv.publish() + # query sync status as publish invokes sync, task succeeds + module_capsule_configured.wait_for_sync(start_time=timestamp) cv = cv.read() - assert len(cv.version) == 1 - module_capsule_configured.wait_for_sync() - # Verify the RPM published on Capsule caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -101,7 +133,7 @@ def test_positive_uploaded_content_library_sync( ) caps_files = get_repo_files_by_url(caps_repo_url) assert len(caps_files) == 1 - assert caps_files[0] == constants.RPM_TO_UPLOAD + assert caps_files[0] == RPM_TO_UPLOAD @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') @@ -149,13 +181,13 @@ def test_positive_checksum_sync( assert len(cv.version) == 1 cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify repodata's checksum type is sha256, not sha1 on capsule repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -182,13 +214,13 @@ def test_positive_checksum_sync( cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert 
len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify repodata's checksum type has updated to sha1 on capsule repomd = get_repomd(repo_url) checksum_types = re.findall(r'(?<=checksum type=").*?(?=")', repomd) @@ -257,12 +289,13 @@ def test_positive_sync_updated_repo( assert len(cv.version) == 1 cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Upload more content to the repository with open(DataFile.SRPM_TO_UPLOAD, 'rb') as handle: repo.upload_content(files={'content': handle}) @@ -276,12 +309,13 @@ def test_positive_sync_updated_repo( cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Check the content is synced on the Capsule side properly sat_repo_url = target_sat.get_published_repo_url( org=function_org.label, @@ -357,10 +391,18 @@ def test_positive_capsule_sync( assert len(cv.version) == 1 cvv = cv.version[-1].read() - # Promote content view to lifecycle environment + # prior to trigger (promoting), assert no active sync tasks + active_tasks = module_capsule_configured.nailgun_capsule.content_get_sync( + synchronous=True, timeout=600 + )['active_sync_tasks'] + assert len(active_tasks) == 0 + # Promote content view to lifecycle environment, + # invoking capsule sync task(s) + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 # Content of the published content view in @@ -368,8 +410,6 @@ def test_positive_capsule_sync( # repository assert repo.content_counts['rpm'] == cvv.package_count - module_capsule_configured.wait_for_sync() - # Assert that the content published on the capsule is exactly the # same as in repository on satellite sat_repo_url = target_sat.get_published_repo_url( @@ -404,14 +444,14 @@ def test_positive_capsule_sync( cv = cv.read() cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() - # Promote new content view version to lifecycle environment + # Promote new content view version to lifecycle environment, + # capsule sync task(s) invoked and succeed + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - - module_capsule_configured.wait_for_sync() - # Assert that the value of repomd revision of repository in # lifecycle environment on the capsule has not changed new_lce_revision_capsule = get_repomd_revision(caps_repo_url) @@ -427,21 +467,22 @@ def test_positive_capsule_sync( cv = cv.read() cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() + + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 # Assert that packages count in the repository is updated - assert repo.content_counts['rpm'] == 
(constants.FAKE_1_YUM_REPOS_COUNT + 1) + assert repo.content_counts['rpm'] == (FAKE_1_YUM_REPOS_COUNT + 1) # Assert that the content of the published content view in # lifecycle environment is exactly the same as content of the # repository assert repo.content_counts['rpm'] == cvv.package_count - module_capsule_configured.wait_for_sync() - # Assert that the content published on the capsule is exactly the # same as in the repository sat_files = get_repo_files_by_url(sat_repo_url) @@ -451,7 +492,7 @@ def test_positive_capsule_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients') def test_positive_iso_library_sync( - self, module_capsule_configured, module_entitlement_manifest_org, module_target_sat + self, module_capsule_configured, module_sca_manifest_org, module_target_sat ): """Ensure RH repo with ISOs after publishing to Library is synchronized to capsule automatically @@ -467,18 +508,18 @@ def test_positive_iso_library_sync( # Enable & sync RH repository with ISOs rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=module_entitlement_manifest_org.id, - product=constants.PRDS['rhsc'], - repo=constants.REPOS['rhsc7_iso']['name'], - reposet=constants.REPOSET['rhsc7_iso'], + org_id=module_sca_manifest_org.id, + product=PRDS['rhsc'], + repo=REPOS['rhsc7_iso']['name'], + reposet=REPOSET['rhsc7_iso'], releasever=None, ) rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() call_entity_method_with_timeout(rh_repo.sync, timeout=2500) # Find "Library" lifecycle env for specific organization lce = module_target_sat.api.LifecycleEnvironment( - organization=module_entitlement_manifest_org - ).search(query={'search': f'name={constants.ENVIRONMENT}'})[0] + organization=module_sca_manifest_org + ).search(query={'search': f'name={ENVIRONMENT}'})[0] # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -491,23 +532,23 @@ def test_positive_iso_library_sync( # Create a content view with the repository cv = module_target_sat.api.ContentView( - organization=module_entitlement_manifest_org, repository=[rh_repo] + organization=module_sca_manifest_org, repository=[rh_repo] ).create() # Publish new version of the content view + timestamp = datetime.utcnow() cv.publish() - cv = cv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cv = cv.read() assert len(cv.version) == 1 # Verify ISOs are present on satellite sat_isos = get_repo_files_by_url(rh_repo.full_path, extension='iso') assert len(sat_isos) == 4 - module_capsule_configured.wait_for_sync() - # Verify all the ISOs are present on capsule caps_path = ( - f'{module_capsule_configured.url}/pulp/content/{module_entitlement_manifest_org.label}' + f'{module_capsule_configured.url}/pulp/content/{module_sca_manifest_org.label}' f'/{lce.label}/{cv.label}/content/dist/rhel/server/7/7Server/x86_64/sat-capsule/6.4/' 'iso/' ) @@ -540,8 +581,8 @@ def test_positive_on_demand_sync( the original package from the upstream repo """ repo_url = settings.repos.yum_3.url - packages_count = constants.FAKE_3_YUM_REPOS_COUNT - package = constants.FAKE_3_YUM_REPO_RPMS[0] + packages_count = FAKE_3_YUM_REPOS_COUNT + package = FAKE_3_YUM_REPO_RPMS[0] repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', @@ -573,13 +614,13 @@ def test_positive_on_demand_sync( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() 
cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify packages on Capsule match the source caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -624,7 +665,7 @@ def test_positive_update_with_immediate_sync( filesystem contains valid links to packages """ repo_url = settings.repos.yum_1.url - packages_count = constants.FAKE_1_YUM_REPOS_COUNT + packages_count = FAKE_1_YUM_REPOS_COUNT repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', @@ -655,13 +696,13 @@ def test_positive_update_with_immediate_sync( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Update download policy to 'immediate' repo.download_policy = 'immediate' repo = repo.update(['download_policy']) @@ -683,13 +724,13 @@ def test_positive_update_with_immediate_sync( cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify the count of RPMs published on Capsule caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -730,7 +771,7 @@ def test_positive_capsule_pub_url_accessible(self, module_capsule_configured): @pytest.mark.skip_if_not_set('capsule', 'clients') @pytest.mark.parametrize('distro', ['rhel7', 'rhel8_bos', 'rhel9_bos']) def test_positive_sync_kickstart_repo( - self, target_sat, module_capsule_configured, function_entitlement_manifest_org, distro + self, target_sat, module_capsule_configured, function_sca_manifest_org, distro ): """Sync kickstart repository to the capsule. 
@@ -751,16 +792,14 @@ def test_positive_sync_kickstart_repo( """ repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=function_entitlement_manifest_org.id, - product=constants.REPOS['kickstart'][distro]['product'], - reposet=constants.REPOS['kickstart'][distro]['reposet'], - repo=constants.REPOS['kickstart'][distro]['name'], - releasever=constants.REPOS['kickstart'][distro]['version'], + org_id=function_sca_manifest_org.id, + product=REPOS['kickstart'][distro]['product'], + reposet=REPOS['kickstart'][distro]['reposet'], + repo=REPOS['kickstart'][distro]['name'], + releasever=REPOS['kickstart'][distro]['version'], ) repo = target_sat.api.Repository(id=repo_id).read() - lce = target_sat.api.LifecycleEnvironment( - organization=function_entitlement_manifest_org - ).create() + lce = target_sat.api.LifecycleEnvironment(organization=function_sca_manifest_org).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': lce.id} @@ -775,7 +814,7 @@ def test_positive_sync_kickstart_repo( # Create a content view with the repository cv = target_sat.api.ContentView( - organization=function_entitlement_manifest_org, repository=[repo] + organization=function_sca_manifest_org, repository=[repo] ).create() # Sync repository repo.sync(timeout='10m') @@ -788,26 +827,26 @@ def test_positive_sync_kickstart_repo( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Check for kickstart content on SAT and CAPS tail = ( - f'rhel/server/7/{constants.REPOS["kickstart"][distro]["version"]}/x86_64/kickstart' + f'rhel/server/7/{REPOS["kickstart"][distro]["version"]}/x86_64/kickstart' if distro == 'rhel7' - else f'{distro.split("_")[0]}/{constants.REPOS["kickstart"][distro]["version"]}/x86_64/baseos/kickstart' # noqa:E501 + else f'{distro.split("_")[0]}/{REPOS["kickstart"][distro]["version"]}/x86_64/baseos/kickstart' # noqa:E501 ) url_base = ( - f'pulp/content/{function_entitlement_manifest_org.label}/{lce.label}/{cv.label}/' + f'pulp/content/{function_sca_manifest_org.label}/{lce.label}/{cv.label}/' f'content/dist/{tail}' ) # Check kickstart specific files - for file in constants.KICKSTART_CONTENT: + for file in KICKSTART_CONTENT: sat_file = target_sat.md5_by_url(f'{target_sat.url}/{url_base}/{file}') caps_file = target_sat.md5_by_url(f'{module_capsule_configured.url}/{url_base}/{file}') assert sat_file == caps_file @@ -887,12 +926,13 @@ def test_positive_sync_container_repo_end_to_end( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Pull the images from capsule to the content host repo_paths = [ ( @@ -902,7 +942,7 @@ def test_positive_sync_container_repo_end_to_end( for repo in repos ] - for con_client in constants.CONTAINER_CLIENTS: + for con_client in CONTAINER_CLIENTS: result = container_contenthost.execute( f'{con_client} login -u {settings.server.admin_username}' f' -p {settings.server.admin_password} 
{module_capsule_configured.hostname}' @@ -1005,10 +1045,12 @@ def test_positive_sync_collection_repo( assert function_lce_library.id in [capsule_lce['id'] for capsule_lce in result['results']] # Sync the repo + timestamp = datetime.utcnow() repo.sync(timeout=600) repo = repo.read() assert repo.content_counts['ansible_collection'] == 2 - module_capsule_configured.wait_for_sync() + + module_capsule_configured.wait_for_sync(start_time=timestamp) repo_path = repo.full_path.replace(target_sat.hostname, module_capsule_configured.hostname) coll_path = './collections' @@ -1063,7 +1105,7 @@ def test_positive_sync_file_repo( repo = target_sat.api.Repository( content_type='file', product=function_product, - url=constants.FAKE_FILE_LARGE_URL, + url=FAKE_FILE_LARGE_URL, ).create() repo.sync() @@ -1087,12 +1129,13 @@ def test_positive_sync_file_repo( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Run one more sync, check for status (BZ#1985122) sync_status = module_capsule_configured.nailgun_capsule.content_sync() assert sync_status['result'] == 'success' @@ -1114,8 +1157,8 @@ def test_positive_sync_file_repo( ) sat_files = get_repo_files_by_url(sat_repo_url, extension='iso') caps_files = get_repo_files_by_url(caps_repo_url, extension='iso') - assert len(sat_files) == len(caps_files) == constants.FAKE_FILE_LARGE_COUNT + 1 - assert constants.FAKE_FILE_NEW_NAME in caps_files + assert len(sat_files) == len(caps_files) == FAKE_FILE_LARGE_COUNT + 1 + assert FAKE_FILE_NEW_NAME in caps_files assert sat_files == caps_files for file in sat_files: @@ -1126,7 +1169,7 @@ def test_positive_sync_file_repo( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_sync_CV_to_multiple_LCEs( - self, target_sat, module_capsule_configured, module_manifest_org + self, target_sat, module_capsule_configured, module_sca_manifest_org ): """Synchronize a CV to multiple LCEs at the same time. All sync tasks should succeed. @@ -1151,19 +1194,19 @@ def test_positive_sync_CV_to_multiple_LCEs( # Sync a repository to the Satellite. repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=module_manifest_org.id, - product=constants.PRDS['rhel'], - repo=constants.REPOS['rhel7_extra']['name'], - reposet=constants.REPOSET['rhel7_extra'], + org_id=module_sca_manifest_org.id, + product=PRDS['rhel'], + repo=REPOS['rhel7_extra']['name'], + reposet=REPOSET['rhel7_extra'], releasever=None, ) repo = target_sat.api.Repository(id=repo_id).read() repo.sync() # Create two LCEs, assign them to the Capsule. - lce1 = target_sat.api.LifecycleEnvironment(organization=module_manifest_org).create() + lce1 = target_sat.api.LifecycleEnvironment(organization=module_sca_manifest_org).create() lce2 = target_sat.api.LifecycleEnvironment( - organization=module_manifest_org, prior=lce1 + organization=module_sca_manifest_org, prior=lce1 ).create() module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': [lce1.id, lce2.id]} @@ -1175,7 +1218,7 @@ def test_positive_sync_CV_to_multiple_LCEs( # Create a Content View, add the repository and publish it. 
cv = target_sat.api.ContentView( - organization=module_manifest_org, repository=[repo] + organization=module_sca_manifest_org, repository=[repo] ).create() cv.publish() cv = cv.read() @@ -1183,16 +1226,20 @@ def test_positive_sync_CV_to_multiple_LCEs( # Promote the CV to both Capsule's LCEs without waiting for Capsule sync task completion. cvv = cv.version[-1].read() + assert len(cvv.environment) == 1 + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce1.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce2.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 3 - # Check all sync tasks finished without errors. - module_capsule_configured.wait_for_sync() - @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_capsule_sync_status_persists( @@ -1235,7 +1282,8 @@ def test_positive_capsule_sync_status_persists( cvv = cv.version[-1].read() timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - module_capsule_configured.wait_for_sync() + + module_capsule_configured.wait_for_sync(start_time=timestamp) # Delete all capsule sync tasks so that we fall back for audits. task_result = target_sat.execute( @@ -1265,7 +1313,7 @@ def test_positive_remove_capsule_orphans( target_sat, pytestconfig, capsule_configured, - function_entitlement_manifest_org, + function_sca_manifest_org, function_lce_library, ): """Synchronize RPM content to the capsule, disassociate the capsule form the content @@ -1293,15 +1341,15 @@ def test_positive_remove_capsule_orphans( :BZ: 22043089, 2211962 """ - if not pytestconfig.option.n_minus: + if pytestconfig.option.n_minus: pytest.skip('Test cannot be run on n-minus setups session-scoped capsule') # Enable RHST repo and sync it to the Library LCE. repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=function_entitlement_manifest_org.id, - product=constants.REPOS['rhst8']['product'], - repo=constants.REPOS['rhst8']['name'], - reposet=constants.REPOSET['rhst8'], + org_id=function_sca_manifest_org.id, + product=REPOS['rhst8']['product'], + repo=REPOS['rhst8']['name'], + reposet=REPOSET['rhst8'], ) repo = target_sat.api.Repository(id=repo_id).read() repo.sync() @@ -1334,13 +1382,20 @@ def test_positive_remove_capsule_orphans( sync_status = capsule_configured.nailgun_capsule.content_sync() assert sync_status['result'] == 'success', 'Capsule sync task failed.' + # datetime string (local time) to search for proper task. + timestamp = (datetime.now().replace(microsecond=0) - timedelta(seconds=1)).strftime( + '%B %d, %Y at %I:%M:%S %p' + ) # Run orphan cleanup for the capsule. 
target_sat.execute( 'foreman-rake katello:delete_orphaned_content RAILS_ENV=production ' f'SMART_PROXY_ID={capsule_configured.nailgun_capsule.id}' ) target_sat.wait_for_tasks( - search_query=('label = Actions::Katello::OrphanCleanup::RemoveOrphans'), + search_query=( + 'label = Actions::Katello::OrphanCleanup::RemoveOrphans' + f' and started_at >= "{timestamp}"' + ), search_rate=5, max_tries=10, ) @@ -1391,7 +1446,7 @@ def test_positive_capsule_sync_openstack_container_repos( content_type='docker', docker_upstream_name=ups_name, product=function_product, - url=constants.RH_CONTAINER_REGISTRY_HUB, + url=RH_CONTAINER_REGISTRY_HUB, upstream_username=settings.subscription.rhn_username, upstream_password=settings.subscription.rhn_password, ).create() @@ -1414,12 +1469,13 @@ def test_positive_capsule_sync_openstack_container_repos( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - @pytest.mark.parametrize( 'repos_collection', [ @@ -1595,3 +1651,67 @@ def test_positive_content_counts_blank_update( assert ( counts is None or len(counts['content_view_versions']) == 0 ), f"No content counts expected, but got:\n{counts['content_view_versions']}." + + def test_positive_read_with_non_admin_user( + self, + target_sat, + module_capsule_configured, + default_org, + default_non_admin_user, + ): + """Try to list and read Capsules with a non-admin user with and without permissions. + + :id: f3ee19fa-9b91-4b49-b00a-8debee903ce6 + + :setup: + 1. Satellite with registered external Capsule. + 2. Non-admin user without any roles/permissions. + + :steps: + 1. Using the non-admin user try to list all or particular Capsule. + 2. Add Viewer role to the user and try again. + + :expectedresults: + 1. Read should fail without Viewer role. + 2. Read should succeed when Viewer role added. + + :BZ: 2096930 + + :customerscenario: true + """ + # Using the non-admin user try to list all or particular Capsule + user = default_non_admin_user + sc = ServerConfig( + auth=(user.login, user.password), + url=target_sat.url, + verify=settings.server.verify_ca, + ) + + with pytest.raises(HTTPError) as error: + target_sat.api.Capsule(server_config=sc).search() + assert error.value.response.status_code == 403 + assert 'Access denied' in error.value.response.text + + with pytest.raises(HTTPError) as error: + target_sat.api.Capsule( + server_config=sc, id=module_capsule_configured.nailgun_capsule.id + ).read() + assert error.value.response.status_code == 403 + assert 'Access denied' in error.value.response.text + + # Add Viewer role to the user and try again. + v_role = target_sat.api.Role().search(query={'search': 'name="Viewer"'}) + assert len(v_role) == 1, 'Expected just one Viewer to be found.' + user.role = [v_role[0]] + user.update(['role']) + + res = target_sat.api.Capsule(server_config=sc).search() + assert len(res) >= 2, 'Expected at least one internal and one or more external Capsule(s).' + assert {target_sat.hostname, module_capsule_configured.hostname}.issubset( + [caps.name for caps in res] + ), 'Internal and/or external Capsule was not listed.' + + res = target_sat.api.Capsule( + server_config=sc, id=module_capsule_configured.nailgun_capsule.id + ).read() + assert res.name == module_capsule_configured.hostname, 'External Capsule not found.' 
diff --git a/tests/foreman/api/test_computeresource_libvirt.py b/tests/foreman/api/test_computeresource_libvirt.py index 5189af9d445..ad95fb9f202 100644 --- a/tests/foreman/api/test_computeresource_libvirt.py +++ b/tests/foreman/api/test_computeresource_libvirt.py @@ -113,9 +113,9 @@ def test_positive_create_with_name_description( location=[module_location], url=LIBVIRT_URL, ).create() + request.addfinalizer(compresource.delete) assert compresource.name == name assert compresource.description == name - request.addfinalizer(compresource.delete) @pytest.mark.tier2 @@ -134,9 +134,9 @@ def test_positive_create_with_orgs_and_locs(request, module_target_sat): compresource = module_target_sat.api.LibvirtComputeResource( location=locs, organization=orgs, url=LIBVIRT_URL ).create() + request.addfinalizer(compresource.delete) assert {org.name for org in orgs} == {org.read().name for org in compresource.organization} assert {loc.name for loc in locs} == {loc.read().name for loc in compresource.location} - request.addfinalizer(compresource.delete) @pytest.mark.tier2 @@ -175,8 +175,8 @@ def test_negative_create_with_same_name(request, module_target_sat, module_org, cr = module_target_sat.api.LibvirtComputeResource( location=[module_location], name=name, organization=[module_org], url=LIBVIRT_URL ).create() - assert cr.name == name request.addfinalizer(cr.delete) + assert cr.name == name with pytest.raises(HTTPError): module_target_sat.api.LibvirtComputeResource( name=name, @@ -245,19 +245,16 @@ def test_negative_update_same_name(request, module_target_sat, module_org, modul compresource = module_target_sat.api.LibvirtComputeResource( location=[module_location], name=name, organization=[module_org], url=LIBVIRT_URL ).create() + request.addfinalizer(compresource.delete) new_compresource = module_target_sat.api.LibvirtComputeResource( location=[module_location], organization=[module_org], url=LIBVIRT_URL ).create() + request.addfinalizer(new_compresource.delete) new_compresource.name = name with pytest.raises(HTTPError): new_compresource.update(['name']) assert new_compresource.read().name != name - @request.addfinalizer - def _finalize(): - compresource.delete() - new_compresource.delete() - @pytest.mark.tier2 @pytest.mark.parametrize('url', **parametrized({'random': gen_string('alpha'), 'empty': ''})) diff --git a/tests/foreman/api/test_discoveryrule.py b/tests/foreman/api/test_discoveryrule.py index f5dffd5be14..3d1c1f4bf48 100644 --- a/tests/foreman/api/test_discoveryrule.py +++ b/tests/foreman/api/test_discoveryrule.py @@ -171,8 +171,11 @@ def test_positive_multi_provision_with_rule_limit( :CaseImportance: High """ + discovered_host1 = module_target_sat.api_factory.create_discovered_host() + request.addfinalizer(module_target_sat.api.Host(id=discovered_host1['id']).delete) discovered_host2 = module_target_sat.api_factory.create_discovered_host() + request.addfinalizer(module_target_sat.api.DiscoveredHost(id=discovered_host2['id']).delete) rule = module_target_sat.api.DiscoveryRule( max_count=1, hostgroup=module_discovery_hostgroup, @@ -181,14 +184,6 @@ def test_positive_multi_provision_with_rule_limit( organization=[discovery_org], priority=1000, ).create() + request.addfinalizer(rule.delete) result = module_target_sat.api.DiscoveredHost().auto_provision_all() assert '1 discovered hosts were provisioned' in result['message'] - - # Delete discovery rule - @request.addfinalizer - def _finalize(): - rule.delete() - module_target_sat.api.Host(id=discovered_host1['id']).delete() - 
module_target_sat.api.DiscoveredHost(id=discovered_host2['id']).delete() - with pytest.raises(HTTPError): - rule.read() diff --git a/tests/foreman/api/test_http_proxy.py b/tests/foreman/api/test_http_proxy.py index 50012a0ab70..1bd00051e67 100644 --- a/tests/foreman/api/test_http_proxy.py +++ b/tests/foreman/api/test_http_proxy.py @@ -303,6 +303,11 @@ def test_positive_sync_proxy_with_certificate(request, target_sat, module_org, m :customerscenario: true """ + + @request.addfinalizer + def _finalize(): + target_sat.custom_certs_cleanup() + # Cleanup any existing certs that may conflict target_sat.custom_certs_cleanup() proxy_host = settings.http_proxy.auth_proxy_url.replace('http://', '').replace(':3128', '') diff --git a/tests/foreman/api/test_parameters.py b/tests/foreman/api/test_parameters.py index 1e415d15dc8..5d70374a546 100644 --- a/tests/foreman/api/test_parameters.py +++ b/tests/foreman/api/test_parameters.py @@ -39,7 +39,9 @@ def test_positive_parameter_precedence_impact( param_value = gen_string('alpha') cp = module_target_sat.api.CommonParameter(name=param_name, value=param_value).create() + request.addfinalizer(cp.delete) host = module_target_sat.api.Host(organization=module_org, location=module_location).create() + request.addfinalizer(host.delete) result = [res for res in host.all_parameters if res['name'] == param_name] assert result[0]['name'] == param_name assert result[0]['associated_type'] == 'global' @@ -48,6 +50,7 @@ def test_positive_parameter_precedence_impact( organization=[module_org], group_parameters_attributes=[{'name': param_name, 'value': param_value}], ).create() + request.addfinalizer(hg.delete) host.hostgroup = hg host = host.update(['hostgroup']) result = [res for res in host.all_parameters if res['name'] == param_name] @@ -55,12 +58,6 @@ def test_positive_parameter_precedence_impact( assert result[0]['associated_type'] != 'global' assert result[0]['associated_type'] == 'host group' - @request.addfinalizer - def _finalize(): - host.delete() - hg.delete() - cp.delete() - host.host_parameters_attributes = [{'name': param_name, 'value': param_value}] host = host.update(['host_parameters_attributes']) result = [res for res in host.all_parameters if res['name'] == param_name] diff --git a/tests/foreman/api/test_repository.py b/tests/foreman/api/test_repository.py index 206dddb976f..728249f097c 100644 --- a/tests/foreman/api/test_repository.py +++ b/tests/foreman/api/test_repository.py @@ -2295,13 +2295,14 @@ def test_positive_upload_file_to_file_repo(self, repo, target_sat): :CaseAutomation: Automated """ - repo.upload_content(files={'content': DataFile.RPM_TO_UPLOAD.read_bytes()}) + with open(DataFile.FAKE_FILE_NEW_NAME, 'rb') as handle: + repo.upload_content(files={'content': handle}) assert repo.read().content_counts['file'] == 1 filesearch = target_sat.api.File().search( - query={"search": f"name={constants.RPM_TO_UPLOAD}"} + query={"search": f"name={constants.FAKE_FILE_NEW_NAME}"} ) - assert filesearch[0].name == constants.RPM_TO_UPLOAD + assert filesearch[0].name == constants.FAKE_FILE_NEW_NAME @pytest.mark.tier1 @pytest.mark.upgrade diff --git a/tests/foreman/cli/test_classparameters.py b/tests/foreman/cli/test_classparameters.py index a4e75c1f76d..022ceed0aac 100644 --- a/tests/foreman/cli/test_classparameters.py +++ b/tests/foreman/cli/test_classparameters.py @@ -86,8 +86,8 @@ def test_positive_list( location=module_puppet_loc.id, environment=module_puppet['env'].name, ).create() - host.add_puppetclass(data={'puppetclass_id': 
module_puppet['class']['id']}) request.addfinalizer(host.delete) + host.add_puppetclass(data={'puppetclass_id': module_puppet['class']['id']}) hostgroup = session_puppet_enabled_sat.cli_factory.hostgroup( { 'puppet-environment-id': module_puppet['env'].id, diff --git a/tests/foreman/cli/test_computeresource_osp.py b/tests/foreman/cli/test_computeresource_osp.py index d88daaf1ad8..ff6779056ae 100644 --- a/tests/foreman/cli/test_computeresource_osp.py +++ b/tests/foreman/cli/test_computeresource_osp.py @@ -76,6 +76,7 @@ def test_crud_and_duplicate_name(self, request, id_type, osp_version, target_sat 'url': osp_version, } ) + request.addfinalizer(lambda: self.cr_cleanup(compute_resource['id'], id_type, target_sat)) assert compute_resource['name'] == name assert target_sat.cli.ComputeResource.exists(search=(id_type, compute_resource[id_type])) @@ -102,7 +103,6 @@ def test_crud_and_duplicate_name(self, request, id_type, osp_version, target_sat else: compute_resource = target_sat.cli.ComputeResource.info({'id': compute_resource['id']}) assert new_name == compute_resource['name'] - request.addfinalizer(lambda: self.cr_cleanup(compute_resource['id'], id_type, target_sat)) @pytest.mark.tier3 def test_negative_create_osp_with_url(self, target_sat): diff --git a/tests/foreman/cli/test_contentview.py b/tests/foreman/cli/test_contentview.py index 9b261f57218..82f12ca1faa 100644 --- a/tests/foreman/cli/test_contentview.py +++ b/tests/foreman/cli/test_contentview.py @@ -4065,6 +4065,75 @@ def test_version_info_by_lce(self, module_org, module_target_sat): ) assert content_view['version'] == '1.0' + def test_positive_validate_force_promote_warning(self, target_sat, function_org): + """Test cv promote shows warning of 'force promotion' for out of sequence LCE + + :id: 1bfb76be-ab40-48b4-b5a3-428a2a9ab99b + + :steps: + 1. Create an environment path ex- Library >> Test >> Preprod >> Prod + 2. Create a CV and publish into the Library + 3. Promote version 1.0 to Preprod, skip Test, this should fail with warning + 4. Promote version 1.0 to Preprod using force, this should success + 5. Try to promote version 1.0 from Preprod to Prod, this should success without warning + + :expectedresults: + 1. CV version 1.0 should be present on Prod LCE + + :CaseImportance: High + + :BZ: 2125728 + + :customerscenario: true + """ + # Create an environment path ex- Library >> Test >> Preprod >> Prod + lce_test = target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': function_org.id} + ) + lce_preprod = target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': function_org.id, 'prior': lce_test['name']} + ) + lce_prod = target_sat.cli_factory.make_lifecycle_environment( + {'organization-id': function_org.id, 'prior': lce_preprod['name']} + ) + + # Create a CV and publish into the Library + cv = target_sat.cli_factory.make_content_view({'organization-id': function_org.id}) + target_sat.cli.ContentView.publish({'id': cv['id']}) + + # Promote version 1.0 to Preprod, skip Test, this should fail with warning + cv_version = target_sat.cli.ContentView.info({'id': cv['id']})['versions'][0] + with pytest.raises(CLIReturnCodeError) as error: + target_sat.cli.ContentView.version_promote( + {'id': cv_version['id'], 'to-lifecycle-environment-id': lce_preprod['id']} + ) + assert ( + 'Cannot promote environment out of sequence. 
Use force to bypass restriction' + in error.value.stderr + ) + + # Promote version 1.0 to Preprod using force, this should success + target_sat.cli.ContentView.version_promote( + { + 'id': cv_version['id'], + 'to-lifecycle-environment-id': lce_preprod['id'], + 'force': True, + } + ) + promoted_lce = target_sat.cli.ContentView.info({'id': cv['id']})['lifecycle-environments'][ + -1 + ] + assert lce_preprod['id'] == promoted_lce['id'] + + # Try to promote version 1.0 from Preprod to Prod, this should success without warning + target_sat.cli.ContentView.version_promote( + {'id': cv_version['id'], 'to-lifecycle-environment-id': lce_prod['id']} + ) + promoted_lce = target_sat.cli.ContentView.info({'id': cv['id']})['lifecycle-environments'][ + -1 + ] + assert lce_prod['id'] == promoted_lce['id'] + class TestContentViewFileRepo: """Specific tests for Content Views with File Repositories containing diff --git a/tests/foreman/cli/test_discoveredhost.py b/tests/foreman/cli/test_discoveredhost.py index 2cd1297224e..e578f35e1ac 100644 --- a/tests/foreman/cli/test_discoveredhost.py +++ b/tests/foreman/cli/test_discoveredhost.py @@ -70,10 +70,8 @@ def test_rhel_pxe_discovery_provisioning( assert 'Host created' in result[0]['message'] host = sat.api.Host().search(query={"search": f'id={discovered_host.id}'})[0] - assert host - - # teardown request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) + assert host wait_for( lambda: host.read().build_status_label != 'Pending installation', @@ -131,10 +129,8 @@ def test_rhel_pxeless_discovery_provisioning( ) assert 'Host created' in result[0]['message'] host = sat.api.Host().search(query={"search": f'id={discovered_host.id}'})[0] - assert host - - # teardown request.addfinalizer(lambda: sat.provisioning_cleanup(host.name)) + assert host wait_for( lambda: host.read().build_status_label != 'Pending installation', diff --git a/tests/foreman/cli/test_errata.py b/tests/foreman/cli/test_errata.py index 95bdbc3d256..4af3d9be2d3 100644 --- a/tests/foreman/cli/test_errata.py +++ b/tests/foreman/cli/test_errata.py @@ -688,16 +688,11 @@ def test_positive_list_affected_chosts_by_erratum_restrict_flag( 'inclusion': 'false', } ) - - @request.addfinalizer - def cleanup(): - cv_filter_cleanup( - target_sat, - cv_filter['filter-id'], - module_cv, - module_sca_manifest_org, - module_lce, + request.addfinalizer( + lambda: cv_filter_cleanup( + target_sat, cv_filter['filter-id'], module_cv, module_sca_manifest_org, module_lce ) + ) # Make rule to hide the RPM that creates the need for the installable erratum target_sat.cli_factory.content_view_filter_rule( @@ -861,17 +856,11 @@ def test_host_errata_search_commands( 'inclusion': 'false', } ) - - @request.addfinalizer - def cleanup(): - cv_filter_cleanup( - target_sat, - cv_filter['filter-id'], - module_cv, - module_sca_manifest_org, - module_lce, + request.addfinalizer( + lambda: cv_filter_cleanup( + target_sat, cv_filter['filter-id'], module_cv, module_sca_manifest_org, module_lce ) - + ) # Make rule to exclude the specified bugfix package target_sat.cli_factory.content_view_filter_rule( { diff --git a/tests/foreman/cli/test_hammer.py b/tests/foreman/cli/test_hammer.py index c795352c5ed..aa3bfcf4aeb 100644 --- a/tests/foreman/cli/test_hammer.py +++ b/tests/foreman/cli/test_hammer.py @@ -136,6 +136,13 @@ def test_positive_disable_hammer_defaults(request, function_product, target_sat) :BZ: 1640644, 1368173 """ + + @request.addfinalizer + def _finalize(): + target_sat.cli.Defaults.delete({'param-name': 'organization_id'}) + 
result = target_sat.execute('hammer defaults list') + assert str(function_product.organization.id) not in result.stdout + target_sat.cli.Defaults.add( {'param-name': 'organization_id', 'param-value': function_product.organization.id} ) @@ -154,12 +161,6 @@ def test_positive_disable_hammer_defaults(request, function_product, target_sat) assert result.status == 0 assert function_product.name in result.stdout - @request.addfinalizer - def _finalize(): - target_sat.cli.Defaults.delete({'param-name': 'organization_id'}) - result = target_sat.execute('hammer defaults list') - assert str(function_product.organization.id) not in result.stdout - @pytest.mark.upgrade def test_positive_check_debug_log_levels(target_sat): diff --git a/tests/foreman/cli/test_ldapauthsource.py b/tests/foreman/cli/test_ldapauthsource.py index ce7b625c3a6..e3cb8bb32ab 100644 --- a/tests/foreman/cli/test_ldapauthsource.py +++ b/tests/foreman/cli/test_ldapauthsource.py @@ -134,7 +134,7 @@ def test_positive_refresh_usergroup_with_ad(self, member_group, ad_data, module_ result = module_target_sat.cli.Auth.with_user( username=ad_data['ldap_user_name'], password=ad_data['ldap_user_passwd'] ).status() - assert LOGEDIN_MSG.format(ad_data['ldap_user_name']) in result[0]['message'] + assert LOGEDIN_MSG.format(ad_data['ldap_user_name']) in result.split("\n")[1] module_target_sat.cli.UserGroupExternal.refresh( {'user-group-id': user_group['id'], 'name': member_group} ) diff --git a/tests/foreman/cli/test_satellitesync.py b/tests/foreman/cli/test_satellitesync.py index 1f177a2b053..90dcb18c6d4 100644 --- a/tests/foreman/cli/test_satellitesync.py +++ b/tests/foreman/cli/test_satellitesync.py @@ -2272,6 +2272,7 @@ def test_positive_custom_cdn_with_credential( meta_file = 'metadata.json' crt_file = 'source.crt' pub_dir = '/var/www/html/pub/repos' + request.addfinalizer(lambda: target_sat.execute(f'rm -rf {pub_dir}')) # Export the repository in syncable format and move it # to /var/www/html/pub/repos to mimic custom CDN. 
@@ -2288,7 +2289,6 @@ def test_positive_custom_cdn_with_credential( exp_dir = exp_dir[0].replace(meta_file, '') assert target_sat.execute(f'mv {exp_dir} {pub_dir}').status == 0 - request.addfinalizer(lambda: target_sat.execute(f'rm -rf {pub_dir}')) target_sat.execute(f'semanage fcontext -a -t httpd_sys_content_t "{pub_dir}(/.*)?"') target_sat.execute(f'restorecon -R {pub_dir}') diff --git a/tests/foreman/cli/test_subnet.py b/tests/foreman/cli/test_subnet.py index 12272ed57c2..74a6e1727fc 100644 --- a/tests/foreman/cli/test_subnet.py +++ b/tests/foreman/cli/test_subnet.py @@ -199,8 +199,8 @@ def test_negative_update_attributes(request, options, module_target_sat): :CaseImportance: Medium """ subnet = module_target_sat.cli_factory.make_subnet() - options['id'] = subnet['id'] request.addfinalizer(lambda: module_target_sat.cli.Subnet.delete({'id': subnet['id']})) + options['id'] = subnet['id'] with pytest.raises(CLIReturnCodeError, match='Could not update the subnet:'): module_target_sat.cli.Subnet.update(options) # check - subnet is not updated @@ -223,8 +223,8 @@ def test_negative_update_address_pool(request, options, module_target_sat): :CaseImportance: Medium """ subnet = module_target_sat.cli_factory.make_subnet() - opts = {'id': subnet['id']} request.addfinalizer(lambda: module_target_sat.cli.Subnet.delete({'id': subnet['id']})) + opts = {'id': subnet['id']} # generate pool range from network address for key, val in options.items(): opts[key] = re.sub(r'\d+$', str(val), subnet['network-addr']) diff --git a/tests/foreman/conftest.py b/tests/foreman/conftest.py index e867e118be3..339eb6016f9 100644 --- a/tests/foreman/conftest.py +++ b/tests/foreman/conftest.py @@ -30,6 +30,8 @@ def pytest_collection_modifyitems(session, items, config): deselected_items = [] for item in items: + if any("manifest" in f for f in getattr(item, "fixturenames", ())): + item.add_marker("manifester") # 1. Deselect tests marked with @pytest.mark.deselect # WONTFIX BZs makes test to be dynamically marked as deselect. deselect = item.get_closest_marker('deselect') diff --git a/tests/foreman/destructive/test_capsule_loadbalancer.py b/tests/foreman/destructive/test_capsule_loadbalancer.py index 67bc0762c11..3949d2bf71a 100644 --- a/tests/foreman/destructive/test_capsule_loadbalancer.py +++ b/tests/foreman/destructive/test_capsule_loadbalancer.py @@ -191,6 +191,7 @@ def test_loadbalancer_install_package( registration. 
""" + # Register content host result = rhel7_contenthost.register( org=module_org, @@ -219,6 +220,9 @@ def test_loadbalancer_install_package( if loadbalancer_setup['setup_capsules']['capsule_1'].hostname in result.stdout else loadbalancer_setup['setup_capsules']['capsule_2'] ) + request.addfinalizer( + lambda: registered_to_capsule.power_control(state=VmState.RUNNING, ensure=True) + ) # Remove the packages from the client result = rhel7_contenthost.execute('yum remove -y tree') @@ -231,10 +235,6 @@ def test_loadbalancer_install_package( result = rhel7_contenthost.execute('yum install -y tree') assert result.status == 0 - @request.addfinalizer - def _finalize(): - registered_to_capsule.power_control(state=VmState.RUNNING, ensure=True) - @pytest.mark.rhel_ver_match('[^6]') @pytest.mark.tier1 diff --git a/tests/foreman/installer/test_installer.py b/tests/foreman/installer/test_installer.py index 6c038351914..da64593642e 100644 --- a/tests/foreman/installer/test_installer.py +++ b/tests/foreman/installer/test_installer.py @@ -1381,8 +1381,7 @@ def sat_default_install(module_sat_ready_rhels): f'foreman-initial-admin-password {settings.server.admin_password}', ] install_satellite(module_sat_ready_rhels[0], installer_args) - yield module_sat_ready_rhels[0] - common_sat_install_assertions(module_sat_ready_rhels[0]) + return module_sat_ready_rhels[0] @pytest.fixture(scope='module') @@ -1395,8 +1394,7 @@ def sat_non_default_install(module_sat_ready_rhels): 'foreman-proxy-content-pulpcore-hide-guarded-distributions false', ] install_satellite(module_sat_ready_rhels[1], installer_args, enable_fapolicyd=True) - yield module_sat_ready_rhels[1] - common_sat_install_assertions(module_sat_ready_rhels[1]) + return module_sat_ready_rhels[1] @pytest.mark.e2e diff --git a/tests/foreman/maintain/test_advanced.py b/tests/foreman/maintain/test_advanced.py index 6a66c9aefcf..68b00e5c071 100644 --- a/tests/foreman/maintain/test_advanced.py +++ b/tests/foreman/maintain/test_advanced.py @@ -75,6 +75,22 @@ def test_positive_advanced_run_hammer_setup(request, sat_maintain): :BZ: 1830355 """ + + @request.addfinalizer + def _finalize(): + result = sat_maintain.execute( + f'hammer -u admin -p admin user update --login admin --password {default_admin_pass}' + ) + assert result.status == 0 + # Make default admin creds available in MAINTAIN_HAMMER_YML + assert sat_maintain.cli.Advanced.run_hammer_setup().status == 0 + # Make sure default password available in MAINTAIN_HAMMER_YML + result = sat_maintain.execute( + f"grep -i ':password: {default_admin_pass}' {MAINTAIN_HAMMER_YML}" + ) + assert result.status == 0 + assert default_admin_pass in result.stdout + default_admin_pass = settings.server.admin_password result = sat_maintain.execute( f'hammer -u admin -p {default_admin_pass} user update --login admin --password admin' @@ -100,21 +116,6 @@ def test_positive_advanced_run_hammer_setup(request, sat_maintain): assert result.status == 0 assert 'admin' in result.stdout - @request.addfinalizer - def _finalize(): - result = sat_maintain.execute( - f'hammer -u admin -p admin user update --login admin --password {default_admin_pass}' - ) - assert result.status == 0 - # Make default admin creds available in MAINTAIN_HAMMER_YML - assert sat_maintain.cli.Advanced.run_hammer_setup().status == 0 - # Make sure default password available in MAINTAIN_HAMMER_YML - result = sat_maintain.execute( - f"grep -i ':password: {default_admin_pass}' {MAINTAIN_HAMMER_YML}" - ) - assert result.status == 0 - assert default_admin_pass in result.stdout - 
@pytest.mark.e2e @pytest.mark.upgrade @@ -131,6 +132,12 @@ def test_positive_advanced_run_packages(request, sat_maintain): :expectedresults: packages should install/downgrade/check-update/update. """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y walrus').status == 0 + sat_maintain.execute('rm -rf /etc/yum.repos.d/custom_repo.repo') + # Setup custom_repo and install walrus package sat_maintain.create_custom_repos(custom_repo=settings.repos.yum_0.url) result = sat_maintain.cli.Advanced.run_packages_install( @@ -160,11 +167,6 @@ def test_positive_advanced_run_packages(request, sat_maintain): assert result.status == 0 assert 'walrus-5.21-1' in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y walrus').status == 0 - sat_maintain.execute('rm -rf /etc/yum.repos.d/custom_repo.repo') - @pytest.mark.parametrize( 'tasks_state', @@ -250,6 +252,7 @@ def test_positive_sync_plan_with_hammer_defaults(request, sat_maintain, module_o :customerscenario: true """ + sat_maintain.cli.Defaults.add({'param-name': 'organization_id', 'param-value': module_org.id}) sync_plans = [] @@ -258,16 +261,6 @@ def test_positive_sync_plan_with_hammer_defaults(request, sat_maintain, module_o sat_maintain.api.SyncPlan(enabled=True, name=name, organization=module_org).create() ) - result = sat_maintain.cli.Advanced.run_sync_plans_disable() - assert 'FAIL' not in result.stdout - assert result.status == 0 - - sync_plans[0].delete() - - result = sat_maintain.cli.Advanced.run_sync_plans_enable() - assert 'FAIL' not in result.stdout - assert result.status == 0 - @request.addfinalizer def _finalize(): sat_maintain.cli.Defaults.delete({'param-name': 'organization_id'}) @@ -278,6 +271,16 @@ def _finalize(): if sync_plan: sync_plans[0].delete() + result = sat_maintain.cli.Advanced.run_sync_plans_disable() + assert 'FAIL' not in result.stdout + assert result.status == 0 + + sync_plans[0].delete() + + result = sat_maintain.cli.Advanced.run_sync_plans_enable() + assert 'FAIL' not in result.stdout + assert result.status == 0 + @pytest.mark.e2e def test_positive_satellite_repositories_setup(sat_maintain): diff --git a/tests/foreman/maintain/test_health.py b/tests/foreman/maintain/test_health.py index 9da62960142..9ba117f72cf 100644 --- a/tests/foreman/maintain/test_health.py +++ b/tests/foreman/maintain/test_health.py @@ -201,6 +201,13 @@ def test_negative_health_check_upstream_repository(sat_maintain, request): :expectedresults: check-upstream-repository health check should fail. 
""" + + @request.addfinalizer + def _finalize(): + for name in upstream_url: + sat_maintain.execute(f'rm -fr /etc/yum.repos.d/{name}.repo') + sat_maintain.execute('dnf clean all') + for name, url in upstream_url.items(): sat_maintain.create_custom_repos(**{name: url}) result = sat_maintain.cli.Health.check( @@ -216,12 +223,6 @@ def test_negative_health_check_upstream_repository(sat_maintain, request): elif name in ['foreman_repo', 'puppet_repo']: assert 'enabled=0' in result.stdout - @request.addfinalizer - def _finalize(): - for name in upstream_url: - sat_maintain.execute(f'rm -fr /etc/yum.repos.d/{name}.repo') - sat_maintain.execute('dnf clean all') - def test_positive_health_check_available_space(sat_maintain): """Verify available-space check @@ -260,15 +261,16 @@ def test_positive_hammer_defaults_set(sat_maintain, request): :customerscenario: true """ - sat_maintain.cli.Defaults.add({'param-name': 'organization_id', 'param-value': 1}) - result = sat_maintain.cli.Health.check(options={'assumeyes': True}) - assert result.status == 0 - assert 'FAIL' not in result.stdout @request.addfinalizer def _finalize(): sat_maintain.cli.Defaults.delete({'param-name': 'organization_id'}) + sat_maintain.cli.Defaults.add({'param-name': 'organization_id', 'param-value': 1}) + result = sat_maintain.cli.Health.check(options={'assumeyes': True}) + assert result.status == 0 + assert 'FAIL' not in result.stdout + @pytest.mark.include_capsule def test_positive_health_check_hotfix_installed(sat_maintain, request): @@ -288,6 +290,13 @@ def test_positive_health_check_hotfix_installed(sat_maintain, request): :expectedresults: check-hotfix-installed check should detect modified file and installed hotfix. """ + + @request.addfinalizer + def _finalize(): + sat_maintain.execute('rm -fr /etc/yum.repos.d/custom_repo.repo') + sat_maintain.execute('dnf remove -y hotfix-package') + assert sat_maintain.execute(f'sed -i "/#modifying_file/d" {fpath.stdout}').status == 0 + # Verify check-hotfix-installed without hotfix package. result = sat_maintain.cli.Health.check(options={'label': 'check-hotfix-installed'}) assert result.status == 0 @@ -307,12 +316,6 @@ def test_positive_health_check_hotfix_installed(sat_maintain, request): assert 'WARNING' in result.stdout assert 'hotfix-package' in result.stdout - @request.addfinalizer - def _finalize(): - sat_maintain.execute('rm -fr /etc/yum.repos.d/custom_repo.repo') - sat_maintain.execute('dnf remove -y hotfix-package') - assert sat_maintain.execute(f'sed -i "/#modifying_file/d" {fpath.stdout}').status == 0 - @pytest.mark.include_capsule def test_positive_health_check_validate_dnf_config(sat_maintain): @@ -365,6 +368,11 @@ def test_negative_health_check_epel_repository(request, sat_maintain): :expectedresults: check-non-redhat-repository health check should fail. """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y epel-release').status == 0 + epel_repo = 'https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm' sat_maintain.execute(f'dnf install -y {epel_repo}') result = sat_maintain.cli.Health.check(options={'label': 'check-non-redhat-repository'}) @@ -372,10 +380,6 @@ def test_negative_health_check_epel_repository(request, sat_maintain): assert result.status == 1 assert 'FAIL' in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y epel-release').status == 0 - def test_positive_health_check_old_foreman_tasks(sat_maintain): """Verify check-old-foreman-tasks. 
@@ -471,6 +475,14 @@ def test_positive_health_check_tftp_storage(sat_maintain, request): :expectedresults: check-tftp-storage health check should pass. """ + + @request.addfinalizer + def _finalize(): + sat_maintain.cli.Settings.set({'name': 'token_duration', 'value': '360'}) + assert ( + sat_maintain.cli.Settings.list({'search': 'name=token_duration'})[0]['value'] == '360' + ) + sat_maintain.cli.Settings.set({'name': 'token_duration', 'value': '2'}) assert sat_maintain.cli.Settings.list({'search': 'name=token_duration'})[0]['value'] == '2' files_to_delete = [ @@ -504,13 +516,6 @@ def test_positive_health_check_tftp_storage(sat_maintain, request): assert result.status == 0 assert 'FAIL' not in result.stdout - @request.addfinalizer - def _finalize(): - sat_maintain.cli.Settings.set({'name': 'token_duration', 'value': '360'}) - assert ( - sat_maintain.cli.Settings.list({'search': 'name=token_duration'})[0]['value'] == '360' - ) - @pytest.mark.include_capsule def test_positive_health_check_env_proxy(sat_maintain): @@ -661,10 +666,20 @@ def test_positive_health_check_corrupted_roles(sat_maintain, request): :BZ: 1703041, 1908846 """ - # Check the filter created to verify the role, resource type, and permissions assigned. role_name = 'test_role' resource_type = gen_string("alpha") sat_maintain.cli.Role.create(options={'name': role_name}) + + @request.addfinalizer + def _finalize(): + resource_type = r"'\''Host'\''" + sat_maintain.execute( + f'''sudo su - postgres -c "psql -d foreman -c 'UPDATE permissions SET + resource_type = {resource_type} WHERE name = {permission_name};'"''' + ) + sat_maintain.cli.Role.delete(options={'name': role_name}) + + # Check the filter created to verify the role, resource type, and permissions assigned. sat_maintain.cli.Filter.create( options={'role': role_name, 'permissions': ['view_hosts', 'console_hosts']} ) @@ -686,15 +701,6 @@ def test_positive_health_check_corrupted_roles(sat_maintain, request): result = sat_maintain.cli.Filter.list(options={'search': role_name}, output_format='yaml') assert result.count('Id') == 4 - @request.addfinalizer - def _finalize(): - resource_type = r"'\''Host'\''" - sat_maintain.execute( - f'''sudo su - postgres -c "psql -d foreman -c 'UPDATE permissions SET - resource_type = {resource_type} WHERE name = {permission_name};'"''' - ) - sat_maintain.cli.Role.delete(options={'name': role_name}) - @pytest.mark.include_capsule def test_positive_health_check_non_rh_packages(sat_maintain, request): @@ -719,6 +725,12 @@ def test_positive_health_check_non_rh_packages(sat_maintain, request): :CaseImportance: High """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y walrus').status == 0 + assert sat_maintain.execute('rm -fr /etc/yum.repos.d/custom_repo.repo').status == 0 + sat_maintain.create_custom_repos(custom_repo=settings.repos.yum_0.url) assert ( sat_maintain.cli.Packages.install(packages='walrus', options={'assumeyes': True}).status @@ -730,11 +742,6 @@ def test_positive_health_check_non_rh_packages(sat_maintain, request): assert result.status == 78 assert 'WARNING' in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y walrus').status == 0 - assert sat_maintain.execute('rm -fr /etc/yum.repos.d/custom_repo.repo').status == 0 - def test_positive_health_check_duplicate_permissions(sat_maintain): """Verify duplicate-permissions check diff --git a/tests/foreman/maintain/test_maintenance_mode.py b/tests/foreman/maintain/test_maintenance_mode.py index 
50d130ac55a..b8d5024e1b1 100644 --- a/tests/foreman/maintain/test_maintenance_mode.py +++ b/tests/foreman/maintain/test_maintenance_mode.py @@ -47,6 +47,11 @@ def test_positive_maintenance_mode(request, sat_maintain, setup_sync_plan): to disable/enable sync-plan, stop/start crond.service and is able to add FOREMAN_MAINTAIN_TABLE rule in nftables. """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.cli.MaintenanceMode.stop().status == 0 + enable_sync_ids = setup_sync_plan data_yml_path = '/var/lib/foreman-maintain/data.yml' local_data_yml_path = f'{robottelo_tmp_dir}/data.yml' @@ -142,7 +147,3 @@ def test_positive_maintenance_mode(request, sat_maintain, setup_sync_plan): assert 'OK' in result.stdout assert result.status == 1 assert 'Maintenance mode is Off' in result.stdout - - @request.addfinalizer - def _finalize(): - assert sat_maintain.cli.MaintenanceMode.stop().status == 0 diff --git a/tests/foreman/maintain/test_packages.py b/tests/foreman/maintain/test_packages.py index a4c56eac531..f0ae22d6d10 100644 --- a/tests/foreman/maintain/test_packages.py +++ b/tests/foreman/maintain/test_packages.py @@ -160,6 +160,15 @@ def test_positive_fm_packages_install(request, sat_maintain): :expectedresults: Packages get install/update when lock/unlocked. """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y zsh').status == 0 + if sat_maintain.__class__.__name__ == 'Satellite': + result = sat_maintain.install(InstallerCommand('lock-package-versions')) + assert result.status == 0 + assert 'Success!' in result.stdout + # Test whether packages are locked or not result = sat_maintain.install(InstallerCommand('lock-package-versions')) assert result.status == 0 @@ -216,14 +225,6 @@ def test_positive_fm_packages_install(request, sat_maintain): assert result.status == 0 assert 'Use foreman-maintain packages install/update ' not in result.stdout - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y zsh').status == 0 - if sat_maintain.__class__.__name__ == 'Satellite': - result = sat_maintain.install(InstallerCommand('lock-package-versions')) - assert result.status == 0 - assert 'Success!' 
in result.stdout - @pytest.mark.include_capsule def test_positive_fm_packages_update(request, sat_maintain): @@ -244,6 +245,12 @@ def test_positive_fm_packages_update(request, sat_maintain): :customerscenario: true """ + + @request.addfinalizer + def _finalize(): + assert sat_maintain.execute('dnf remove -y walrus').status == 0 + sat_maintain.execute('rm -rf /etc/yum.repos.d/custom_repo.repo') + # Setup custom_repo and packages update sat_maintain.create_custom_repos(custom_repo=settings.repos.yum_0.url) disableplugin = '--disableplugin=foreman-protector' @@ -263,8 +270,3 @@ def test_positive_fm_packages_update(request, sat_maintain): result = sat_maintain.execute('rpm -qa walrus') assert result.status == 0 assert 'walrus-5.21-1' in result.stdout - - @request.addfinalizer - def _finalize(): - assert sat_maintain.execute('dnf remove -y walrus').status == 0 - sat_maintain.execute('rm -rf /etc/yum.repos.d/custom_repo.repo') diff --git a/tests/foreman/sys/test_webpack.py b/tests/foreman/sys/test_webpack.py new file mode 100644 index 00000000000..c71e9016e97 --- /dev/null +++ b/tests/foreman/sys/test_webpack.py @@ -0,0 +1,30 @@ +"""Test class for Webpack + +:CaseAutomation: Automated + +:CaseComponent: Installation + +:Requirement: Installation + +:Team: Endeavour + +:CaseImportance: High + +""" +import pytest + + +@pytest.mark.tier2 +def test_positive_webpack5(target_sat): + """Check whether Webpack 5 was used at packaging time + + :id: b7f3fbb2-ef4b-4634-877f-b8ea10373e04 + + :expectedresults: There is a file "public/webpack/foreman_tasks/foreman_tasks_remoteEntry.js" when Webpack 5 has been used. It used to be "public/webpack/foreman-tasks-.js" before. + """ + assert ( + target_sat.execute( + "find /usr/share/gems | grep public/webpack/foreman_tasks/foreman_tasks_remoteEntry.js" + ).status + == 0 + ) diff --git a/tests/foreman/ui/test_ansible.py b/tests/foreman/ui/test_ansible.py index 82ffd5bf187..89eba62ff56 100644 --- a/tests/foreman/ui/test_ansible.py +++ b/tests/foreman/ui/test_ansible.py @@ -178,6 +178,13 @@ def test_positive_ansible_custom_role(target_sat, session, module_org, rhel_cont :CaseComponent: Ansible-RemoteExecution """ + + @request.addfinalizer + def _finalize(): + result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) + assert f'Ansible role [{SELECTED_ROLE}] was deleted.' in result[0]['message'] + target_sat.execute('rm -rvf /etc/ansible/roles/custom_role') + SELECTED_ROLE = 'custom_role' playbook = f'{robottelo_tmp_dir}/playbook.yml' data = { @@ -231,12 +238,6 @@ def test_positive_ansible_custom_role(target_sat, session, module_org, rhel_cont session.configreport.delete(rhel_contenthost.hostname) assert len(session.configreport.read()['table']) == 0 - @request.addfinalizer - def _finalize(): - result = target_sat.cli.Ansible.roles_delete({'name': SELECTED_ROLE}) - assert f'Ansible role [{SELECTED_ROLE}] was deleted.' 
in result[0]['message'] - target_sat.execute('rm -rvf /etc/ansible/roles/custom_role') - @pytest.mark.tier2 def test_positive_host_role_information(target_sat, function_host): diff --git a/tests/foreman/ui/test_computeresource_gce.py b/tests/foreman/ui/test_computeresource_gce.py index 73f4fb11daf..a25edb1da44 100644 --- a/tests/foreman/ui/test_computeresource_gce.py +++ b/tests/foreman/ui/test_computeresource_gce.py @@ -161,8 +161,17 @@ def test_positive_gce_provision_end_to_end( :expectedresults: Host is provisioned successfully """ + name = f'test{gen_string("alpha", 4).lower()}' hostname = f'{name}.{gce_domain.name}' + + @request.addfinalizer + def _finalize(): + gcehost = sat_gce.api.Host().search(query={'search': f'name={hostname}'}) + if gcehost: + gcehost[0].delete() + googleclient.disconnect() + gceapi_vmname = hostname.replace('.', '-') root_pwd = gen_string('alpha', 15) storage = [{'size': 20}] @@ -214,13 +223,6 @@ def test_positive_gce_provision_end_to_end( # 2.2 GCE Backend Assertions assert gceapi_vm.is_stopping or gceapi_vm.is_stopped - @request.addfinalizer - def _finalize(): - gcehost = sat_gce.api.Host().search(query={'search': f'name={hostname}'}) - if gcehost: - gcehost[0].delete() - googleclient.disconnect() - @pytest.mark.tier4 @pytest.mark.upgrade @@ -247,6 +249,14 @@ def test_positive_gce_cloudinit_provision_end_to_end( """ name = f'test{gen_string("alpha", 4).lower()}' hostname = f'{name}.{gce_domain.name}' + + @request.addfinalizer + def _finalize(): + gcehost = sat_gce.api.Host().search(query={'search': f'name={hostname}'}) + if gcehost: + gcehost[0].delete() + googleclient.disconnect() + gceapi_vmname = hostname.replace('.', '-') storage = [{'size': 20}] root_pwd = gen_string('alpha', random.choice([8, 15])) @@ -290,10 +300,3 @@ def test_positive_gce_cloudinit_provision_end_to_end( assert not sat_gce.api.Host().search(query={'search': f'name="{hostname}"'}) # 2.2 GCE Backend Assertions assert gceapi_vm.is_stopping or gceapi_vm.is_stopped - - @request.addfinalizer - def _finalize(): - gcehost = sat_gce.api.Host().search(query={'search': f'name={hostname}'}) - if gcehost: - gcehost[0].delete() - googleclient.disconnect() diff --git a/tests/foreman/ui/test_computeresource_libvirt.py b/tests/foreman/ui/test_computeresource_libvirt.py index 518e26817b8..42477b8d46a 100644 --- a/tests/foreman/ui/test_computeresource_libvirt.py +++ b/tests/foreman/ui/test_computeresource_libvirt.py @@ -171,10 +171,8 @@ def test_positive_provision_end_to_end( } ) name = f'{hostname}.{module_libvirt_provisioning_sat.domain.name}' - assert session.host.search(name)[0]['Name'] == name - - # teardown request.addfinalizer(lambda: sat.provisioning_cleanup(name)) + assert session.host.search(name)[0]['Name'] == name # Check on Libvirt, if VM exists result = sat.execute( diff --git a/tests/upgrades/test_contentview.py b/tests/upgrades/test_contentview.py index 9b7fa90bb56..2b581297b1e 100644 --- a/tests/upgrades/test_contentview.py +++ b/tests/upgrades/test_contentview.py @@ -87,6 +87,7 @@ def test_cv_postupgrade_scenario(self, request, target_sat, pre_upgrade_data): cv = target_sat.api.ContentView(organization=org.id).search( query={'search': f'name="{cv_name}"'} )[0] + request.addfinalizer(cv.delete) yum_repo = target_sat.api.Repository(organization=org.id).search( query={'search': f'name="{pre_test_name}_yum_repo"'} )[0] @@ -95,7 +96,6 @@ def test_cv_postupgrade_scenario(self, request, target_sat, pre_upgrade_data): query={'search': f'name="{pre_test_name}_file_repo"'} )[0] 
request.addfinalizer(file_repo.delete) - request.addfinalizer(cv.delete) cv.repository = [] cv.update(['repository']) assert len(cv.read_json()['repositories']) == 0 diff --git a/tests/upgrades/test_host.py b/tests/upgrades/test_host.py index 5d0e9fda247..b60585d63bd 100644 --- a/tests/upgrades/test_host.py +++ b/tests/upgrades/test_host.py @@ -161,6 +161,7 @@ def test_post_create_gce_cr_and_host( pre_upgrade_host = sat_gce.api.Host().search( query={'search': f'name={pre_upgrade_data.provision_host_name}'} )[0] + request.addfinalizer(pre_upgrade_host.delete) org = sat_gce.api.Organization(id=pre_upgrade_host.organization.id).read() loc = sat_gce.api.Location(id=pre_upgrade_host.location.id).read() domain = sat_gce.api.Domain(id=pre_upgrade_host.domain.id).read() @@ -185,7 +186,6 @@ def test_post_create_gce_cr_and_host( image=image, root_pass=gen_string('alphanumeric'), ).create() - request.addfinalizer(pre_upgrade_host.delete) request.addfinalizer(host.delete) assert host.name == f"{self.hostname.lower()}.{domain.name}" assert host.build_status_label == 'Installed' diff --git a/tests/upgrades/test_provisioningtemplate.py b/tests/upgrades/test_provisioningtemplate.py index 9d681a56ead..7c603c30a53 100644 --- a/tests/upgrades/test_provisioningtemplate.py +++ b/tests/upgrades/test_provisioningtemplate.py @@ -105,6 +105,7 @@ def test_post_scenario_provisioning_templates( pre_upgrade_host = module_target_sat.api.Host().search( query={'search': f'id={pre_upgrade_data.provision_host_id}'} )[0] + request.addfinalizer(pre_upgrade_host.delete) org = module_target_sat.api.Organization(id=pre_upgrade_host.organization.id).read() loc = module_target_sat.api.Location(id=pre_upgrade_host.location.id).read() domain = module_target_sat.api.Domain(id=pre_upgrade_host.domain.id).read() @@ -129,7 +130,6 @@ def test_post_scenario_provisioning_templates( root_pass=settings.provisioning.host_root_password, pxe_loader=pxe_loader, ).create() - request.addfinalizer(pre_upgrade_host.delete) request.addfinalizer(new_host.delete) for kind in provisioning_template_kinds: diff --git a/tests/upgrades/test_subscription.py b/tests/upgrades/test_subscription.py index 116d8956120..dc6d33b84b2 100644 --- a/tests/upgrades/test_subscription.py +++ b/tests/upgrades/test_subscription.py @@ -173,5 +173,5 @@ def test_post_subscription_scenario_auto_attach(self, request, target_sat, pre_u sub.delete_manifest(data={'organization_id': org.id}) assert len(sub.search()) == 0 manifester = Manifester(manifest_category=settings.manifest.entitlement) - manifester.allocation_uuid = pre_upgrade_data.allocation_uuid request.addfinalizer(manifester.delete_subscription_allocation) + manifester.allocation_uuid = pre_upgrade_data.allocation_uuid diff --git a/tests/upgrades/test_usergroup.py b/tests/upgrades/test_usergroup.py index 0832602dbb3..11ac95e2af8 100644 --- a/tests/upgrades/test_usergroup.py +++ b/tests/upgrades/test_usergroup.py @@ -102,16 +102,16 @@ def test_post_verify_user_group_membership( user_group = target_sat.api.UserGroup().search( query={'search': f'name={pre_upgrade_data["user_group_name"]}'} ) + request.addfinalizer(user_group[0].delete) auth_source = target_sat.api.AuthSourceLDAP().search( query={'search': f'name={pre_upgrade_data["auth_source_name"]}'} )[0] request.addfinalizer(auth_source.delete) - request.addfinalizer(user_group[0].delete) user = target_sat.api.User().search(query={'search': f'login={ad_data["ldap_user_name"]}'})[ 0 ] - assert user.read().id == user_group[0].read().user[0].id 
request.addfinalizer(user.delete) + assert user.read().id == user_group[0].read().user[0].id role_list = target_sat.cli.Role.with_user( username=ad_data['ldap_user_name'], password=ad_data['ldap_user_passwd'] ).list()
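
The hunks above repeatedly move `@request.addfinalizer` registrations (or `request.addfinalizer(...)` calls) from the end of a test to the point where the resource they clean up first exists. Below is a minimal, self-contained pytest sketch of that pattern; `FakeResource` and the test body are illustrative stand-ins, not robottelo APIs. Registering the finalizer before the risky steps means the cleanup still runs when a later assertion fails; note also that pytest executes finalizers registered on the same request in LIFO order, so moving a registration earlier also makes it run later during teardown.

import pytest


class FakeResource:
    """Illustrative stand-in for anything a test creates and must clean up."""

    def __init__(self):
        self.exists = False

    def create(self):
        self.exists = True
        return self

    def delete(self):
        self.exists = False


@pytest.fixture
def fake_resource():
    return FakeResource()


def test_cleanup_registered_before_risky_steps(request, fake_resource):
    resource = fake_resource.create()
    # Register cleanup as soon as the resource exists; it runs even if an
    # assertion below fails, so the test never leaks the resource.
    request.addfinalizer(resource.delete)

    # ... the rest of the test can fail safely ...
    assert resource.exists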
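
The installer fixture hunks replace the trailing yield-plus-assertion with a plain `return`. A short, hypothetical sketch of the distinction (fixture names and bodies are assumptions, not taken from this patch): a fixture with nothing to do after the test can simply return its value, while `yield` is reserved for fixtures that must run teardown code afterwards.

import pytest


@pytest.fixture(scope='module')
def prepared_host():
    host = {'hostname': 'example.invalid'}  # hypothetical setup step
    return host  # no teardown needed, so a plain return is enough


@pytest.fixture
def temp_file(tmp_path):
    path = tmp_path / 'data.txt'
    path.write_text('payload')
    yield path  # everything after the yield runs as teardown
    path.unlink(missing_ok=True)

A test consumes either fixture the same way, by naming it as an argument; only the fixture author needs to care whether it returns or yields.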