diff --git a/robottelo/host_helpers/capsule_mixins.py b/robottelo/host_helpers/capsule_mixins.py
index 1f3769e0f22..f5162748769 100644
--- a/robottelo/host_helpers/capsule_mixins.py
+++ b/robottelo/host_helpers/capsule_mixins.py
@@ -1,6 +1,8 @@
-from datetime import datetime
+from datetime import datetime, timedelta
 import time
 
+from dateutil.parser import parse
+
 from robottelo.constants import PUPPET_CAPSULE_INSTALLER, PUPPET_COMMON_INSTALLER_OPTS
 from robottelo.logging import logger
 from robottelo.utils.installer import InstallerCommand
@@ -60,26 +62,78 @@ def wait_for_tasks(
             raise AssertionError(f"No task was found using query '{search_query}'")
         return tasks
 
-    def wait_for_sync(self, timeout=600, start_time=None):
-        """Wait for capsule sync to finish and assert the sync task succeeded"""
-        # Assert that a task to sync lifecycle environment to the capsule
-        # is started (or finished already)
+    def wait_for_sync(self, start_time=None, timeout=600):
+        """Wait for a capsule sync to finish and assert it succeeded.
+
+        Assert that a task to sync a lifecycle environment to the capsule
+        has started (or already finished) and succeeded.
+
+        :raises: ``AssertionError``: If any of the following verifications fails:
+
+            - Some active sync task(s) exist for the capsule, or a sync just finished
+              (recent last_sync_time).
+            - All polled active sync task(s) succeeded and the capsule's last_sync_time
+              was updated.
+            - The last_sync_time after the final task is on or newer than start_time.
+            - The total sync duration (in seconds) is within the timeout and not negative.
+
+        :param start_time: (datetime) UTC time to compare against the capsule's
+            last_sync_time. Default: None (current UTC time).
+        :param timeout: (int) Maximum seconds for the active task(s) and queries to finish.
+
+        :return: (list) The active sync tasks found in the initial capsule sync status.
+        """
+        # Fetch the initial capsule sync status
+        logger.info(f"Waiting for capsule {self.hostname} sync to finish ...")
+        sync_status = self.nailgun_capsule.content_get_sync(timeout=timeout, synchronous=True)
+        # Default start_time to the current UTC time, if not provided
         if start_time is None:
             start_time = datetime.utcnow().replace(microsecond=0)
-        logger.info(f"Waiting for capsule {self.hostname} sync to finish ...")
-        sync_status = self.nailgun_capsule.content_get_sync()
-        logger.info(f"Active tasks {sync_status['active_sync_tasks']}")
-        assert (
-            len(sync_status['active_sync_tasks'])
-            or datetime.strptime(sync_status['last_sync_time'], '%Y-%m-%d %H:%M:%S UTC')
-            >= start_time
+        # Apply a 1s margin of safety for rounding
+        start_time = (
+            (start_time - timedelta(seconds=1))
+            .replace(microsecond=0)
+            .strftime('%Y-%m-%d %H:%M:%S UTC')
+        )
+        # Assert that recent sync activity is present:
+        # either one or more ongoing sync tasks for the capsule,
+        # or the capsule's last_sync_time is on or after start_time
+        assert len(sync_status['active_sync_tasks']) or (
+            parse(sync_status['last_sync_time']) >= parse(start_time)
+        ), (
+            f"No active or recent sync found for capsule {self.hostname}."
+            f" `active_sync_tasks` was empty: {sync_status['active_sync_tasks']},"
+            f" and the `last_sync_time`: {sync_status['last_sync_time']},"
+            f" was prior to the `start_time`: {start_time}."
         )
-        # Wait till capsule sync finishes and assert the sync task succeeded
+        # Poll any active sync tasks from the initial status and verify they succeeded.
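+        # Note: ForemanTask(...).poll() is expected to raise an exception when a task
+        # fails or times out, so completing this loop implies all sync tasks succeeded.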
+ logger.info(f"Active tasks: {sync_status['active_sync_tasks']}") for task in sync_status['active_sync_tasks']: self.satellite.api.ForemanTask(id=task['id']).poll(timeout=timeout) - sync_status = self.nailgun_capsule.content_get_sync() - assert len(sync_status['last_failed_sync_tasks']) == 0 + logger.info(f"Active sync task :id {task['id']} succeeded.") + + # Fetch updated sync status + logger.info(f"Querying updated sync status from capsule {self.hostname}.") + updated_status = self.nailgun_capsule.content_get_sync(timeout=timeout, synchronous=True) + # Last sync task end time is the same as capsule's last sync time. + assert parse(updated_status['last_sync_time']) == parse( + updated_status['last_sync_task']['ended_at'] + ), f"`last_sync_time` does not match final task's end time. Capsule: {self.hostname}" + + # Total time taken is not negative (sync prior to start_time), + # and did not exceed timeout. + assert ( + timedelta(seconds=0) + <= parse(updated_status['last_sync_time']) - parse(start_time) + <= timedelta(seconds=timeout) + ), ( + f"No recent sync task(s) were found for capsule: {self.hostname}, or task(s) timed out." + f" `last_sync_time`: ({updated_status['last_sync_time']}) was prior to `start_time`: ({start_time})" + f" or exceeded timeout ({timeout}s)." + ) + # No failed or active tasks remaining + assert len(updated_status['last_failed_sync_tasks']) == 0 + assert len(updated_status['active_sync_tasks']) == 0 + + return sync_status['active_sync_tasks'] def get_published_repo_url(self, org, prod, repo, lce=None, cv=None): """Forms url of a repo or CV published on a Satellite or Capsule. diff --git a/tests/foreman/api/test_capsulecontent.py b/tests/foreman/api/test_capsulecontent.py index 99c2bb816fc..be2d6d87407 100644 --- a/tests/foreman/api/test_capsulecontent.py +++ b/tests/foreman/api/test_capsulecontent.py @@ -12,7 +12,8 @@ :CaseImportance: High """ -from datetime import datetime + +from datetime import datetime, timedelta import re from time import sleep @@ -20,11 +21,24 @@ from nailgun.entity_mixins import call_entity_method_with_timeout import pytest -from robottelo import constants from robottelo.config import settings from robottelo.constants import ( + CONTAINER_CLIENTS, CONTAINER_REGISTRY_HUB, CONTAINER_UPSTREAM_NAME, + ENVIRONMENT, + FAKE_1_YUM_REPOS_COUNT, + FAKE_3_YUM_REPO_RPMS, + FAKE_3_YUM_REPOS_COUNT, + FAKE_FILE_LARGE_COUNT, + FAKE_FILE_LARGE_URL, + FAKE_FILE_NEW_NAME, + KICKSTART_CONTENT, + PRDS, + REPOS, + REPOSET, + RH_CONTAINER_REGISTRY_HUB, + RPM_TO_UPLOAD, DataFile, ) from robottelo.constants.repos import ANSIBLE_GALAXY, CUSTOM_FILE_REPO @@ -83,14 +97,14 @@ def test_positive_uploaded_content_library_sync( assert repo.read().content_counts['rpm'] == 1 + timestamp = datetime.utcnow().replace(microsecond=0) # Publish new version of the content view cv.publish() + # query sync status as publish invokes sync, task succeeds + module_capsule_configured.wait_for_sync(start_time=timestamp) cv = cv.read() - assert len(cv.version) == 1 - module_capsule_configured.wait_for_sync() - # Verify the RPM published on Capsule caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -101,7 +115,7 @@ def test_positive_uploaded_content_library_sync( ) caps_files = get_repo_files_by_url(caps_repo_url) assert len(caps_files) == 1 - assert caps_files[0] == constants.RPM_TO_UPLOAD + assert caps_files[0] == RPM_TO_UPLOAD @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients', 'fake_manifest') @@ -149,13 +163,13 @@ def 
test_positive_checksum_sync( assert len(cv.version) == 1 cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify repodata's checksum type is sha256, not sha1 on capsule repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -182,13 +196,13 @@ def test_positive_checksum_sync( cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify repodata's checksum type has updated to sha1 on capsule repomd = get_repomd(repo_url) checksum_types = re.findall(r'(?<=checksum type=").*?(?=")', repomd) @@ -257,12 +271,13 @@ def test_positive_sync_updated_repo( assert len(cv.version) == 1 cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Upload more content to the repository with open(DataFile.SRPM_TO_UPLOAD, 'rb') as handle: repo.upload_content(files={'content': handle}) @@ -276,12 +291,13 @@ def test_positive_sync_updated_repo( cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Check the content is synced on the Capsule side properly sat_repo_url = target_sat.get_published_repo_url( org=function_org.label, @@ -357,10 +373,18 @@ def test_positive_capsule_sync( assert len(cv.version) == 1 cvv = cv.version[-1].read() - # Promote content view to lifecycle environment + # prior to trigger (promoting), assert no active sync tasks + active_tasks = module_capsule_configured.nailgun_capsule.content_get_sync( + synchronous=True, timeout=600 + )['active_sync_tasks'] + assert len(active_tasks) == 0 + # Promote content view to lifecycle environment, + # invoking capsule sync task(s) + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 # Content of the published content view in @@ -368,8 +392,6 @@ def test_positive_capsule_sync( # repository assert repo.content_counts['rpm'] == cvv.package_count - module_capsule_configured.wait_for_sync() - # Assert that the content published on the capsule is exactly the # same as in repository on satellite sat_repo_url = target_sat.get_published_repo_url( @@ -404,14 +426,14 @@ def test_positive_capsule_sync( cv = cv.read() cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() - # Promote new content view version to lifecycle environment + # Promote new content view version to lifecycle environment, + # capsule sync task(s) invoked and succeed + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = 
cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - - module_capsule_configured.wait_for_sync() - # Assert that the value of repomd revision of repository in # lifecycle environment on the capsule has not changed new_lce_revision_capsule = get_repomd_revision(caps_repo_url) @@ -427,21 +449,22 @@ def test_positive_capsule_sync( cv = cv.read() cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() + + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 # Assert that packages count in the repository is updated - assert repo.content_counts['rpm'] == (constants.FAKE_1_YUM_REPOS_COUNT + 1) + assert repo.content_counts['rpm'] == (FAKE_1_YUM_REPOS_COUNT + 1) # Assert that the content of the published content view in # lifecycle environment is exactly the same as content of the # repository assert repo.content_counts['rpm'] == cvv.package_count - module_capsule_configured.wait_for_sync() - # Assert that the content published on the capsule is exactly the # same as in the repository sat_files = get_repo_files_by_url(sat_repo_url) @@ -451,7 +474,7 @@ def test_positive_capsule_sync( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule', 'clients') def test_positive_iso_library_sync( - self, module_capsule_configured, module_entitlement_manifest_org, module_target_sat + self, module_capsule_configured, module_sca_manifest_org, module_target_sat ): """Ensure RH repo with ISOs after publishing to Library is synchronized to capsule automatically @@ -467,18 +490,18 @@ def test_positive_iso_library_sync( # Enable & sync RH repository with ISOs rh_repo_id = module_target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=module_entitlement_manifest_org.id, - product=constants.PRDS['rhsc'], - repo=constants.REPOS['rhsc7_iso']['name'], - reposet=constants.REPOSET['rhsc7_iso'], + org_id=module_sca_manifest_org.id, + product=PRDS['rhsc'], + repo=REPOS['rhsc7_iso']['name'], + reposet=REPOSET['rhsc7_iso'], releasever=None, ) rh_repo = module_target_sat.api.Repository(id=rh_repo_id).read() call_entity_method_with_timeout(rh_repo.sync, timeout=2500) # Find "Library" lifecycle env for specific organization lce = module_target_sat.api.LifecycleEnvironment( - organization=module_entitlement_manifest_org - ).search(query={'search': f'name={constants.ENVIRONMENT}'})[0] + organization=module_sca_manifest_org + ).search(query={'search': f'name={ENVIRONMENT}'})[0] # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( @@ -491,23 +514,23 @@ def test_positive_iso_library_sync( # Create a content view with the repository cv = module_target_sat.api.ContentView( - organization=module_entitlement_manifest_org, repository=[rh_repo] + organization=module_sca_manifest_org, repository=[rh_repo] ).create() # Publish new version of the content view + timestamp = datetime.utcnow() cv.publish() - cv = cv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cv = cv.read() assert len(cv.version) == 1 # Verify ISOs are present on satellite sat_isos = get_repo_files_by_url(rh_repo.full_path, extension='iso') assert len(sat_isos) == 4 - module_capsule_configured.wait_for_sync() - # Verify all the ISOs are present on capsule caps_path = ( - 
f'{module_capsule_configured.url}/pulp/content/{module_entitlement_manifest_org.label}' + f'{module_capsule_configured.url}/pulp/content/{module_sca_manifest_org.label}' f'/{lce.label}/{cv.label}/content/dist/rhel/server/7/7Server/x86_64/sat-capsule/6.4/' 'iso/' ) @@ -540,8 +563,8 @@ def test_positive_on_demand_sync( the original package from the upstream repo """ repo_url = settings.repos.yum_3.url - packages_count = constants.FAKE_3_YUM_REPOS_COUNT - package = constants.FAKE_3_YUM_REPO_RPMS[0] + packages_count = FAKE_3_YUM_REPOS_COUNT + package = FAKE_3_YUM_REPO_RPMS[0] repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', @@ -573,13 +596,13 @@ def test_positive_on_demand_sync( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify packages on Capsule match the source caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -624,7 +647,7 @@ def test_positive_update_with_immediate_sync( filesystem contains valid links to packages """ repo_url = settings.repos.yum_1.url - packages_count = constants.FAKE_1_YUM_REPOS_COUNT + packages_count = FAKE_1_YUM_REPOS_COUNT repo = target_sat.api.Repository( download_policy='on_demand', mirroring_policy='mirror_complete', @@ -655,13 +678,13 @@ def test_positive_update_with_immediate_sync( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Update download policy to 'immediate' repo.download_policy = 'immediate' repo = repo.update(['download_policy']) @@ -683,13 +706,13 @@ def test_positive_update_with_immediate_sync( cv.version.sort(key=lambda version: version.id) cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Verify the count of RPMs published on Capsule caps_repo_url = module_capsule_configured.get_published_repo_url( org=function_org.label, @@ -730,7 +753,7 @@ def test_positive_capsule_pub_url_accessible(self, module_capsule_configured): @pytest.mark.skip_if_not_set('capsule', 'clients') @pytest.mark.parametrize('distro', ['rhel7', 'rhel8_bos', 'rhel9_bos']) def test_positive_sync_kickstart_repo( - self, target_sat, module_capsule_configured, function_entitlement_manifest_org, distro + self, target_sat, module_capsule_configured, function_sca_manifest_org, distro ): """Sync kickstart repository to the capsule. 
@@ -751,16 +774,14 @@ def test_positive_sync_kickstart_repo( """ repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=function_entitlement_manifest_org.id, - product=constants.REPOS['kickstart'][distro]['product'], - reposet=constants.REPOS['kickstart'][distro]['reposet'], - repo=constants.REPOS['kickstart'][distro]['name'], - releasever=constants.REPOS['kickstart'][distro]['version'], + org_id=function_sca_manifest_org.id, + product=REPOS['kickstart'][distro]['product'], + reposet=REPOS['kickstart'][distro]['reposet'], + repo=REPOS['kickstart'][distro]['name'], + releasever=REPOS['kickstart'][distro]['version'], ) repo = target_sat.api.Repository(id=repo_id).read() - lce = target_sat.api.LifecycleEnvironment( - organization=function_entitlement_manifest_org - ).create() + lce = target_sat.api.LifecycleEnvironment(organization=function_sca_manifest_org).create() # Associate the lifecycle environment with the capsule module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': lce.id} @@ -775,7 +796,7 @@ def test_positive_sync_kickstart_repo( # Create a content view with the repository cv = target_sat.api.ContentView( - organization=function_entitlement_manifest_org, repository=[repo] + organization=function_sca_manifest_org, repository=[repo] ).create() # Sync repository repo.sync(timeout='10m') @@ -788,26 +809,26 @@ def test_positive_sync_kickstart_repo( cvv = cv.version[-1].read() # Promote content view to lifecycle environment + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce.id}) - cvv = cvv.read() + module_capsule_configured.wait_for_sync(start_time=timestamp) + cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Check for kickstart content on SAT and CAPS tail = ( - f'rhel/server/7/{constants.REPOS["kickstart"][distro]["version"]}/x86_64/kickstart' + f'rhel/server/7/{REPOS["kickstart"][distro]["version"]}/x86_64/kickstart' if distro == 'rhel7' - else f'{distro.split("_")[0]}/{constants.REPOS["kickstart"][distro]["version"]}/x86_64/baseos/kickstart' # noqa:E501 + else f'{distro.split("_")[0]}/{REPOS["kickstart"][distro]["version"]}/x86_64/baseos/kickstart' # noqa:E501 ) url_base = ( - f'pulp/content/{function_entitlement_manifest_org.label}/{lce.label}/{cv.label}/' + f'pulp/content/{function_sca_manifest_org.label}/{lce.label}/{cv.label}/' f'content/dist/{tail}' ) # Check kickstart specific files - for file in constants.KICKSTART_CONTENT: + for file in KICKSTART_CONTENT: sat_file = target_sat.md5_by_url(f'{target_sat.url}/{url_base}/{file}') caps_file = target_sat.md5_by_url(f'{module_capsule_configured.url}/{url_base}/{file}') assert sat_file == caps_file @@ -887,12 +908,13 @@ def test_positive_sync_container_repo_end_to_end( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Pull the images from capsule to the content host repo_paths = [ ( @@ -902,7 +924,7 @@ def test_positive_sync_container_repo_end_to_end( for repo in repos ] - for con_client in constants.CONTAINER_CLIENTS: + for con_client in CONTAINER_CLIENTS: result = container_contenthost.execute( f'{con_client} login -u {settings.server.admin_username}' f' -p {settings.server.admin_password} 
{module_capsule_configured.hostname}' @@ -1005,10 +1027,12 @@ def test_positive_sync_collection_repo( assert function_lce_library.id in [capsule_lce['id'] for capsule_lce in result['results']] # Sync the repo + timestamp = datetime.utcnow() repo.sync(timeout=600) repo = repo.read() assert repo.content_counts['ansible_collection'] == 2 - module_capsule_configured.wait_for_sync() + + module_capsule_configured.wait_for_sync(start_time=timestamp) repo_path = repo.full_path.replace(target_sat.hostname, module_capsule_configured.hostname) coll_path = './collections' @@ -1063,7 +1087,7 @@ def test_positive_sync_file_repo( repo = target_sat.api.Repository( content_type='file', product=function_product, - url=constants.FAKE_FILE_LARGE_URL, + url=FAKE_FILE_LARGE_URL, ).create() repo.sync() @@ -1087,12 +1111,13 @@ def test_positive_sync_file_repo( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - # Run one more sync, check for status (BZ#1985122) sync_status = module_capsule_configured.nailgun_capsule.content_sync() assert sync_status['result'] == 'success' @@ -1114,8 +1139,8 @@ def test_positive_sync_file_repo( ) sat_files = get_repo_files_by_url(sat_repo_url, extension='iso') caps_files = get_repo_files_by_url(caps_repo_url, extension='iso') - assert len(sat_files) == len(caps_files) == constants.FAKE_FILE_LARGE_COUNT + 1 - assert constants.FAKE_FILE_NEW_NAME in caps_files + assert len(sat_files) == len(caps_files) == FAKE_FILE_LARGE_COUNT + 1 + assert FAKE_FILE_NEW_NAME in caps_files assert sat_files == caps_files for file in sat_files: @@ -1126,7 +1151,7 @@ def test_positive_sync_file_repo( @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_sync_CV_to_multiple_LCEs( - self, target_sat, module_capsule_configured, module_manifest_org + self, target_sat, module_capsule_configured, module_sca_manifest_org ): """Synchronize a CV to multiple LCEs at the same time. All sync tasks should succeed. @@ -1151,19 +1176,19 @@ def test_positive_sync_CV_to_multiple_LCEs( # Sync a repository to the Satellite. repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=module_manifest_org.id, - product=constants.PRDS['rhel'], - repo=constants.REPOS['rhel7_extra']['name'], - reposet=constants.REPOSET['rhel7_extra'], + org_id=module_sca_manifest_org.id, + product=PRDS['rhel'], + repo=REPOS['rhel7_extra']['name'], + reposet=REPOSET['rhel7_extra'], releasever=None, ) repo = target_sat.api.Repository(id=repo_id).read() repo.sync() # Create two LCEs, assign them to the Capsule. - lce1 = target_sat.api.LifecycleEnvironment(organization=module_manifest_org).create() + lce1 = target_sat.api.LifecycleEnvironment(organization=module_sca_manifest_org).create() lce2 = target_sat.api.LifecycleEnvironment( - organization=module_manifest_org, prior=lce1 + organization=module_sca_manifest_org, prior=lce1 ).create() module_capsule_configured.nailgun_capsule.content_add_lifecycle_environment( data={'environment_id': [lce1.id, lce2.id]} @@ -1175,7 +1200,7 @@ def test_positive_sync_CV_to_multiple_LCEs( # Create a Content View, add the repository and publish it. 
cv = target_sat.api.ContentView( - organization=module_manifest_org, repository=[repo] + organization=module_sca_manifest_org, repository=[repo] ).create() cv.publish() cv = cv.read() @@ -1183,16 +1208,20 @@ def test_positive_sync_CV_to_multiple_LCEs( # Promote the CV to both Capsule's LCEs without waiting for Capsule sync task completion. cvv = cv.version[-1].read() + assert len(cvv.environment) == 1 + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce1.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': lce2.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 3 - # Check all sync tasks finished without errors. - module_capsule_configured.wait_for_sync() - @pytest.mark.tier4 @pytest.mark.skip_if_not_set('capsule') def test_positive_capsule_sync_status_persists( @@ -1235,7 +1264,8 @@ def test_positive_capsule_sync_status_persists( cvv = cv.version[-1].read() timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) - module_capsule_configured.wait_for_sync() + + module_capsule_configured.wait_for_sync(start_time=timestamp) # Delete all capsule sync tasks so that we fall back for audits. task_result = target_sat.execute( @@ -1265,7 +1295,7 @@ def test_positive_remove_capsule_orphans( target_sat, pytestconfig, capsule_configured, - function_entitlement_manifest_org, + function_sca_manifest_org, function_lce_library, ): """Synchronize RPM content to the capsule, disassociate the capsule form the content @@ -1298,10 +1328,10 @@ def test_positive_remove_capsule_orphans( # Enable RHST repo and sync it to the Library LCE. repo_id = target_sat.api_factory.enable_rhrepo_and_fetchid( basearch='x86_64', - org_id=function_entitlement_manifest_org.id, - product=constants.REPOS['rhst8']['product'], - repo=constants.REPOS['rhst8']['name'], - reposet=constants.REPOSET['rhst8'], + org_id=function_sca_manifest_org.id, + product=REPOS['rhst8']['product'], + repo=REPOS['rhst8']['name'], + reposet=REPOSET['rhst8'], ) repo = target_sat.api.Repository(id=repo_id).read() repo.sync() @@ -1334,13 +1364,20 @@ def test_positive_remove_capsule_orphans( sync_status = capsule_configured.nailgun_capsule.content_sync() assert sync_status['result'] == 'success', 'Capsule sync task failed.' + # datetime string (local time) to search for proper task. + timestamp = (datetime.now().replace(microsecond=0) - timedelta(seconds=1)).strftime( + '%B %d, %Y at %I:%M:%S %p' + ) # Run orphan cleanup for the capsule. 
target_sat.execute( 'foreman-rake katello:delete_orphaned_content RAILS_ENV=production ' f'SMART_PROXY_ID={capsule_configured.nailgun_capsule.id}' ) target_sat.wait_for_tasks( - search_query=('label = Actions::Katello::OrphanCleanup::RemoveOrphans'), + search_query=( + 'label = Actions::Katello::OrphanCleanup::RemoveOrphans' + f' and started_at >= "{timestamp}"' + ), search_rate=5, max_tries=10, ) @@ -1391,7 +1428,7 @@ def test_positive_capsule_sync_openstack_container_repos( content_type='docker', docker_upstream_name=ups_name, product=function_product, - url=constants.RH_CONTAINER_REGISTRY_HUB, + url=RH_CONTAINER_REGISTRY_HUB, upstream_username=settings.subscription.rhn_username, upstream_password=settings.subscription.rhn_password, ).create() @@ -1414,12 +1451,13 @@ def test_positive_capsule_sync_openstack_container_repos( # Promote the latest CV version into capsule's LCE cvv = cv.version[-1].read() + timestamp = datetime.utcnow() cvv.promote(data={'environment_ids': function_lce.id}) + + module_capsule_configured.wait_for_sync(start_time=timestamp) cvv = cvv.read() assert len(cvv.environment) == 2 - module_capsule_configured.wait_for_sync() - @pytest.mark.parametrize( 'repos_collection', [