From c9aaa3b8891954863e87a3cc5e83941fa706d9c6 Mon Sep 17 00:00:00 2001 From: Martin Malina Date: Fri, 8 Nov 2024 10:36:35 +0100 Subject: [PATCH] refactor: patching ContainerImage to add repositories This is a followup of what was done in #303 Most notably: - rename the image_already_exists to find_image to reflect what it does - it used to return bool, now it returns the image if found. - only include the repositories field in the patch payload - for consistency, create the same two repo entries when adding a new repository like we do when creating a brand new image Signed-off-by: Martin Malina --- pyxis/create_container_image.py | 135 +++++++-------- pyxis/test_create_container_image.py | 247 +++++++++++++++++++-------- 2 files changed, 241 insertions(+), 141 deletions(-) diff --git a/pyxis/create_container_image.py b/pyxis/create_container_image.py index 1670598..8d912f3 100755 --- a/pyxis/create_container_image.py +++ b/pyxis/create_container_image.py @@ -149,22 +149,16 @@ def proxymap(repository: str) -> str: return repository.split("/")[-1].replace("----", "/") -def image_already_exists(args, digest: str, repository: str) -> Any: - """Function to check if a containerImage with the given digest and repository - already exists in the pyxis instance +def find_image(pyxis_url, digest: str) -> Any: + """Function to find a containerImage with the given digest. - If `repository` is None, then the return True if the image exists at all. - - :return: the image id, if one exists, else None if not found + :return: the image, if one exists, else None if none found """ - # quote is needed to urlparse the quotation marks raw_filter = f'repositories.manifest_schema2_digest=="{digest}";not(deleted==true)' - if repository: - raw_filter += f';repositories.repository=="{proxymap(repository)}"' + # quote is needed to urlparse the quotation marks filter_str = quote(raw_filter) - - check_url = urljoin(args.pyxis_url, f"v1/images?page_size=1&filter={filter_str}") + check_url = urljoin(pyxis_url, f"v1/images?page_size=1&filter={filter_str}") # Get the list of the ContainerImages with given parameters rsp = pyxis.get(check_url) @@ -183,6 +177,18 @@ def image_already_exists(args, digest: str, repository: str) -> Any: return query_results[0] +def repo_in_image(repository_str: str, image: Dict[str, Any]) -> bool: + """Check if a repository already exists in the ContainerImage + + :return: True if repository_str string is found in the ContainerImage repositories, + False otherwise + """ + for repository in image["repositories"]: + if repository["repository"] == repository_str: + return True + return False + + def prepare_parsed_data(args) -> Dict[str, Any]: """Function to extract the data this script needs from provided oras manifest fetch output @@ -193,8 +199,6 @@ def prepare_parsed_data(args) -> Dict[str, Any]: oras_manifest_fetch = json.load(json_file) parsed_data = { - "name": args.name, - "digest": args.architecture_digest, "architecture": args.architecture, "layers": [ layer["digest"] for layer in reversed(oras_manifest_fetch.get("layers", [])) @@ -259,19 +263,7 @@ def create_container_image(args, parsed_data: Dict[str, Any]): LOGGER.info("Creating new container image") - date_now = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f+00:00") - - if "name" not in parsed_data: - raise Exception("Name was not found in the passed oras manifest json") - - # digest isn't accepted in the parsed_data payload to pyxis; we ignore it - del parsed_data["digest"] - - image_name = parsed_data["name"] - image_registry = 
image_name.split("/")[0] - image_repo = image_name.split("/", 1)[1] - # name isn't accepted in the parsed_data payload to pyxis - del parsed_data["name"] + repositories = construct_repositories(args) # sum_layer_size_bytes isn't accepted in the parsed_data payload to pyxis sum_layer_size_bytes = parsed_data.pop("sum_layer_size_bytes", 0) @@ -285,15 +277,7 @@ def create_container_image(args, parsed_data: Dict[str, Any]): upload_url = urljoin(args.pyxis_url, "v1/images") container_image_payload = { - "repositories": [ - { - "published": False, - "registry": image_registry, - "repository": image_repo, - "push_date": date_now, - "tags": pyxis_tags(args, date_now), - } - ], + "repositories": repositories, "certified": json.loads(args.certified.lower()), "image_id": args.architecture_digest, "architecture": parsed_data["architecture"], @@ -307,22 +291,6 @@ def create_container_image(args, parsed_data: Dict[str, Any]): if uncompressed_top_layer_id: container_image_payload["uncompressed_top_layer_id"] = uncompressed_top_layer_id - container_image_payload["repositories"][0].update(repository_digest_values(args)) - - # For images released to registry.redhat.io we need a second repository item - # with published=true and registry and repository converted. - # E.g. if the name in the oras manifest result is - # "quay.io/redhat-prod/rhtas-tech-preview----cosign-rhel9", - # repository will be "rhtas-tech-preview/cosign-rhel9" - if not args.rh_push == "true": - LOGGER.info("--rh-push is not set. Skipping public registry association.") - else: - repo = container_image_payload["repositories"][0].copy() - repo["published"] = True - repo["registry"] = "registry.access.redhat.com" - repo["repository"] = proxymap(image_name) - container_image_payload["repositories"].append(repo) - rsp = pyxis.post(upload_url, container_image_payload).json() # Make sure container metadata was successfully added to Pyxis @@ -332,7 +300,7 @@ def create_container_image(args, parsed_data: Dict[str, Any]): raise Exception("Image metadata was not successfully added to Pyxis.") -def add_container_image_repository(args, parsed_data: Dict[str, Any], image: Dict[str, Any]): +def add_container_image_repository(args: Dict[str, Any], image: Dict[str, Any]): if not args.rh_push == "true": LOGGER.info("--rh-push is not set. 
Skipping public registry association.") return @@ -340,30 +308,54 @@ def add_container_image_repository(args, parsed_data: Dict[str, Any], image: Dic identifier = image["_id"] LOGGER.info(f"Adding repository to container image {identifier}") - date_now = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f+00:00") + patch_url = urljoin(args.pyxis_url, f"v1/images/id/{identifier}") - image_name = parsed_data["name"] + payload = {"repositories": image["repositories"]} + payload["repositories"].extend(construct_repositories(args)) - patch_url = urljoin(args.pyxis_url, f"v1/images/id/{identifier}") + rsp = pyxis.patch(patch_url, payload).json() + + # Make sure container metadata was successfully added to Pyxis + if "_id" in rsp: + emit_id(rsp["_id"]) + else: + raise Exception("Image metadata was not successfully added to Pyxis.") - image["repositories"].append( + +def construct_repositories(args): + image_name = args.name + image_registry = image_name.split("/")[0] + image_repo = image_name.split("/", 1)[1] + + date_now = datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f+00:00") + + repos = [ { - "published": True, - "registry": "registry.access.redhat.com", - "repository": proxymap(image_name), + "published": False, + "registry": image_registry, + "repository": image_repo, "push_date": date_now, "tags": pyxis_tags(args, date_now), } - ) - image["repositories"][-1].update(repository_digest_values(args)) + ] - rsp = pyxis.patch(patch_url, image).json() + repos[0].update(repository_digest_values(args)) - # Make sure container metadata was successfully added to Pyxis - if "_id" in rsp: - emit_id(rsp["_id"]) + # For images released to registry.redhat.io we need a second repository item + # with published=true and registry and repository converted. + # E.g. if the name in the oras manifest result is + # "quay.io/redhat-prod/rhtas-tech-preview----cosign-rhel9", + # repository will be "rhtas-tech-preview/cosign-rhel9" + if not args.rh_push == "true": + LOGGER.info("--rh-push is not set. Skipping public registry association.") else: - raise Exception("Image metadata was not successfully added to Pyxis.") + rh_repo = repos[0].copy() + rh_repo["published"] = True + rh_repo["registry"] = "registry.access.redhat.com" + rh_repo["repository"] = proxymap(image_name) + repos.append(rh_repo) + + return repos def main(): # pragma: no cover @@ -378,12 +370,11 @@ def main(): # pragma: no cover # First check if it exists at all LOGGER.info(f"Checking to see if digest {args.architecture_digest} exists in pyxis") - image = image_already_exists(args, args.architecture_digest, repository=None) - if image: - # Then, check if it exists in association with the given repository + image = find_image(args.pyxis_url, args.architecture_digest) + if image is not None: identifier = image["_id"] - LOGGER.info(f"It does! Checking to see if it's associated with {args.name}") - if image_already_exists(args, args.architecture_digest, repository=args.name): + # Then, check if it already references the given repository + if repo_in_image(proxymap(args.name), image): LOGGER.info( f"Image with given docker_image_digest already exists as {identifier} " f"and is associated with repository {args.name}. " @@ -394,7 +385,7 @@ def main(): # pragma: no cover f"Image with given docker_image_digest exists as {identifier}, but " f"is not yet associated with repository {args.name}." 
) - add_container_image_repository(args, parsed_data, image) + add_container_image_repository(args, image) else: LOGGER.info("Image with given docker_image_digest doesn't exist yet.") create_container_image(args, parsed_data) diff --git a/pyxis/test_create_container_image.py b/pyxis/test_create_container_image.py index e42e8b4..59c1254 100644 --- a/pyxis/test_create_container_image.py +++ b/pyxis/test_create_container_image.py @@ -4,87 +4,86 @@ from unittest.mock import patch, MagicMock from create_container_image import ( - image_already_exists, + proxymap, + find_image, + repo_in_image, + prepare_parsed_data, + pyxis_tags, + repository_digest_values, create_container_image, add_container_image_repository, - prepare_parsed_data, + construct_repositories, ) -mock_pyxis_url = "https://catalog.redhat.com/api/containers/" +PYXIS_URL = "https://catalog.redhat.com/api/containers/" + + +def test_proxymap(): + repository = "quay.io/redhat-pending/foo----bar" + + mapped = proxymap(repository) + + assert mapped == "foo/bar" @patch("create_container_image.pyxis.get") -def test_image_already_exists__image_does_exist(mock_get): +def test_find_image__image_does_exist(mock_get): # Arrange mock_rsp = MagicMock() mock_get.return_value = mock_rsp - args = MagicMock() - args.pyxis_url = mock_pyxis_url - args.architecture_digest = "some_digest" - args.name = "server/org/some_name" + architecture_digest = "some_digest" + mock_image = {"_id": 1} # Image already exists - mock_rsp.json.return_value = {"data": [{"_id": 0}]} + mock_rsp.json.return_value = {"data": [mock_image]} # Act - exists = image_already_exists(args, args.architecture_digest, args.name) + image = find_image(PYXIS_URL, architecture_digest) # Assert - assert exists + assert image == mock_image mock_get.assert_called_once_with( - mock_pyxis_url + PYXIS_URL + "v1/images?page_size=1&filter=" + "repositories.manifest_schema2_digest%3D%3D%22some_digest%22" + "%3Bnot%28deleted%3D%3Dtrue%29" - + "%3Brepositories.repository%3D%3D%22some_name%22" ) @patch("create_container_image.pyxis.get") -def test_image_already_exists__image_does_not_exist(mock_get): +def test_find_image__image_does_not_exist(mock_get): # Arrange mock_rsp = MagicMock() mock_get.return_value = mock_rsp - args = MagicMock() - args.pyxis_url = mock_pyxis_url digest = "some_digest" - name = "server/org/some----name" # Image doesn't exist mock_rsp.json.return_value = {"data": []} # Act - exists = image_already_exists(args, digest, name) + image = find_image(PYXIS_URL, digest) # Assert - assert not exists + assert image is None -@patch("create_container_image.pyxis.get") -def test_image_already_exists__image_does_exist_but_no_repo(mock_get): - # Arrange - mock_rsp = MagicMock() - mock_get.return_value = mock_rsp - args = MagicMock() - args.pyxis_url = mock_pyxis_url - args.architecture_digest = "some_digest" - args.name = "server/org/some_name" +# scenario where repo is present in the image +def test_repo_in_image__true(): + image = {"repositories": [{"repository": "my/repo"}, {"repository": "foo/bar"}]} - # Image already exists - mock_rsp.json.return_value = {"data": [{"_id": 0}]} + result = repo_in_image("foo/bar", image) - # Act - exists = image_already_exists(args, args.architecture_digest, None) + assert result - # Assert - assert exists - mock_get.assert_called_once_with( - mock_pyxis_url - + "v1/images?page_size=1&filter=" - + "repositories.manifest_schema2_digest%3D%3D%22some_digest%22" - + "%3Bnot%28deleted%3D%3Dtrue%29" - ) + +# scenario where repo is not present in the image +def 
test_repo_in_image__false(): + image = {"repositories": [{"repository": "my/repo"}, {"repository": "foo/bar"}]} + + result = repo_in_image("something/missing", image) + + assert not result @patch("create_container_image.pyxis.post") @@ -97,22 +96,24 @@ def test_create_container_image(mock_datetime, mock_post): mock_datetime.now = MagicMock(return_value=datetime(1970, 10, 10, 10, 10, 10)) args = MagicMock() - args.pyxis_url = mock_pyxis_url + args.pyxis_url = PYXIS_URL args.tags = "some_version" args.certified = "false" args.rh_push = "false" args.architecture_digest = "arch specific digest" + args.digest = "some_digest" args.media_type = "single architecture" + args.name = "quay.io/some_repo" # Act create_container_image( args, - {"architecture": "ok", "digest": "some_digest", "name": "quay.io/some_repo"}, + {"architecture": "ok"}, ) # Assert mock_post.assert_called_with( - mock_pyxis_url + "v1/images", + PYXIS_URL + "v1/images", { "repositories": [ { @@ -149,29 +150,42 @@ def test_add_container_image_repository(mock_datetime, mock_patch): mock_datetime.now = MagicMock(return_value=datetime(1970, 10, 10, 10, 10, 10)) args = MagicMock() - args.pyxis_url = mock_pyxis_url + args.pyxis_url = PYXIS_URL args.tags = "some_version" args.rh_push = "true" args.architecture_digest = "arch specific digest" args.media_type = "single architecture" + args.name = "quay.io/redhat-pending/some_product----some_repo" # Act add_container_image_repository( args, - {"architecture": "ok", "digest": "some_digest", "name": "quay.io/namespace/some_repo"}, {"_id": "some_id", "repositories": []}, ) # Assert mock_patch.assert_called_with( - mock_pyxis_url + "v1/images/id/some_id", + PYXIS_URL + "v1/images/id/some_id", { - "_id": "some_id", "repositories": [ + { + "published": False, + "registry": "quay.io", + "repository": "redhat-pending/some_product----some_repo", + "push_date": "1970-10-10T10:10:10.000000+00:00", + "tags": [ + { + "added_date": "1970-10-10T10:10:10.000000+00:00", + "name": "some_version", + } + ], + # Note, no manifest_list_digest here. Single arch. + "manifest_schema2_digest": "arch specific digest", + }, { "published": True, "registry": "registry.access.redhat.com", - "repository": "some_repo", + "repository": "some_product/some_repo", "push_date": "1970-10-10T10:10:10.000000+00:00", "tags": [ { @@ -181,7 +195,7 @@ def test_add_container_image_repository(mock_datetime, mock_patch): ], # Note, no manifest_list_digest here. Single arch. 
"manifest_schema2_digest": "arch specific digest", - } + }, ], }, ) @@ -197,7 +211,7 @@ def test_create_container_image_latest(mock_datetime, mock_post): mock_datetime.now = MagicMock(return_value=datetime(1970, 10, 10, 10, 10, 10)) args = MagicMock() - args.pyxis_url = mock_pyxis_url + args.pyxis_url = PYXIS_URL args.tags = "some_version" args.certified = "false" args.is_latest = "true" @@ -205,20 +219,20 @@ def test_create_container_image_latest(mock_datetime, mock_post): args.digest = "some_digest" args.architecture_digest = "arch specific digest" args.media_type = "application/vnd.oci.image.index.v1+json" + args.digest = "some_digest" + args.name = "redhat.com/some_repo/foobar" # Act create_container_image( args, { "architecture": "ok", - "digest": "some_digest", - "name": "redhat.com/some_repo/foobar", }, ) # Assert mock_post.assert_called_with( - mock_pyxis_url + "v1/images", + PYXIS_URL + "v1/images", { "repositories": [ { @@ -259,27 +273,27 @@ def test_create_container_image_rh_push_multiple_tags(mock_datetime, mock_post): mock_datetime.now = MagicMock(return_value=datetime(1970, 10, 10, 10, 10, 10)) args = MagicMock() - args.pyxis_url = mock_pyxis_url + args.pyxis_url = PYXIS_URL args.tags = "tagprefix tagprefix-timestamp" args.certified = "false" args.rh_push = "true" args.digest = "some_digest" args.architecture_digest = "arch specific digest" args.media_type = "application/vnd.oci.image.index.v1+json" + args.digest = "some_digest" + args.name = "quay.io/redhat-pending/some-product----some-image" # Act create_container_image( args, { "architecture": "ok", - "digest": "some_digest", - "name": "quay.io/redhat-pending/some-product----some-image", }, ) # Assert mock_post.assert_called_with( - mock_pyxis_url + "v1/images", + PYXIS_URL + "v1/images", { "repositories": [ { @@ -358,8 +372,6 @@ def test_create_container_image_no_name(): def test_prepare_parsed_data__success(mock_open): args = MagicMock() args.architecture = "test" - args.architecture_digest = "sha:abc" - args.name = "quay.io/hacbs-release/release-service-utils" args.dockerfile = "mydockerfile" manifest_content = json.dumps( { @@ -379,9 +391,7 @@ def test_prepare_parsed_data__success(mock_open): assert parsed_data == { "architecture": "test", - "digest": "sha:abc", "layers": ["2", "1"], - "name": "quay.io/hacbs-release/release-service-utils", "files": [ {"key": "buildfile", "filename": "Dockerfile", "content": dockerfile_content} ], @@ -397,8 +407,6 @@ def test_prepare_parsed_data__success(mock_open): def test_prepare_parsed_data__success_no_dockerfile(mock_open): args = MagicMock() args.architecture = "test" - args.architecture_digest = "sha:abc" - args.name = "quay.io/hacbs-release/release-service-utils" args.dockerfile = "" manifest_content = json.dumps( { @@ -412,9 +420,7 @@ def test_prepare_parsed_data__success_no_dockerfile(mock_open): assert parsed_data == { "architecture": "test", - "digest": "sha:abc", "layers": ["2", "1"], - "name": "quay.io/hacbs-release/release-service-utils", "sum_layer_size_bytes": 0, "uncompressed_layer_sizes": [], "uncompressed_size_bytes": 0, @@ -427,8 +433,6 @@ def test_prepare_parsed_data__success_no_dockerfile(mock_open): def test_prepare_parsed_data__with_layer_sizes(mock_open): args = MagicMock() args.architecture = "test" - args.architecture_digest = "sha:abc" - args.name = "quay.io/hacbs-release/release-service-utils" args.dockerfile = "mydockerfile" manifest_content = json.dumps( { @@ -452,9 +456,7 @@ def test_prepare_parsed_data__with_layer_sizes(mock_open): assert parsed_data == 
{ "architecture": "test", - "digest": "sha:abc", "layers": ["2", "1"], - "name": "quay.io/hacbs-release/release-service-utils", "files": [ {"key": "buildfile", "filename": "Dockerfile", "content": dockerfile_content} ], @@ -467,3 +469,110 @@ def test_prepare_parsed_data__with_layer_sizes(mock_open): "top_layer_id": "2", "uncompressed_top_layer_id": "4", } + + +def test_pyxis_tags__with_latest(): + args = MagicMock() + args.tags = "tag1 tag2" + args.is_latest = "true" + now = "now" + + tags = pyxis_tags(args, now) + + assert tags == [ + {"added_date": "now", "name": "tag1"}, + {"added_date": "now", "name": "tag2"}, + {"added_date": "now", "name": "latest"}, + ] + + +def test_pyxis_tags__without_latest(): + args = MagicMock() + args.tags = "tag1 tag2" + args.is_latest = "false" + now = "now" + + tags = pyxis_tags(args, now) + + assert tags == [ + {"added_date": "now", "name": "tag1"}, + {"added_date": "now", "name": "tag2"}, + ] + + +def test_repository_digest_values__single_arch(): + args = MagicMock() + args.media_type = "application/vnd.docker.distribution.manifest.v2+json" + args.architecture_digest = "mydigest" + + result = repository_digest_values(args) + + assert result == {"manifest_schema2_digest": "mydigest"} + + +def test_repository_digest_values__multi_arch(): + args = MagicMock() + args.media_type = "application/vnd.docker.distribution.manifest.list.v2+json" + args.architecture_digest = "mydigest" + args.digest = "mytopdigest" + + result = repository_digest_values(args) + + assert result == { + "manifest_schema2_digest": "mydigest", + "manifest_list_digest": "mytopdigest", + } + + +@patch("create_container_image.datetime") +def test_construct_repositories(mock_datetime): + mock_datetime.now = MagicMock(return_value=datetime(1970, 10, 10, 10, 10, 10)) + args = MagicMock() + args.media_type = "application/vnd.docker.distribution.manifest.list.v2+json" + args.architecture_digest = "arch specific digest" + args.digest = "some_digest" + args.tags = "tagprefix tagprefix-timestamp" + args.is_latest = "false" + args.rh_push = "true" + args.name = "quay.io/redhat-pending/some-product----some-image" + + repos = construct_repositories(args) + + assert repos == [ + { + "published": False, + "registry": "quay.io", + "repository": "redhat-pending/some-product----some-image", + "push_date": "1970-10-10T10:10:10.000000+00:00", + "tags": [ + { + "added_date": "1970-10-10T10:10:10.000000+00:00", + "name": "tagprefix", + }, + { + "added_date": "1970-10-10T10:10:10.000000+00:00", + "name": "tagprefix-timestamp", + }, + ], + "manifest_list_digest": "some_digest", + "manifest_schema2_digest": "arch specific digest", + }, + { + "published": True, + "registry": "registry.access.redhat.com", + "repository": "some-product/some-image", + "push_date": "1970-10-10T10:10:10.000000+00:00", + "tags": [ + { + "added_date": "1970-10-10T10:10:10.000000+00:00", + "name": "tagprefix", + }, + { + "added_date": "1970-10-10T10:10:10.000000+00:00", + "name": "tagprefix-timestamp", + }, + ], + "manifest_list_digest": "some_digest", + "manifest_schema2_digest": "arch specific digest", + }, + ]
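
Illustrative note (outside the diff): a minimal sketch, assuming the helpers keep the
signatures introduced above, of how the refactored pieces compose. find_image() looks up
an existing ContainerImage by architecture digest, repo_in_image() checks whether the
proxymapped repository is already attached, and both the create and patch paths reuse
construct_repositories(), so the PATCH payload carries only the "repositories" field.
The control flow below mirrors main() after this change; it is a sketch, not runnable
on its own, since it relies on the module-level args and parsed_data.

    # sketch only -- mirrors main() after this refactor
    image = find_image(args.pyxis_url, args.architecture_digest)
    if image is None:
        # no image with this digest yet: POST a full payload to v1/images
        create_container_image(args, parsed_data)
    elif not repo_in_image(proxymap(args.name), image):
        # image exists but lacks this repository: PATCH v1/images/id/{_id}
        # with {"repositories": existing entries + construct_repositories(args)}
        add_container_image_repository(args, image)
    else:
        # image already references the repository: nothing to do
        pass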