Skip to content

Commit

Permalink
Use new key for core metadata reference (#28)
Browse files Browse the repository at this point in the history
Implements PEP 714, replacing the `dist-info-metadata` key with `core-metadata`

Still accepts the old key in source-index responses, and emits the old key alongside the new one in HTML responses

Tests added for core-metadata keys/attributes
  • Loading branch information
EpicWink authored Jun 8, 2023
1 parent d20e2e4 commit caa09de
Show file tree
Hide file tree
Showing 9 changed files with 112 additions and 7 deletions.
23 changes: 19 additions & 4 deletions src/proxpi/_cache.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,8 @@ def to_json_response(self) -> t.Dict[str, t.Any]:
if self.requires_python is not None:
data["requires-python"] = self.requires_python
if self.dist_info_metadata is not None:
data["dist-info-metadata"] = self.dist_info_metadata
# PEP 714: only emit new key in JSON
data["core-metadata"] = self.dist_info_metadata
if self.gpg_sig is not None:
data["gpg-sig"] = self.gpg_sig
if self.yanked is not None:
Expand All @@ -124,11 +125,20 @@ def from_html_element(
) -> "File":
"""Construct from HTML API response."""
url = urllib.parse.urljoin(request_url, el.attrib["href"])

attributes = {k: v for k, v in el.attrib.items() if k != "href"}

# PEP 714: accept both core-metadata attributes, and emit both in HTML
if "data-core-metadata" in attributes:
attributes["data-dist-info-metadata"] = attributes["data-core-metadata"]
elif "data-dist-info-metadata" in attributes:
attributes["data-core-metadata"] = attributes["data-dist-info-metadata"]

return cls(
name=el.text,
url=url,
fragment=urllib.parse.urlsplit(url).fragment,
attributes={k: v for k, v in el.attrib.items() if k != "href"},
attributes=attributes,
)

@property
Expand All @@ -141,7 +151,7 @@ def requires_python(self):

@property
def dist_info_metadata(self):
metadata = self.attributes.get("data-dist-info-metadata")
metadata = self.attributes.get("data-core-metadata")
if metadata is None:
return None
hashes = self._parse_hash(metadata)
Expand Down Expand Up @@ -198,7 +208,10 @@ def from_json_response(cls, data: t.Dict[str, t.Any], request_url: str) -> "File
url=urllib.parse.urljoin(request_url, data["url"]),
hashes=data["hashes"],
requires_python=data.get("requires-python"),
dist_info_metadata=data.get("dist-info-metadata"),
# PEP 714: accept both core-metadata keys
dist_info_metadata=(
data.get("core-metadata") or data.get("dist-info-metadata")
),
gpg_sig=data.get("gpg-sig"),
yanked=data.get("yanked"),
)
Expand All @@ -218,6 +231,8 @@ def attributes(self) -> t.Dict[str, str]:
attributes["data-dist-info-metadata"] = self._stringify_hashes(
self.dist_info_metadata,
) if isinstance(self.dist_info_metadata, dict) else "" # fmt: skip
# PEP 714: emit both core-metadata attributes in HTML
attributes["data-core-metadata"] = attributes["data-dist-info-metadata"]
if self.gpg_sig is not None:
attributes["data-gpg-sig"] = "true" if self.gpg_sig else "false"
if self.yanked:
Expand Down
2 changes: 2 additions & 0 deletions tests/data/indexes/root/numpy/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@
</head>
<body>
<a href="numpy-1.23.1-cp310-cp310-manylinux_2_17_x86_64.whl#sha256=38d1e325e3d8828d3361730fc935aabb9cc5ab14f7ea99f9438f2768e2c5f272" data-requires-python="&gt;=3.8">numpy-1.23.1-cp310-cp310-manylinux_2_17_x86_64.whl</a><br />
<a href="numpy-1.23.1-cp310-cp310-manylinux_2_24_x86_64.whl#sha256=6e8d4970bf49f5b1d682dd5d74e1fc383bd462ec973f0a12bfbaae0734dbf53a" data-requires-python="&gt;=3.8" data-core-metadata>numpy-1.23.1-cp310-cp310-manylinux_2_24_x86_64.whl</a><br />
<a href="numpy-1.23.1-cp310-cp310-manylinux_2_28_x86_64.whl#sha256=57fddb18d8862f6240443ea00bf413c3d4b967b50b0d416317dac8a0365138a6" data-requires-python="&gt;=3.8" data-core-metadata="sha256=9e4a576848f4ea0051b63fe503b9357eb7200f9d979eb983a7139478450d2e6a">numpy-1.23.1-cp310-cp310-manylinux_2_28_x86_64.whl</a><br />
<a href="numpy-1.23.1-cp310-cp310-win_amd64.whl#sha256=c82522a5e593eba845234044e3ad5c1d2a666e2be7e872fca8a65287bc72b383" data-requires-python="&gt;=3.8" data-yanked="">numpy-1.23.1-cp310-cp310-win_amd64.whl</a><br />
<a href="numpy-1.23.1.tar.gz#sha256=0024c61ffaf1d4b424eaff554bb6cec1cfd580cc2ff64ca7858c7ced6b880429" data-requires-python="&gt;=3.8">numpy-1.23.1.tar.gz</a><br />
</body>
Expand Down
20 changes: 20 additions & 0 deletions tests/data/indexes/root/numpy/index.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,26 @@
"requires-python": ">=3.8",
"url": "numpy-1.23.1-cp310-cp310-manylinux_2_17_x86_64.whl"
},
{
"core-metadata": true,
"filename": "numpy-1.23.1-cp310-cp310-manylinux_2_24_x86_64.whl",
"hashes": {
"sha256": "6e8d4970bf49f5b1d682dd5d74e1fc383bd462ec973f0a12bfbaae0734dbf53a"
},
"requires-python": ">=3.8",
"url": "numpy-1.23.1-cp310-cp310-manylinux_2_24_x86_64.whl"
},
{
"core-metadata": {
"sha256": "9e4a576848f4ea0051b63fe503b9357eb7200f9d979eb983a7139478450d2e6a"
},
"filename": "numpy-1.23.1-cp310-cp310-manylinux_2_28_x86_64.whl",
"hashes": {
"sha256": "57fddb18d8862f6240443ea00bf413c3d4b967b50b0d416317dac8a0365138a6"
},
"requires-python": ">=3.8",
"url": "numpy-1.23.1-cp310-cp310-manylinux_2_28_x86_64.whl"
},
{
"filename": "numpy-1.23.1-cp310-cp310-win_amd64.whl",
"hashes": {
Expand Down
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
Metadata-Version: 2.3
Name: NumPy
Version: 1.23.1
Binary file not shown.
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
Metadata-Version: 2.3
Name: NumPy
Version: 1.23.1
22 changes: 22 additions & 0 deletions tests/data/indexes/root/numpy/yanked.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,28 @@
"url": "numpy-1.23.1-cp310-cp310-manylinux_2_17_x86_64.whl",
"yanked": false
},
{
"core-metadata": true,
"filename": "numpy-1.23.1-cp310-cp310-manylinux_2_24_x86_64.whl",
"hashes": {
"sha256": "6e8d4970bf49f5b1d682dd5d74e1fc383bd462ec973f0a12bfbaae0734dbf53a"
},
"requires-python": ">=3.8",
"url": "numpy-1.23.1-cp310-cp310-manylinux_2_24_x86_64.whl",
"yanked": false
},
{
"core-metadata": {
"sha256": "9e4a576848f4ea0051b63fe503b9357eb7200f9d979eb983a7139478450d2e6a"
},
"filename": "numpy-1.23.1-cp310-cp310-manylinux_2_28_x86_64.whl",
"hashes": {
"sha256": "57fddb18d8862f6240443ea00bf413c3d4b967b50b0d416317dac8a0365138a6"
},
"requires-python": ">=3.8",
"url": "numpy-1.23.1-cp310-cp310-manylinux_2_28_x86_64.whl",
"yanked": false
},
{
"filename": "numpy-1.23.1-cp310-cp310-win_amd64.whl",
"hashes": {
Expand Down
46 changes: 43 additions & 3 deletions tests/test_integration.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,26 @@ def test_package(server, project, accept, index_json_response, clear_projects_ca
else:
assert gpg_response.status_code == 404

if any(k == "data-dist-info-metadata" for k, _ in attributes):
(value,) = (v for k, v in attributes if k == "data-dist-info-metadata")
(expected,) = (v for k, v in attributes if k == "data-core-metadata")
assert value == expected

if any(k == "data-core-metadata" for k, _ in attributes):
(expected_core_metadata_hash,) = (
v for k, v in attributes if k == "data-core-metadata"
)
core_metadata_response = requests.get(urllib_parse.urljoin(
project_url, href_stripped + ".metadata"
))
core_metadata_response.raise_for_status()
if expected_core_metadata_hash and expected_core_metadata_hash != "true":
hash_name, expected_hash_value = expected_core_metadata_hash.split("=")
core_metadata_hash_value = hashlib.new(
hash_name, core_metadata_response.content
).hexdigest()
assert core_metadata_hash_value == expected_hash_value

if any(k == "data-requires-python" for k, _ in attributes):
(python_requirement,) = (
v for k, v in attributes if k == "data-requires-python"
Expand Down Expand Up @@ -279,10 +299,10 @@ def test_package_json(
params = {"format": accept}
else:
headers = {"Accept": accept}
project_url = f"{server}/index/{project}/"

with set_mock_index_response_is_json(index_json_response):
response = requests.get(
f"{server}/index/{project}/", params=params, headers=headers
)
response = requests.get(project_url, params=params, headers=headers)

assert response.status_code == 200
assert response.headers["Content-Type"][:35] == (
Expand All @@ -301,6 +321,26 @@ def test_package_json(
assert file["filename"] == file["url"]
assert isinstance(file["hashes"], dict)

assert not file.get("dist-info-metadata")

url_parts: urllib_parse.SplitResult = urllib_parse.urlsplit(file["url"])
url_parts_stripped = url_parts._replace(fragment="")
url_stripped = url_parts_stripped.geturl()
assert url_stripped == file["filename"]

if file.get("core-metadata"):
core_metadata_response = requests.get(
urllib_parse.urljoin(project_url, url_stripped + ".metadata"),
)
core_metadata_response.raise_for_status()

if isinstance(file["core-metadata"], dict):
for hash_name, expected_hash_value in file["core-metadata"].items():
core_metadata_hash_value = hashlib.new(
hash_name, core_metadata_response.content
).hexdigest()
assert core_metadata_hash_value == expected_hash_value

files_by_filename = {f["filename"]: f for f in response_data["files"]}
if project == "proxpi":
assert not files_by_filename["proxpi-1.0.0.tar.gz"].get("requires-python")
Expand Down

0 comments on commit caa09de

Please sign in to comment.