Remove JSON customization for anyOf elements #264

Draft · wants to merge 12 commits into base: rf-unionany

2 changes: 1 addition & 1 deletion .github/workflows/lint.yml
@@ -16,7 +16,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.8'
python-version: '3.9'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
2 changes: 1 addition & 1 deletion .github/workflows/release.yml
@@ -58,7 +58,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: 3.8
python-version: 3.9

- name: Install dandischema
run: python -m pip install .
7 changes: 3 additions & 4 deletions .github/workflows/test-dandi-cli.yml
@@ -24,23 +24,22 @@ jobs:
- macos-latest
python:
# Use the only Python which is ATM also used by dandi-api
# - 3.7
# - 3.8
# - 3.9
# - '3.10'
- '3.11'
# - '3.12'
version:
- master
- release
mode:
- normal
include:
- os: ubuntu-latest
python: 3.8
python: 3.9
mode: dandi-devel
version: master
- os: ubuntu-latest
python: 3.8
python: 3.9
mode: dandi-devel
version: release
exclude:
3 changes: 1 addition & 2 deletions .github/workflows/test-nonetwork.yml
@@ -17,9 +17,8 @@ jobs:
os:
- windows-2019
- ubuntu-latest
- macos-12
- macos-latest
python:
- 3.8
- 3.9
- '3.10'
- '3.11'
2 changes: 1 addition & 1 deletion .github/workflows/test-schema.yml
@@ -19,7 +19,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.8'
python-version: '3.9'

- name: Install dandischema
run: python -m pip install .
3 changes: 1 addition & 2 deletions .github/workflows/test.yml
@@ -18,9 +18,8 @@ jobs:
os:
- windows-2019
- ubuntu-latest
- macos-12
- macos-latest
python:
- 3.8
- 3.9
- '3.10'
- '3.11'
2 changes: 1 addition & 1 deletion .github/workflows/typing.yml
@@ -16,7 +16,7 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.8'
python-version: '3.9'

- name: Install dependencies
run: |
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -2,14 +2,14 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.6.0
rev: v5.0.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
- repo: https://github.com/psf/black
rev: 24.8.0
rev: 24.10.0
hooks:
- id: black
- repo: https://github.com/PyCQA/isort
13 changes: 9 additions & 4 deletions dandischema/metadata.py
@@ -380,13 +380,18 @@
stats["tissuesample"].append(sample)

stats["dataStandard"] = stats.get("dataStandard", [])

def add_if_missing(standard: dict) -> None:
if standard not in stats["dataStandard"]:
stats["dataStandard"].append(standard)

if "nwb" in assetmeta["encodingFormat"]:
if models.nwb_standard not in stats["dataStandard"]:
stats["dataStandard"].append(models.nwb_standard)
add_if_missing(models.nwb_standard)
# TODO: RF assumption that any .json implies BIDS
if set(Path(assetmeta["path"]).suffixes).intersection((".json", ".nii")):
if models.bids_standard not in stats["dataStandard"]:
stats["dataStandard"].append(models.bids_standard)
add_if_missing(models.bids_standard)

[Codecov warning (codecov/patch): added line dandischema/metadata.py#L392 was not covered by tests]
if Path(assetmeta["path"]).suffixes == [".ome", ".zarr"]:
add_if_missing(models.ome_ngff_standard)


# TODO?: move/bind such helpers as .from_metadata or alike within
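The hunk above factors the duplicated membership checks into a local `add_if_missing` helper and adds `.ome.zarr` suffix detection for the new OME/NGFF standard. A minimal standalone sketch of that logic; the standard dicts below are simplified stand-ins for `models.nwb_standard`, `models.bids_standard`, and the new `models.ome_ngff_standard`:

```python
from pathlib import Path

# Simplified stand-ins for models.nwb_standard, models.bids_standard, and the
# new models.ome_ngff_standard (the real ones are serialized StandardsType
# records with RRID/DOI identifiers).
nwb_standard = {"name": "Neurodata Without Borders (NWB)"}
bids_standard = {"name": "Brain Imaging Data Structure (BIDS)"}
ome_ngff_standard = {"name": "OME/NGFF Standard"}


def detect_data_standards(assetmeta: dict) -> list:
    """Mirror the suffix-based dataStandard detection added in this hunk."""
    standards: list = []

    def add_if_missing(standard: dict) -> None:
        # Only append unseen entries so aggregating many assets of the same
        # kind yields a single dataStandard record per standard.
        if standard not in standards:
            standards.append(standard)

    suffixes = Path(assetmeta["path"]).suffixes
    if "nwb" in assetmeta["encodingFormat"]:
        add_if_missing(nwb_standard)
    # As the TODO above notes, any .json/.nii suffix is assumed to imply BIDS.
    if set(suffixes).intersection((".json", ".nii")):
        add_if_missing(bids_standard)
    if suffixes == [".ome", ".zarr"]:
        add_if_missing(ome_ngff_standard)
    return standards


print(detect_data_standards(
    {"path": "sub-01/ses-01/micr/sub-01_spim.ome.zarr",
     "encodingFormat": "application/x-zarr"}
))
# -> [{'name': 'OME/NGFF Standard'}]
```

Hoisting the membership check into one helper keeps the three format branches symmetrical and makes registering further standards a one-line change.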
25 changes: 17 additions & 8 deletions dandischema/models.py
@@ -4,7 +4,18 @@
from enum import Enum
import os
import re
from typing import Any, Dict, List, Literal, Optional, Sequence, Type, TypeVar, Union
from typing import (
Annotated,
Any,
Dict,
List,
Literal,
Optional,
Sequence,
Type,
TypeVar,
Union,
)
from warnings import warn

from pydantic import (
@@ -23,9 +34,6 @@
)
from pydantic.json_schema import JsonSchemaValue
from pydantic_core import CoreSchema
from typing_extensions import (
Annotated, # TODO: import from `typing` when Python 3.8 support is dropped
)
from zarr_checksum.checksum import InvalidZarrChecksum, ZarrDirectoryDigest

from .consts import DANDI_SCHEMA_VERSION
@@ -563,7 +571,6 @@ def __get_pydantic_json_schema__(
if value.get("format", None) == "uri":
value["maxLength"] = 1000
allOf = value.get("allOf")
anyOf = value.get("anyOf")
items = value.get("items")
if allOf is not None:
if len(allOf) == 1 and "$ref" in allOf[0]:
@@ -573,9 +580,6 @@
value["oneOf"] = value["allOf"]
value["type"] = "object"
del value["allOf"]
if anyOf is not None:
if len(anyOf) > 1 and any(["$ref" in val for val in anyOf]):
value["type"] = "object"
if items is not None:
anyOf = items.get("anyOf")
if (
@@ -807,6 +811,11 @@ class StandardsType(BaseType):
identifier="RRID:SCR_016124",
).model_dump(mode="json", exclude_none=True)

ome_ngff_standard = StandardsType(
name="OME/NGFF Standard",
identifier="DOI:10.25504/FAIRsharing.9af712",
).model_dump(mode="json", exclude_none=True)


class ContactPoint(DandiBaseModel):
email: Optional[EmailStr] = Field(
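For context on the title change: pydantic renders an `Optional[<model>]` field as an `anyOf` with a `$ref` branch and a null branch, and the deleted hook branch rewrote exactly that shape by forcing `"type": "object"` onto the property. A rough sketch of the schema now left untouched, using hypothetical stand-in models rather than the dandischema ones:

```python
import json
from typing import Optional

from pydantic import BaseModel


class Affiliation(BaseModel):  # stand-in model for illustration only
    name: str


class Person(BaseModel):  # stand-in model for illustration only
    # Optional[<model>] is emitted as anyOf: [{"$ref": ...}, {"type": "null"}].
    # The removed branch matched len(anyOf) > 1 with a "$ref" member and
    # injected "type": "object"; without it, pydantic's output is kept as-is.
    affiliation: Optional[Affiliation] = None


print(json.dumps(Person.model_json_schema()["properties"]["affiliation"], indent=2))
# Roughly:
# {
#   "anyOf": [{"$ref": "#/$defs/Affiliation"}, {"type": "null"}],
#   "default": null
# }
```

The same file also registers the new `ome_ngff_standard` StandardsType that the `.ome.zarr` branch in `metadata.py` references.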
12 changes: 8 additions & 4 deletions dandischema/tests/test_metadata.py
@@ -542,7 +542,7 @@ def test_aggregation_bids() -> None:
{
"id": "dandiasset:6668d37f-e842-4b73-8c20-082a1dd0d31a",
"path": "sub-MITU01/ses-20210703h01m05s04/microscopy/sub-MITU01_"
"run-1_sample-163_stain-YO_chunk-5_spim.h5",
"run-1_sample-163_stain-YO_chunk-5_spim.ome.zarr",
"access": [
{"status": "dandi:OpenAccess", "schemaKey": "AccessRequirements"}
],
@@ -565,7 +565,7 @@
"contentSize": 38474544973,
"dateModified": "2021-07-22T23:59:16.060551-04:00",
"schemaVersion": "0.4.4",
"encodingFormat": "application/x-hdf5",
"encodingFormat": "application/x-zarr",
"wasGeneratedBy": [
{
"id": "urn:uuid:aef77d59-7a7f-4320-9d4b-9b03f3e25e54",
@@ -588,7 +588,7 @@
{
"id": "dandiasset:84dd580f-8d4a-43f8-bda3-6fb53fb5d3a2",
"path": "sub-MITU01/ses-20210703h16m32s10/microscopy/sub-MITU01_"
"ses-20210703h16m32s10_run-1_sample-162_stain-LEC_chunk-5_spim.h5",
"ses-20210703h16m32s10_run-1_sample-162_stain-LEC_chunk-5_spim.ome.zarr",
"access": [
{"status": "dandi:OpenAccess", "schemaKey": "AccessRequirements"}
],
@@ -611,7 +611,7 @@
"contentSize": 61774316916,
"dateModified": "2021-10-01T18:28:16.038990-04:00",
"schemaVersion": "0.6.0",
"encodingFormat": "application/x-hdf5",
"encodingFormat": "application/x-zarr",
"wasGeneratedBy": [
{
"id": "urn:uuid:8f69a248-0e6a-4fa1-8369-ae1cc63d59d8",
@@ -634,3 +634,7 @@ def test_aggregation_bids() -> None:
]
summary = aggregate_assets_summary(data)
assert summary["numberOfSamples"] == 2
assert (
sum(_.get("name", "").startswith("OME/NGFF") for _ in summary["dataStandard"])
== 1
) # only a single entry so we do not duplicate them
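The new assertion counts OME/NGFF entries instead of checking membership, so the test fails if the two `.ome.zarr` assets produce duplicate dataStandard records. The counting idiom in isolation, with an illustrative summary fragment in place of the real `aggregate_assets_summary(data)` output:

```python
# Illustrative summary fragment; in the test it comes from
# aggregate_assets_summary(data).
summary = {
    "dataStandard": [
        {"name": "OME/NGFF Standard", "identifier": "DOI:10.25504/FAIRsharing.9af712"},
        {"name": "Brain Imaging Data Structure (BIDS)"},  # hypothetical extra entry
    ]
}

# sum() over booleans counts matches: exactly one OME/NGFF entry, no duplicates.
assert sum(
    entry.get("name", "").startswith("OME/NGFF") for entry in summary["dataStandard"]
) == 1
```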
3 changes: 1 addition & 2 deletions dandischema/types.py
@@ -1,12 +1,11 @@
# This file is for defining types that extend existing types through the use of
# `typing.Annotated`.

from typing import Type
from typing import Annotated, Type

from pydantic import ByteSize, GetCoreSchemaHandler, GetJsonSchemaHandler
from pydantic.json_schema import JsonSchemaValue
from pydantic_core import CoreSchema, core_schema
from typing_extensions import Annotated


class _ByteSizeJsonSchemaAnnotation:
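This import shuffle works because `Annotated` landed in the stdlib `typing` module in Python 3.9, which is now the minimum version (see the `setup.cfg` hunk below), so the `typing_extensions` fallback can go. A small sketch of the pattern, with a hypothetical annotated model:

```python
import sys
from typing import Annotated  # previously: from typing_extensions import Annotated

from pydantic import BaseModel, Field

# Annotated has been in the stdlib typing module since Python 3.9, the new
# floor declared in setup.cfg, so the typing_extensions shim is unnecessary.
assert sys.version_info >= (3, 9)


class Asset(BaseModel):  # hypothetical model, just to exercise Annotated
    # Annotated attaches extra (here pydantic) metadata to a plain type.
    path: Annotated[str, Field(min_length=1)]


print(Asset(path="sub-01/sub-01_spim.ome.zarr"))
```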
4 changes: 1 addition & 3 deletions setup.cfg
@@ -12,7 +12,6 @@ classifiers =
License :: OSI Approved :: Apache Software License
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
@@ -27,12 +26,11 @@ project_urls =
Source Code = https://github.com/dandi/dandischema

[options]
python_requires = >=3.8
python_requires = >=3.9
install_requires =
jsonschema[format]
pydantic[email] ~= 2.4
requests
typing_extensions; python_version < "3.9"
zarr_checksum
zip_safe = False
packages = find_namespace: