Commit

fix more mypy issues
ytausch committed Nov 25, 2024
1 parent b351afc commit f6a342f
Showing 5 changed files with 18 additions and 21 deletions.
23 changes: 9 additions & 14 deletions conda_forge_tick/feedstock_parser.py
@@ -1,7 +1,6 @@
 import collections.abc
 import hashlib
 import logging
-import os
 import re
 import tempfile
 import typing
@@ -19,16 +18,12 @@
 )
 from requests.models import Response

-from conda_forge_tick.migrators_types import RecipeTypedDict
-
-from .migrators_types import PackageName, RequirementsTypedDict
-
-if typing.TYPE_CHECKING:
-    from mypy_extensions import TestTypedDict
-
 from conda_forge_tick.lazy_json_backends import LazyJson, dumps, loads
+from conda_forge_tick.migrators_types import RecipeTypedDict
 from conda_forge_tick.utils import as_iterable, parse_meta_yaml, parse_recipe_yaml

+from .migrators_types import PackageName, RequirementsTypedDict, TestTypedDict
+
 logger = logging.getLogger(__name__)

 PIN_SEP_PAT = re.compile(r" |>|<|=|\[")
@@ -37,7 +32,7 @@
 # that would be available in a bootstrapping scenario
 # for these nodes, we only use the bootstrap requirements
 # to build graph edges
-BOOTSTRAP_MAPPINGS = {}
+BOOTSTRAP_MAPPINGS: dict = {}


 def _dedupe_list_ordered(list_with_dupes):
@@ -76,7 +71,7 @@ def _get_requirements(
     host: bool = True,
     run: bool = True,
     outputs_to_keep: Optional[Set["PackageName"]] = None,
-) -> "Set[PackageName]":
+) -> set[PackageName]:
     """Get the list of recipe requirements from a meta.yaml dict

     Parameters
@@ -97,7 +92,7 @@
     """
     kw = dict(build=build, host=host, run=run)
     if outputs_to_keep:
-        reqs = set()
+        reqs: set[PackageName] = set()
         outputs_ = meta_yaml.get("outputs", []) or [] if outputs else []
         for output in outputs_:
             if output.get("name") in outputs_to_keep:
@@ -116,7 +111,7 @@ def _parse_requirements(
     build: bool = True,
     host: bool = True,
     run: bool = True,
-) -> typing.MutableSet["PackageName"]:
+) -> set["PackageName"]:
     """Flatten a YAML requirements section into a list of names"""
     if not req:  # handle None as empty
         return set()
@@ -145,7 +140,7 @@ def _extract_requirements(meta_yaml, outputs_to_keep=None):
     metas = [meta_yaml] + meta_yaml.get("outputs", []) or []

     for block in metas:
-        req: "RequirementsTypedDict" = block.get("requirements", {}) or {}
+        req: "RequirementsTypedDict" | list = block.get("requirements", {}) or {}  # type: ignore[assignment]
         if isinstance(req, list):
             requirements_dict["run"].update(set(req))
             continue
@@ -154,7 +149,7 @@
                 list(as_iterable(req.get(section, []) or [])),
             )

-        test: "TestTypedDict" = block.get("test", {}) or {}
+        test: "TestTypedDict" = block.get("test", {}) or {}  # type: ignore[assignment]
         requirements_dict["test"].update(test.get("requirements", []) or [])
         requirements_dict["test"].update(test.get("requires", []) or [])

2 changes: 1 addition & 1 deletion conda_forge_tick/lazy_json_backends.py
@@ -458,7 +458,7 @@ def hget(self, name, key):
         return dumps(data["value"])


-LAZY_JSON_BACKENDS = {
+LAZY_JSON_BACKENDS: dict[str, type[LazyJsonBackend]] = {
     "file": FileLazyJsonBackend,
     "mongodb": MongoDBLazyJsonBackend,
     "github": GithubLazyJsonBackend,
6 changes: 3 additions & 3 deletions conda_forge_tick/make_graph.py
@@ -25,7 +25,7 @@
 from .all_feedstocks import get_all_feedstocks, get_archived_feedstocks
 from .cli_context import CliContext
 from .executors import executor
-from .utils import as_iterable, dump_graph, load_graph
+from .utils import as_iterable, dump_graph, load_existing_graph

 # from conda_forge_tick.profiler import profiling

@@ -257,7 +257,7 @@ def _create_edges(gx: nx.DiGraph) -> nx.DiGraph:


 def _add_run_exports(nodes_to_update):
-    gx = load_graph()
+    gx = load_existing_graph()

     new_names = [name for name in nodes_to_update if name not in gx.nodes]
     for name in nodes_to_update:
@@ -336,7 +336,7 @@ def main(
     logger.info(f"archived nodes: {len(archived_names)}")

     if update_nodes_and_edges:
-        gx = load_graph()
+        gx = load_existing_graph()

         new_names = [name for name in names if name not in gx.nodes]
         with lazy_json_override_backends(
3 changes: 2 additions & 1 deletion conda_forge_tick/migration_runner.py
@@ -4,6 +4,7 @@
 import shutil
 import tempfile
 from pathlib import Path
+from typing import Any

 from conda_forge_feedstock_ops.container_utils import (
     get_default_log_level_args,
@@ -239,7 +240,7 @@ def run_migration_local(
     )
     recipe_dir = os.path.join(feedstock_dir, "recipe")

-    data = {
+    data: dict[str, Any] = {
         "migrate_return_value": None,
         "commit_message": None,
         "pr_title": None,
5 changes: 3 additions & 2 deletions conda_forge_tick/update_upstream_versions.py
@@ -13,6 +13,7 @@
     List,
     Literal,
     Mapping,
+    MutableMapping,
     Optional,
     Tuple,
     TypeVar,
@@ -181,7 +182,7 @@ def get_latest_version_local(


 def get_latest_version_containerized(
     name: str,
-    attrs: Mapping[str, Any],
+    attrs: MutableMapping[str, Any],
     sources: Iterable[AbstractSource],
 ) -> Dict[str, Union[Literal[False], str]]:
     """
@@ -253,7 +254,7 @@ def get_latest_version(
         The new version information.
     """
     if should_use_container(use_container=use_container):
-        return get_latest_version_containerized(name, attrs, sources)
+        return get_latest_version_containerized(name, dict(attrs), sources)
     else:
         return get_latest_version_local(name, attrs, sources)

