Add linters to ruff / pre-commit #40

Merged 2 commits on Jan 24, 2024
39 changes: 25 additions & 14 deletions pyproject.toml
@@ -48,35 +48,46 @@ version_scheme = "no-guess-dev" # Will not guess the next version

[tool.ruff]
src = ["src"]
unsafe-fixes = true
select = [
"A", # flake8-builtins
"ARG", # flake8-unused-arguments
"C4", # flake8-comprehensions
"D", # pydocstyle
"E", # pycodestyle (errors)
"W", # pycodestyle (warnings)
"EXE", # flake8-executable
"F", # Pyflakes
"I", # isort
"ISC", # flake8-implicit-str-concat
"N", # pep8-naming
"PTH", # flake8-use-pathlib
"PYI", # flake8-pyi
]

ignore = [
"D100", # Missing docstring in public module
"D104", # Missing docstring in public package
"D105", # Missing docstring in magic method
"D203", # 1 blank line required before class docstring
"D212", # Multi-line docstring summary should start at the first line
"D213", # Multi-line docstring summary should start at the second line
"N803", # Argument name should be lowercase
"N806", # Variable _ in function should be lowercase
"PIE796", # Non-unique values are redundant and likely a mistake.
"PLR", # Pylint Refactor
"PTH123", # `open()` should be replaced by `Path.open()`
"PTH207", # "Replace `glob` with `Path.glob` or `Path.rglob`
]

[tool.ruff.lint]
# Enable the isort rules.
extend-select = ["I"]

[tool.black]
target-version = ["py38", "py39", "py310", "py311"]
preview = true

[tool.isort]
profile = "black"
known_first_party = ["dolphin"]

[tool.mypy]
python_version = "3.10"
ignore_missing_imports = true
plugins = ["pydantic.mypy"]

[tool.ruff.per-file-ignores]
"**/__init__.py" = ["F401"]
"test/**" = ["D"]
"**/__init__.py" = ["F403"]
"tests/**" = ["D", "N", "PTH"]

[tool.pytest.ini_options]
doctest_optionflags = "NORMALIZE_WHITESPACE NUMBER"
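For reference, a minimal sketch (not part of this PR) of the kind of module the newly selected rule families accept: a module and function docstring for pydocstyle (D), ordered imports for isort (I), a comprehension instead of a `list()` call for flake8-comprehensions (C4), and `pathlib` usage for flake8-use-pathlib (PTH). The file and function names here are hypothetical.

```python
"""Example module that the selected ruff rules (D, I, C4, PTH) accept."""
from __future__ import annotations

from pathlib import Path


def list_config_stems(config_dir: Path) -> list[str]:
    """Return the stem of every YAML file under ``config_dir``."""
    # PTH prefers Path.glob over glob.glob; C4 prefers a comprehension
    # over list(map(...)).
    return [p.stem for p in sorted(config_dir.glob("*.yaml"))]
```

With this configuration in place, running `ruff check src` would report violations from those families while skipping the codes listed under `ignore`.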
14 changes: 7 additions & 7 deletions scripts/release/generate_product_docx_table.py
@@ -77,13 +77,13 @@ def append_dset_to_table(name, item):
description = item.attrs.get("long_name", "")
units = item.attrs.get("units", "")
table_data.append(
dict(
Name=name,
Type=data_type,
Shape=shape,
Units=units,
Description=description,
)
{
"Name": name,
"Type": data_type,
"Shape": shape,
"Units": units,
"Description": description,
}
)

with h5py.File(hdf5_path, "r") as hf:
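The change above swaps a keyword-argument `dict(...)` call for a dict literal, which is what flake8-comprehensions asks for (C408, part of the `C4` family selected above). A small sketch of the equivalence, with hypothetical values:

```python
# C408: dict() with keyword arguments is flagged; a literal is faster and
# also allows keys that are not valid Python identifiers.
row_from_call = dict(Name="displacement", Units="meters")
row_literal = {"Name": "displacement", "Units": "meters"}
assert row_from_call == row_literal
```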
4 changes: 0 additions & 4 deletions scripts/release/list_packages.py
@@ -33,14 +33,10 @@ class Package:
class CommandNotFoundError(Exception):
"""Raised when a required Unix shell command was not found."""

pass


class YumListIsAnnoyingError(Exception):
"""Raised when 'yum list' does something annoying."""

pass


def check_command(cmd: str) -> bool:
"""Check if a Unix shell command is available."""
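The two exception classes above lose their trailing `pass`: a docstring is already a valid class body, so the extra statement is redundant. This looks like flake8-pie's PIE790 (unnecessary placeholder), though that family is not visible in the select list above, so the exact trigger is an assumption. A sketch:

```python
class CommandNotFoundError(Exception):
    """Raised when a required Unix shell command was not found."""
    # The docstring alone is a complete class body; no `pass` is needed.


# The class behaves exactly as before.
try:
    raise CommandNotFoundError("wget not found")
except CommandNotFoundError as err:
    print(err)
```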
7 changes: 3 additions & 4 deletions scripts/release/setup_delivery_config.py
@@ -8,7 +8,7 @@
# see `ionosphere.download_ionex_for_slcs`.
# The troposphere file download is missing. Dummy files were created.
# for d in `ls input_slcs/t042_088905_iw1* | awk -F'_' '{print $5}' | cut -d'.' -f1`; do
# touch dynamic_ancillary_files/troposphere_files/ERA5_N30_N40_W120_W110_${d}_14.grb;
# touch dynamic_ancillary_files/troposphere_files/ERA5_N30_N40_W120_W110_${d}_14.grb;
# done


@@ -30,7 +30,6 @@ def setup_delivery(cfg_dir: Path, mode: ProcessingMode):
" --amplitude-mean-files ./dynamic_ancillary_files/ps_files/*mean*"
" --amplitude-dispersion-files ./dynamic_ancillary_files/ps_files/*dispersion*"
# TODO # seasonal coherence averages
# "--seasonal-coherence-files dynamic_ancillary_files/seasonal_coherence_files/* "
# Troposphere files:
" --troposphere-files ./dynamic_ancillary_files/troposphere_files/*"
# Ionosphere files:
@@ -49,7 +48,7 @@ def setup_delivery(cfg_dir: Path, mode: ProcessingMode):
f" -o {outfile}"
)
print(cmd)
subprocess.run(cmd, shell=True)
subprocess.run(cmd, shell=True, check=False)
return outfile


@@ -77,7 +76,7 @@ def setup_delivery(cfg_dir: Path, mode: ProcessingMode):
)
cmd = f"python {convert_config} {dolphin_cfg_file} {arg_string}"
print(cmd)
subprocess.run(cmd, shell=True)
subprocess.run(cmd, shell=True, check=False)
# Remove the `dolphin` yamls
for f in cfg_dir.glob("dolphin_config*.yaml"):
f.unlink()
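Both `subprocess.run` calls in this script gain an explicit `check=False`. Ruff flags `subprocess.run` calls that leave `check` unstated (PLW1510 in ruff's Pylint rule set, assuming those warnings are enabled somewhere in the setup), since silently ignoring a nonzero exit status should be a visible decision. A sketch of the two options, using a placeholder command:

```python
import subprocess
import sys

# check=False keeps the previous behavior (exit status ignored) but makes
# that choice explicit to readers and to the linter.
result = subprocess.run([sys.executable, "--version"], check=False)
print(result.returncode)

# check=True would instead raise CalledProcessError on a nonzero exit status.
subprocess.run([sys.executable, "--version"], check=True)
```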
56 changes: 28 additions & 28 deletions scripts/run_repeated_nrt.py
@@ -31,57 +31,57 @@ def _create_cfg(
amplitude_mean_files: Sequence[Filename] = [],
amplitude_dispersion_files: Sequence[Filename] = [],
strides: Mapping[str, int] = {"x": 6, "y": 3},
work_dir: Path = Path("."),
work_dir: Path = Path(),
n_parallel_bursts: int = 1,
):
# strides = {"x": 1, "y": 1}
interferogram_network: dict[str, Any]
if first_ministack:
interferogram_network = dict(
network_type=InterferogramNetworkType.SINGLE_REFERENCE
)
interferogram_network = {
"network_type": InterferogramNetworkType.SINGLE_REFERENCE
}
else:
interferogram_network = dict(
network_type=InterferogramNetworkType.MANUAL_INDEX,
indexes=[(0, -1)],
)
interferogram_network = {
"network_type": InterferogramNetworkType.MANUAL_INDEX,
"indexes": [(0, -1)],
}

cfg = DisplacementWorkflow(
# Things that change with each workflow run
cslc_file_list=slc_files,
input_options=dict(subdataset=OPERA_DATASET_NAME),
input_options={"subdataset": OPERA_DATASET_NAME},
interferogram_network=interferogram_network,
amplitude_mean_files=amplitude_mean_files,
amplitude_dispersion_files=amplitude_dispersion_files,
# Configurable from CLI inputs:
output_options=dict(
strides=strides,
),
phase_linking=dict(
ministack_size=1000, # for single update, process in one ministack
half_window={"x": half_window_size[0], "y": half_window_size[1]},
shp_method=shp_method,
),
output_options={
"strides": strides,
},
phase_linking={
"ministack_size": 1000, # for single update, process in one ministack
"half_window": {"x": half_window_size[0], "y": half_window_size[1]},
"shp_method": shp_method,
},
work_directory=work_dir,
worker_settings=dict(
worker_settings={
# block_size_gb=block_size_gb,
n_parallel_bursts=n_parallel_bursts,
n_workers=4,
threads_per_worker=8,
),
"n_parallel_bursts": n_parallel_bursts,
"n_workers": 4,
"threads_per_worker": 8,
},
# ps_options=dict(
# amp_dispersion_threshold=amp_dispersion_threshold,
# ),
# log_file=log_file,
# )
# Definite hard coded things
unwrap_options=dict(
unwrap_method="snaphu",
run_unwrap=run_unwrap,
ntiles=(2, 2),
downsample_factor=(3, 3),
unwrap_options={
"unwrap_method": "snaphu",
"run_unwrap": run_unwrap,
"ntiles": (2, 2),
"downsample_factor": (3, 3),
# CHANGEME: or else run in background somehow?
),
},
save_compressed_slc=True, # always save, and only sometimes will we grab it
# workflow_name=workflow_name,
)
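Two mechanical cleanups dominate this hunk: `Path(".")` becomes `Path()` (likely ruff's PTH201, since passing the current directory explicitly is redundant) and the nested `dict(...)` calls become literals, mirroring the C408 change shown earlier. A sketch with hypothetical values:

```python
from pathlib import Path

# PTH201 (assumed): Path() with no argument already means the current directory.
assert Path() == Path(".")

# Keyword dict() calls become literals, including nested ones.
phase_linking = {
    "ministack_size": 1000,
    "half_window": {"x": 11, "y": 5},
}
print(phase_linking["half_window"])
```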
1 change: 0 additions & 1 deletion src/disp_s1/cli/run.py
@@ -1,4 +1,3 @@
#!/usr/bin/env python
import click

__all__ = ["run"]
5 changes: 2 additions & 3 deletions src/disp_s1/create.py
@@ -1,4 +1,3 @@
#!/usr/bin/env python
import logging
from pathlib import Path
from typing import Any
@@ -30,9 +29,9 @@ def get_params(
Path(f"{process_dir}/unwrapped/").glob(f"{pair}.unw.tif")
)
logger.info(param_dict["unw_filename"])
except StopIteration:
except StopIteration as e:
logger.error("Check if the pair %s exists", pair)
raise FileNotFoundError(f"Pair {pair} not found")
raise FileNotFoundError(f"Pair {pair} not found") from e
param_dict["conncomp_filename"] = next(
Path(f"{process_dir}/unwrapped/").glob(f"{pair}.unw.conncomp.tif")
)
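The `raise ... from e` added above chains the new `FileNotFoundError` to the original `StopIteration`, so the traceback shows the real cause rather than the generic "during handling of the above exception" context. This is the re-raise-inside-except style check (flake8-bugbear's B904 or similar; the exact code is an assumption since `B` is not in the select list shown). A minimal sketch with a hypothetical helper:

```python
def find_unwrapped(pairs: dict, pair: str) -> str:
    """Return the unwrapped file for ``pair`` (hypothetical helper)."""
    try:
        return pairs[pair]
    except KeyError as e:
        # `from e` records the lookup failure as the explicit __cause__.
        raise FileNotFoundError(f"Pair {pair} not found") from e
```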
4 changes: 2 additions & 2 deletions src/disp_s1/ionosphere.py
@@ -36,7 +36,7 @@ def download_ionex_for_slcs(
logger.info(f"Found {len(date_to_file_list)} dates in the input files.")

output_files = []
for input_date_tuple, file_list in date_to_file_list.items():
for input_date_tuple, _file_list in date_to_file_list.items():
input_date = input_date_tuple[0]
logger.info("Downloading for %s", input_date)
f = download_ionex_for_date(input_date, dest_dir=dest_dir, verbose=verbose)
@@ -78,7 +78,7 @@ def download_ionex_for_date(
wget_cmd.append("--quiet")

logger.info('Running command: "%s"', " ".join(wget_cmd))
subprocess.run(wget_cmd, cwd=dest_dir)
subprocess.run(wget_cmd, cwd=dest_dir, check=False)
return dest_file


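Renaming the loop value to `_file_list` marks it as intentionally unused while keeping the unpacking intact, since only the date is needed inside the loop (the sort of cleanup an unused-loop-variable check asks for; the exact trigger here is an assumption). A sketch with made-up data:

```python
date_to_file_list = {("20220101",): ["a.h5"], ("20220115",): ["b.h5"]}

# The leading underscore tells linters the value is deliberately ignored;
# iterating over .keys() alone would also work, but this keeps the shape
# of the original items() loop.
for input_date_tuple, _file_list in date_to_file_list.items():
    print(input_date_tuple[0])
```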
6 changes: 2 additions & 4 deletions src/disp_s1/main.py
@@ -1,4 +1,3 @@
#!/usr/bin/env python
from __future__ import annotations

import multiprocessing as mp
@@ -31,8 +30,7 @@ def run(
Parameters
----------
cfg : DisplacementWorkflow
[`DisplacementWorkflow`][dolphin.workflows.config.DisplacementWorkflow] object for controlling the
workflow.
`DisplacementWorkflow` object for controlling the workflow.
debug : bool, optional
Enable debug logging, by default False.
pge_runconfig : RunConfig, optional
@@ -92,7 +90,7 @@ def run(

else:
# grab the only key (either a burst, or "") and use that
b = list(grouped_slc_files.keys())[0]
b = next(iter(grouped_slc_files.keys()))
wrapped_phase_cfgs = [(b, cfg)]

ifg_file_list: list[Path] = []
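`next(iter(...))` fetches the first key without building a full list first, which is the form ruff suggests for "first element of an iterable" (likely RUF015, an assumption here). A sketch with a made-up mapping:

```python
grouped_slc_files = {"t042_088905_iw1": ["slc_a.h5", "slc_b.h5"]}

# Equivalent to list(grouped_slc_files.keys())[0], but without allocating
# the intermediate list of keys.
b = next(iter(grouped_slc_files.keys()))
assert b == list(grouped_slc_files.keys())[0]
print(b)
```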
20 changes: 10 additions & 10 deletions src/disp_s1/pge_runconfig.py
@@ -42,7 +42,7 @@ class InputFileGroup(YamlModel):
class DynamicAncillaryFileGroup(YamlModel):
"""A group of dynamic ancillary files."""

algorithm_parameters_file: Path = Field( # type: ignore
algorithm_parameters_file: Path = Field(
default=...,
description="Path to file containing SAS algorithm parameters.",
)
@@ -128,7 +128,7 @@ class PrimaryExecutable(YamlModel):
class ProductPathGroup(YamlModel):
"""Group describing the product paths."""

product_path: Path = Field( # type: ignore
product_path: Path = Field(
default=...,
description="Directory where PGE will place results",
)
@@ -159,7 +159,7 @@ class ProductPathGroup(YamlModel):


class AlgorithmParameters(YamlModel):
"""Class containing all the other [`DisplacementWorkflow`][dolphin.workflows.config] classes."""
"""Class containing all the other `DisplacementWorkflow` classes."""

# Options for each step in the workflow
ps_options: PsOptions = Field(default_factory=PsOptions)
@@ -217,8 +217,8 @@ def model_construct(cls, **kwargs):
)

def to_workflow(self):
"""Convert to a [`DisplacementWorkflow`][dolphin.workflows.config.DisplacementWorkflow] object."""
# We need to go to/from the PGE format to our internal DisplacementWorkflow object:
"""Convert to a `DisplacementWorkflow` object."""
# We need to go to/from the PGE format to dolphin's DisplacementWorkflow:
# Note that the top two levels of nesting can be accomplished by wrapping
# the normal model export in a dict.
#
@@ -240,7 +240,7 @@ def to_workflow(self):
self.dynamic_ancillary_file_group.algorithm_parameters_file
)
param_dict = algorithm_parameters.model_dump()
input_options = dict(subdataset=param_dict.pop("subdataset"))
input_options = {"subdataset": param_dict.pop("subdataset")}

# Convert the frame_id into an output bounding box
frame_to_burst_file = self.static_ancillary_file_group.frame_to_burst_json
@@ -251,7 +251,7 @@ def to_workflow(self):
param_dict["output_options"]["bounds"] = bounds
param_dict["output_options"]["bounds_epsg"] = bounds_epsg

# This get's unpacked to load the rest of the parameters for the DisplacementWorkflow
# unpacked to load the rest of the parameters for the DisplacementWorkflow
return DisplacementWorkflow(
cslc_file_list=cslc_file_list,
input_options=input_options,
@@ -277,13 +277,13 @@ def from_workflow(
save_compressed_slc: bool = False,
output_directory: Optional[Path] = None,
):
"""Convert from a [`DisplacementWorkflow`][dolphin.workflows.config.DisplacementWorkflow] object.
"""Convert from a `DisplacementWorkflow` object.

This is the inverse of the to_workflow method, although there are more
fields in the PGE version, so it's not a 1-1 mapping.

The arguments, like `frame_id` or `algorithm_parameters_file`, are not in the
[`DisplacementWorkflow`][dolphin.workflows.config.DisplacementWorkflow] object, so we need to pass
`DisplacementWorkflow` object, so we need to pass
those in as arguments.

This can be used as preliminary setup to further edit the fields, or as a
@@ -297,7 +297,7 @@ def from_workflow(
algo_keys = set(AlgorithmParameters.model_fields.keys())
alg_param_dict = workflow.model_dump(include=algo_keys)
AlgorithmParameters(**alg_param_dict).to_yaml(algorithm_parameters_file)
# This gets unpacked to load the rest of the parameters for the DisplacementWorkflow
# unpacked to load the rest of the parameters for the DisplacementWorkflow

return cls(
input_file_group=InputFileGroup(
4 changes: 2 additions & 2 deletions src/disp_s1/plotting.py
@@ -81,14 +81,14 @@ def plot_product(
class HDF5Explorer:
"""Class which maps an HDF5 file and allows tab-completion to explore datasets."""

def __init__(self, hdf5_filepath: str, load_less_than: float = 1e3):
def __init__(self, hdf5_filepath: str, load_less_than: float = 1e3): # noqa: D107
self.hdf5_filepath = hdf5_filepath
self._hf = h5py.File(hdf5_filepath, "r")
self._root_group = _HDF5GroupExplorer(
self._hf["/"], load_less_than=load_less_than
)

def close(self):
def close(self): # noqa: D102
self._hf.close()

def __getattr__(self, name):
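The trailing `# noqa: D107` / `# noqa: D102` comments suppress pydocstyle's missing-docstring checks for `__init__` and a public method on just those lines, rather than adding the codes to the project-wide `ignore` list. A sketch of the mechanism, with a hypothetical stand-in class:

```python
class Explorer:
    """Tiny stand-in for HDF5Explorer to show per-line suppression."""

    def __init__(self, path: str):  # noqa: D107
        self.path = path

    def close(self):  # noqa: D102
        # Nothing to release in this sketch; the real class closes an h5py file.
        return None
```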