From 680db30240bcc7bb0349c94f5f2066b95e268bc9 Mon Sep 17 00:00:00 2001 From: deegan Date: Fri, 18 Oct 2024 13:37:41 +0200 Subject: [PATCH] hier comparae --- pyphare/pyphare/core/phare_utilities.py | 12 +- pyphare/pyphare/pharein/examples/job.py | 110 --------------- pyphare/pyphare/pharein/init.py | 2 + pyphare/pyphare/pharein/load_balancer.py | 6 +- pyphare/pyphare/pharesee/hierarchy/fromh5.py | 3 +- .../pyphare/pharesee/hierarchy/hierarchy.py | 13 +- .../pharesee/hierarchy/hierarchy_utils.py | 130 ++++++++++++++---- .../pyphare/pharesee/hierarchy/patchdata.py | 23 +++- pyphare/pyphare/pharesee/particles.py | 15 +- 9 files changed, 156 insertions(+), 158 deletions(-) delete mode 100644 pyphare/pyphare/pharein/examples/job.py diff --git a/pyphare/pyphare/core/phare_utilities.py b/pyphare/pyphare/core/phare_utilities.py index c6937cc76..29c28bb92 100644 --- a/pyphare/pyphare/core/phare_utilities.py +++ b/pyphare/pyphare/core/phare_utilities.py @@ -128,10 +128,18 @@ def is_fp32(item): return isinstance(item, float) -def assert_fp_any_all_close(a, b, atol=1e-16, rtol=0, atol_fp32=None): +def any_fp_tol(a, b, atol=1e-16, rtol=0, atol_fp32=None): if any([is_fp32(el) for el in [a, b]]): atol = atol_fp32 if atol_fp32 else atol * 1e8 - np.testing.assert_allclose(a, b, atol=atol, rtol=rtol) + return dict(atol=atol, rtol=rtol) + + +def fp_any_all_close(a, b, atol=1e-16, rtol=0, atol_fp32=None): + return np.allclose(a, b, **any_fp_tol(a, b, atol, rtol, atol_fp32)) + + +def assert_fp_any_all_close(a, b, atol=1e-16, rtol=0, atol_fp32=None): + np.testing.assert_allclose(a, b, **any_fp_tol(a, b, atol, rtol, atol_fp32)) def decode_bytes(input, errors="ignore"): diff --git a/pyphare/pyphare/pharein/examples/job.py b/pyphare/pyphare/pharein/examples/job.py deleted file mode 100644 index 24ed09f98..000000000 --- a/pyphare/pyphare/pharein/examples/job.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python - -import numpy as np -import pyphare.pharein as ph - - -# ------------------------------------ -# configure the simulation -# ------------------------------------ - -ph.Simulation( - time_step_nbr=1000, # number of time steps (not specified if time_step and final_time provided) - final_time=1.0, # simulation final time (not specified if time_step and time_step_nbr given) - boundary_types="periodic", # boundary condition, string or tuple, length == len(cell) == len(dl) - cells=80, # integer or tuple length == dimension - dl=0.1, # mesh size of the root level, float or tuple - path="test5", # directory where INI file and diagnostics directories will be - # time_step = 0.005, # simulation time step (not specified if time_step_nbr and final_time given) - # domain_size = 8., # float or tuple, not specified if dl and cells are - # interp_order = 1, # interpolation order, [default = 1] can be 1, 2, 3 or 4 - # layout = "yee", # grid layout, [default="yee"] - # origin = 0., # position of the origin of the domain, float or tuple (length = dimension) - # particle_pusher = "modified_boris", # particle pusher method, [default = "modified_boris"] - # refined_particle_nbr = 2, # number of refined particle a particle is split into [default : ] - # diag_export_format = 'ascii', # export format of the diagnostics [default = 'ascii'] - # refinement = {"level":[0,1], # AMR parameters - # "extent_ratio":[0.4, 0.6], - # "refinement_iterations":[0, 3]}, -) # end Simulation - - -# in the following we use the MaxwellianFluidModel - - -Te = 0.12 - - -def n(x): - return 1.0 - - -def bx(x): - xmax = 
ph.getSimulation().simulation_domain()[0] - return np.cos(2 * np.pi / xmax * x) - - -ph.MaxwellianFluidModel(bx=bx, protons={"density": n}, background={}) - - -ph.ElectronModel(closure="isothermal", Te=Te) - - -ph.ElectromagDiagnostics( - diag_type="E", # available : ("E", "B") - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, - path="ElectromagDiagnostics1", # where output files will be written, [default: name] -) - - -ph.FluidDiagnostics( - diag_type="density", # choose in (rho_s, flux_s) - write_every=10, # write on disk every x iterations - compute_every=5, # compute diagnostics every x iterations ( x <= write_every) - start_iteration=0, # iteration at which diag is enabled - last_iteration=990, # iteration at which diag is turned off - population_name="protons", # name of the population for which the diagnostics is made - # ,path = 'FluidDiagnostics1' # where output files will be written, [default: name] -) - - -ph.FluidDiagnostics( - diag_type="bulkVelocity", - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, - population_name="background", -) - -ph.FluidDiagnostics( - diag_type="density", - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, - population_name="all", -) - -ph.FluidDiagnostics( - diag_type="flux", - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, - population_name="background", -) - -ph.ElectromagDiagnostics( - diag_type="B", - write_every=10, - compute_every=5, - start_iteration=0, - last_iteration=990, -) - -for item in ph.getSimulation().electrons.dict_path(): - print(item[0], item[1]) diff --git a/pyphare/pyphare/pharein/init.py b/pyphare/pyphare/pharein/init.py index 5c3a5696f..4728000fc 100644 --- a/pyphare/pyphare/pharein/init.py +++ b/pyphare/pyphare/pharein/init.py @@ -6,4 +6,6 @@ def get_user_inputs(jobname): _init_.PHARE_EXE = True print(jobname) jobmodule = importlib.import_module(jobname) # lgtm [py/unused-local-variable] + if jobmodule is None: + raise RuntimeError("failed to import job") populateDict() diff --git a/pyphare/pyphare/pharein/load_balancer.py b/pyphare/pyphare/pharein/load_balancer.py index b2b542c06..5600469ee 100644 --- a/pyphare/pyphare/pharein/load_balancer.py +++ b/pyphare/pyphare/pharein/load_balancer.py @@ -33,7 +33,7 @@ class LoadBalancer: def __post_init__(self): if self.auto and self.every: - raise RuntimeError(f"LoadBalancer cannot work with both 'every' and 'auto'") + raise RuntimeError("LoadBalancer cannot work with both 'every' and 'auto'") if self.every is None: self.auto = True @@ -50,8 +50,8 @@ def __post_init__(self): if self._register: if not gv.sim: raise RuntimeError( - f"LoadBalancer cannot be registered as no simulation exists" + "LoadBalancer cannot be registered as no simulation exists" ) if gv.sim.load_balancer: - raise RuntimeError(f"LoadBalancer is already registered to simulation") + raise RuntimeError("LoadBalancer is already registered to simulation") gv.sim.load_balancer = self diff --git a/pyphare/pyphare/pharesee/hierarchy/fromh5.py b/pyphare/pyphare/pharesee/hierarchy/fromh5.py index e934bfda5..f79c0cbc3 100644 --- a/pyphare/pyphare/pharesee/hierarchy/fromh5.py +++ b/pyphare/pyphare/pharesee/hierarchy/fromh5.py @@ -24,9 +24,8 @@ particle_files_patterns = ("domain", "patchGhost", "levelGhost") -def get_all_available_quantities_from_h5(filepath, time=0, exclude=["tags"]): +def get_all_available_quantities_from_h5(filepath, time=0, exclude=["tags"], hier=None): time = format_timestamp(time) - 
hier = None path = Path(filepath) for h5 in path.glob("*.h5"): if h5.parent == path and h5.stem not in exclude: diff --git a/pyphare/pyphare/pharesee/hierarchy/hierarchy.py b/pyphare/pyphare/pharesee/hierarchy/hierarchy.py index 2e3b62df5..cab07023c 100644 --- a/pyphare/pyphare/pharesee/hierarchy/hierarchy.py +++ b/pyphare/pyphare/pharesee/hierarchy/hierarchy.py @@ -1,12 +1,13 @@ +import numpy as np +import matplotlib.pyplot as plt + from .patch import Patch from .patchlevel import PatchLevel from ...core.box import Box from ...core import box as boxm -from ...core.phare_utilities import refinement_ratio from ...core.phare_utilities import listify - -import numpy as np -import matplotlib.pyplot as plt +from ...core.phare_utilities import deep_copy +from ...core.phare_utilities import refinement_ratio def format_timestamp(timestamp): @@ -68,6 +69,10 @@ def __init__( self.update() + def __deepcopy__(self, memo): + no_copy_keys = ["data_files"] # do not copy these things + return deep_copy(self, memo, no_copy_keys) + def __getitem__(self, qty): return self.__dict__[qty] diff --git a/pyphare/pyphare/pharesee/hierarchy/hierarchy_utils.py b/pyphare/pyphare/pharesee/hierarchy/hierarchy_utils.py index 2efadfc3d..9788f3fb4 100644 --- a/pyphare/pyphare/pharesee/hierarchy/hierarchy_utils.py +++ b/pyphare/pyphare/pharesee/hierarchy/hierarchy_utils.py @@ -1,11 +1,19 @@ -from .hierarchy import PatchHierarchy -from .patchdata import FieldData +from dataclasses import dataclass, field +from copy import deepcopy +import numpy as np + +from typing import Any, List, Tuple + +from .hierarchy import PatchHierarchy, format_timestamp +from .patchdata import FieldData, ParticleData from .patchlevel import PatchLevel from .patch import Patch +from ...core.box import Box +from ...core.gridlayout import GridLayout from ...core.phare_utilities import listify from ...core.phare_utilities import refinement_ratio +from pyphare.core import phare_utilities as phut -import numpy as np field_qties = { "EM_B_x": "Bx", @@ -298,7 +306,7 @@ def overlap_mask_2d(x, y, dl, level, qty): return is_overlaped -def flat_finest_field(hierarchy, qty, time=None): +def flat_finest_field(hierarchy, qty, time=None, neghosts=1): """ returns 2 flattened arrays containing the data (with shape [Npoints]) and the coordinates (with shape [Npoints, Ndim]) for the given @@ -311,7 +319,7 @@ def flat_finest_field(hierarchy, qty, time=None): dim = hierarchy.ndim if dim == 1: - return flat_finest_field_1d(hierarchy, qty, time) + return flat_finest_field_1d(hierarchy, qty, time, neghosts) elif dim == 2: return flat_finest_field_2d(hierarchy, qty, time) elif dim == 3: @@ -321,7 +329,7 @@ def flat_finest_field(hierarchy, qty, time=None): raise ValueError("the dim of a hierarchy should be 1, 2 or 3") -def flat_finest_field_1d(hierarchy, qty, time=None): +def flat_finest_field_1d(hierarchy, qty, time=None, neghosts=1): lvl = hierarchy.levels(time) for ilvl in range(hierarchy.finest_level(time) + 1)[::-1]: @@ -333,7 +341,7 @@ def flat_finest_field_1d(hierarchy, qty, time=None): # all but 1 ghost nodes are removed in order to limit # the overlapping, but to keep enough point to avoid # any extrapolation for the interpolator - needed_points = pdata.ghosts_nbr - 1 + needed_points = pdata.ghosts_nbr - neghosts # data = pdata.dataset[patch.box] # TODO : once PR 551 will be merged... 
data = pdata.dataset[needed_points[0] : -needed_points[0]] @@ -552,34 +560,55 @@ def _compute_scalardiv(patch_datas, **kwargs): return tuple(pd_attrs) -from dataclasses import dataclass - - @dataclass class EqualityReport: - ok: bool - reason: str + failed: List[Tuple[str, Any, Any]] = field(default_factory=lambda: []) def __bool__(self): - return self.ok + return not self.failed + + def __repr__(self): + for msg, ref, cmp in self: + print(msg) + try: + if type(ref) is FieldData: + phut.assert_fp_any_all_close(ref[:], cmp[:], atol=1e-16) + except AssertionError as e: + print(e) + return self.failed[0][0] + + def __call__(self, reason, ref=None, cmp=None): + self.failed.append((reason, ref, cmp)) + return self + + def __getitem__(self, idx): + return (self.failed[idx][1], self.failed[idx][2]) + + def __iter__(self): + return self.failed.__iter__() + + def __reversed__(self): + return reversed(self.failed) -def hierarchy_compare(this, that): +def hierarchy_compare(this, that, atol=1e-16): + eqr = EqualityReport() + if not isinstance(this, PatchHierarchy) or not isinstance(that, PatchHierarchy): - return EqualityReport(False, "class type mismatch") + return eqr("class type mismatch") if this.ndim != that.ndim or this.domain_box != that.domain_box: - return EqualityReport(False, "dimensional mismatch") + return eqr("dimensional mismatch") if this.time_hier.keys() != that.time_hier.keys(): - return EqualityReport(False, "timesteps mismatch") + return eqr("timesteps mismatch") for tidx in this.times(): patch_levels_ref = this.time_hier[tidx] patch_levels_cmp = that.time_hier[tidx] if patch_levels_ref.keys() != patch_levels_cmp.keys(): - return EqualityReport(False, "levels mismatch") + return eqr("levels mismatch") for level_idx in patch_levels_cmp.keys(): patch_level_ref = patch_levels_ref[level_idx] @@ -590,21 +619,62 @@ def hierarchy_compare(this, that): patch_cmp = patch_level_cmp.patches[patch_idx] if patch_ref.patch_datas.keys() != patch_cmp.patch_datas.keys(): - print(list(patch_ref.patch_datas.keys())) - print(list(patch_cmp.patch_datas.keys())) - return EqualityReport(False, "data keys mismatch") + return eqr("data keys mismatch") for patch_data_key in patch_ref.patch_datas.keys(): patch_data_ref = patch_ref.patch_datas[patch_data_key] patch_data_cmp = patch_cmp.patch_datas[patch_data_key] - if patch_data_cmp != patch_data_ref: - return EqualityReport( - False, - "data mismatch: " - + type(patch_data_cmp).__name__ - + " " - + type(patch_data_ref).__name__, - ) + if not patch_data_cmp.compare(patch_data_ref, atol=atol): + msg = f"data mismatch: {type(patch_data_ref).__name__} {patch_data_key}" + eqr(msg, patch_data_cmp, patch_data_ref) + + if not eqr: + return eqr + + return eqr + + +def single_patch_for_LO(hier, qties=None, skip=None): + def _skip(qty): + return (qties is not None and qty not in qties) or ( + skip is not None and qty in skip + ) - return EqualityReport(True, "OK") + cier = deepcopy(hier) + sim = hier.sim + layout = GridLayout( + Box(sim.origin, sim.cells), sim.origin, sim.dl, interp_order=sim.interp_order + ) + p0 = Patch(patch_datas={}, patch_id="", layout=layout) + for t in cier.times(): + cier.time_hier[format_timestamp(t)] = {0: cier.level(0, t)} + cier.level(0, t).patches = [deepcopy(p0)] + l0_pds = cier.level(0, t).patches[0].patch_datas + for k, v in hier.level(0, t).patches[0].patch_datas.items(): + if _skip(k): + continue + if isinstance(v, FieldData): + l0_pds[k] = FieldData( + layout, v.field_name, None, centering=v.centerings + ) + l0_pds[k].dataset = 
np.zeros(l0_pds[k].size) + patch_box = hier.level(0, t).patches[0].box + l0_pds[k][patch_box] = v[patch_box] + + elif isinstance(v, ParticleData): + l0_pds[k] = deepcopy(v) + else: + raise RuntimeError("unexpected state") + + for patch in hier.level(0, t).patches[1:]: + for k, v in patch.patch_datas.items(): + if _skip(k): + continue + if isinstance(v, FieldData): + l0_pds[k][patch.box] = v[patch.box] + elif isinstance(v, ParticleData): + l0_pds[k].dataset.add(v.dataset) + else: + raise RuntimeError("unexpected state") + return cier diff --git a/pyphare/pyphare/pharesee/hierarchy/patchdata.py b/pyphare/pyphare/pharesee/hierarchy/patchdata.py index 717cd10ac..e0727b9a2 100644 --- a/pyphare/pyphare/pharesee/hierarchy/patchdata.py +++ b/pyphare/pyphare/pharesee/hierarchy/patchdata.py @@ -1,6 +1,6 @@ import numpy as np -from ...core.phare_utilities import deep_copy +from ...core import phare_utilities as phut from ...core import box as boxm from ...core.box import Box @@ -24,7 +24,7 @@ def __init__(self, layout, quantity): def __deepcopy__(self, memo): no_copy_keys = ["dataset"] # do not copy these things - return deep_copy(self, memo, no_copy_keys) + return phut.deep_copy(self, memo, no_copy_keys) class FieldData(PatchData): @@ -80,8 +80,16 @@ def __str__(self): def __repr__(self): return self.__str__() + def compare(self, that, atol=1e-16): + return self.field_name == that.field_name and phut.fp_any_all_close( + self.dataset[:], that.dataset[:], atol=atol + ) + def __eq__(self, that): - return self.field_name == that.field_name and self.dataset[:] == that.dataset[:] + return self.compare(that) + + def __ne__(self, that): + return not (self == that) def select(self, box): """ @@ -112,6 +120,9 @@ def __getitem__(self, box_or_slice): return self.dataset[box_or_slice] return self.select(box_or_slice) + def __setitem__(self, box_or_slice, val): + self.__getitem__(box_or_slice)[:] = val + def __init__(self, layout, field_name, data, **kwargs): """ :param layout: A GridLayout representing the domain on which data is defined @@ -219,5 +230,9 @@ def __getitem__(self, box): def size(self): return self.dataset.size() - def __eq__(self, that): + def compare(self, that, *args, **kwargs): + """args/kwargs may include atol for consistency with field::compare""" return self.name == that.name and self.dataset == that.dataset + + def __eq__(self, that): + return self.compare(that) diff --git a/pyphare/pyphare/pharesee/particles.py b/pyphare/pyphare/pharesee/particles.py index 027113890..3d6fb58b4 100644 --- a/pyphare/pyphare/pharesee/particles.py +++ b/pyphare/pyphare/pharesee/particles.py @@ -77,6 +77,11 @@ def size(self): def __eq__(self, that): if isinstance(that, Particles): + if self.size() != that.size(): + print( + f"particles.py:Particles::eq size diff: {self.size()} != {that.size()}" + ) + return False # fails on OSX for some reason set_check = set(self.as_tuples()) == set(that.as_tuples()) if set_check: @@ -88,9 +93,12 @@ def __eq__(self, that): print(f"particles.py:Particles::eq failed with: {ex}") print_trace() return False - + print(f"particles.py:Particles::eq bad type: {type(that)}") return False + def __ne__(self, that): + return not (self == that) + def select(self, box, box_type="cell"): """ select particles from the given box @@ -292,8 +300,9 @@ def single_patch_per_level_per_pop_from(hier, only_keep_L0=True): # dragons particles[key] = [] for patch in patch_level.patches: - for patch_data_key in patch.patch_datas.keys(): - particles[key] += [patch[patch_data_key].dataset] + for key in 
patch.patch_datas.keys(): + if key in particles: + particles[key] += [patch[key].dataset] for key in particles.keys(): if particles[key]:
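
The tolerance helpers added to pyphare/pyphare/core/phare_utilities.py split the old assert_fp_any_all_close() into any_fp_tol(), which builds the atol/rtol dict (widening atol by a factor 1e8 when either operand is considered fp32), a non-raising fp_any_all_close(), and the reimplemented assert variant. A minimal sketch of how they combine (illustration only, not applied by this patch), assuming pyphare is importable and relying on the visible is_fp32() fallback that treats a plain Python float as fp32:

    import numpy as np
    from pyphare.core import phare_utilities as phut

    a = np.zeros(8)
    b = a + 1e-17
    assert phut.fp_any_all_close(a, b)   # within the default atol of 1e-16
    phut.assert_fp_any_all_close(a, b)   # same comparison, raising on mismatch

    # mixing in a plain Python float triggers the fp32 branch of any_fp_tol(),
    # so atol is widened to atol * 1e8 (about 1e-8 with the defaults)
    tol = phut.any_fp_tol(np.zeros(8, dtype=np.float32), 0.0)
    # tol -> {"atol": ~1e-08, "rtol": 0}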
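
hierarchy_compare() now returns an EqualityReport that accumulates every (reason, ref, cmp) mismatch instead of stopping at the first one; the report is truthy only when nothing failed and is iterable over its failures. single_patch_for_LO() collapses level 0 of a deep-copied hierarchy into one domain-wide patch per time step, so two hierarchies can be compared independently of how L0 was decomposed into patches. A rough usage sketch (illustration only, not applied by this patch): hier_a and hier_b are placeholder names for two PatchHierarchy objects that carry their Simulation (single_patch_for_LO reads the L0 layout from hier.sim), and "tags" is just an example of a quantity one might skip:

    from pyphare.pharesee.hierarchy.hierarchy_utils import (
        hierarchy_compare,
        single_patch_for_LO,
    )

    report = hierarchy_compare(hier_a, hier_b, atol=1e-12)
    if not report:  # truthy only when the hierarchies are equal
        for reason, ref, cmp in report:  # ref/cmp hold the two mismatching objects (or None)
            print(reason)                # e.g. "data mismatch: FieldData Bx"

    # compare level 0 independently of its patch decomposition
    assert hierarchy_compare(
        single_patch_for_LO(hier_a, skip=["tags"]),
        single_patch_for_LO(hier_b, skip=["tags"]),
        atol=1e-12,
    )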