From 4f9acf78071e713991918f80851b389f300d5446 Mon Sep 17 00:00:00 2001 From: Ishaan Desai Date: Mon, 16 Dec 2024 14:36:39 +0100 Subject: [PATCH] Change logging strategy and add new logging for adaptivity metrics (#133) * Change logging by wrapping Python logger in a new class * Use new logging in all parts of the codebase * Fix tests and add CHANGELOG entry * Add a separate logger for adaptivity * Add CSV logging * Adding more logging points to have more information during a coupled simulation * Correctly defining _logger variable in the Config class * Reformulate the way loggers are created and passed into functionality * Change parameter in Micro Manager config files for the dummy * Move logging of adaptivity metrics to only be done by rank 0 * Log time window instead of time step for adaptivity metric * Add optional preCICE export output for adaptivity computation CPU time * Fix domain decomposition unit test * Add option to export adaptivity computation CPU time, and change name of solve CPU time export * Fix unit tests * Move adaptivity metrics logging into the respective adaptivity classes * Fix unit tests --- CHANGELOG.md | 1 + docs/configuration.md | 13 +- .../micro-manager-cpp-adaptivity-config.json | 5 +- examples/micro-manager-cpp-config.json | 2 +- ...icro-manager-python-adaptivity-config.json | 5 +- examples/micro-manager-python-config.json | 2 +- examples/precice-config-adaptivity.xml | 9 +- examples/precice-config.xml | 6 +- micro_manager/adaptivity/adaptivity.py | 4 +- micro_manager/adaptivity/global_adaptivity.py | 33 ++-- micro_manager/adaptivity/local_adaptivity.py | 39 ++-- micro_manager/config.py | 166 ++++++++++++---- micro_manager/domain_decomposition.py | 7 +- micro_manager/interpolation.py | 2 +- micro_manager/micro_manager.py | 179 +++++++++++++----- micro_manager/micro_manager_base.py | 28 +-- micro_manager/snapshot/snapshot.py | 46 +++-- micro_manager/tools/__init__.py | 0 micro_manager/tools/logging_wrapper.py | 94 +++++++++ pyproject.toml | 2 +- .../integration/test_unit_cube/clean-test.sh | 1 + ...ger-config-global-adaptivity-parallel.json | 8 +- ...icro-manager-config-global-adaptivity.json | 2 +- ...micro-manager-config-local-adaptivity.json | 2 +- .../micro-manager-config-parallel-1.json | 2 +- .../micro-manager-config-parallel-2.json | 2 +- .../test_unit_cube/precice-config.xml | 9 +- tests/unit/micro-manager-config.json | 2 +- tests/unit/micro-manager-config_crash.json | 2 +- tests/unit/test_adaptivity_parallel.py | 6 +- tests/unit/test_adaptivity_serial.py | 13 +- tests/unit/test_domain_decomposition.py | 7 +- tests/unit/test_micro_manager.py | 11 +- tests/unit/test_snapshot_computation.py | 3 +- 34 files changed, 507 insertions(+), 206 deletions(-) create mode 100644 micro_manager/tools/__init__.py create mode 100644 micro_manager/tools/logging_wrapper.py diff --git a/CHANGELOG.md b/CHANGELOG.md index ce46bcaf..c07761f4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ ## latest +- Improve logging by wrapping Python logger in a class https://github.com/precice/micro-manager/pull/133 - Refactor large parts of solve and adaptivity to group datasets and simplify handling https://github.com/precice/micro-manager/pull/135 - Add information about adaptivity tuning parameters https://github.com/precice/micro-manager/pull/131 - Put computation of counting active steps inside the adaptivity variant `if` condition https://github.com/precice/micro-manager/pull/130 diff --git a/docs/configuration.md b/docs/configuration.md index aedbed1b..7b434236 
100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -13,7 +13,7 @@ The Micro Manager is configured with a JSON file. An example configuration file { "micro_file_name": "micro_solver", "coupling_params": { - "config_file_name": "precice-config.xml", + "precice_config_file_name": "precice-config.xml", "macro_mesh_name": "macro-mesh", "read_data_names": {"temperature": "scalar", "heat-flux": "vector"}, "write_data_names": {"porosity": "scalar", "conductivity": "vector"} @@ -38,7 +38,7 @@ There are three main sections in the configuration file, the `coupling_params`, Parameter | Description --- | --- -`config_file_name` | Path to the preCICE XML configuration file from the current working directory. +`precice_config_file_name` | Path to the preCICE XML configuration file from the current working directory. `macro_mesh_name` | Name of the macro mesh as stated in the preCICE configuration. `read_data_names` | A Python dictionary with the names of the data to be read from preCICE as keys and `"scalar"` or `"vector"` as values depending on the nature of the data. `write_data_names` | A Python dictionary with the names of the data to be written to preCICE as keys and `"scalar"` or `"vector"` as values depending on the nature of the data. @@ -74,14 +74,14 @@ If the parameter `data_from_micro_sims` is set, the data to be output needs to be ``` -If `output_micro_sim_solve_time` is set, add similar entries for the data `micro_sim_time` in the following way: +If `output_micro_sim_solve_time` is set, add similar entries for the data `solve_cpu_time` in the following way: ```xml - + ... - + ``` @@ -120,6 +120,7 @@ Parameter | Description `refining_constant` | Refining constant $$ C_r $$, set as $$ 0 <= C_r < 1 $$. `every_implicit_iteration` | If True, adaptivity is calculated in every implicit iteration.
If False, adaptivity is calculated once at the start of the time window and then reused in every implicit time iteration. `similarity_measure`| Similarity measure to be used for adaptivity. Can be either `L1`, `L2`, `L1rel` or `L2rel`. By default, `L1` is used. The `rel` variants calculate the respective relative norms. This parameter is *optional*. +`output_cpu_time` | Write CPU time of the adaptivity computation to preCICE, to be exported. This parameter is *optional*. The primary tuning parameters for adaptivity are the history parameter $$ \Lambda $$, the coarsening constant $$ C_c $$, and the refining constant $$ C_r $$. Their effects can be interpreted as: @@ -170,6 +171,8 @@ The Micro Manager uses the output functionality of preCICE, hence these data set ``` +If the parameter `output_cpu_time` in `adaptivity_settings` is set to `True`, a scalar data field `adaptivity_cpu_time` needs to be added in the same way as described above. + ## Interpolate a crashed micro simulation If the optional dependency `sklearn` is installed, the Micro Manager will derive the output of a crashed micro simulation by interpolating outputs from similar simulations. To enable this, set diff --git a/examples/micro-manager-cpp-adaptivity-config.json b/examples/micro-manager-cpp-adaptivity-config.json index 89b424ec..d712cea2 100644 --- a/examples/micro-manager-cpp-adaptivity-config.json +++ b/examples/micro-manager-cpp-adaptivity-config.json @@ -1,7 +1,7 @@ { "micro_file_name": "cpp-dummy/micro_dummy", "coupling_params": { - "config_file_name": "precice-config-adaptivity.xml", + "precice_config_file_name": "precice-config-adaptivity.xml", "macro_mesh_name": "macro-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} @@ -16,7 +16,8 @@ "history_param": 0.5, "coarsening_constant": 0.3, "refining_constant": 0.4, - "every_implicit_iteration": "True" + "every_implicit_iteration": "True", + "output_cpu_time": "True" } }, "diagnostics": { diff --git a/examples/micro-manager-cpp-config.json b/examples/micro-manager-cpp-config.json index d44f70b4..cb6ce619 100644 --- a/examples/micro-manager-cpp-config.json +++ b/examples/micro-manager-cpp-config.json @@ -1,7 +1,7 @@ { "micro_file_name": "cpp-dummy/micro_dummy", "coupling_params": { - "config_file_name": "precice-config.xml", + "precice_config_file_name": "precice-config.xml", "macro_mesh_name": "macro-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} diff --git a/examples/micro-manager-python-adaptivity-config.json b/examples/micro-manager-python-adaptivity-config.json index 14640faa..1f5b0f61 100644 --- a/examples/micro-manager-python-adaptivity-config.json +++ b/examples/micro-manager-python-adaptivity-config.json @@ -1,7 +1,7 @@ { "micro_file_name": "python-dummy/micro_dummy", "coupling_params": { - "config_file_name": "precice-config-adaptivity.xml", + "precice_config_file_name": "precice-config-adaptivity.xml", "macro_mesh_name": "macro-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} @@ -16,7 +16,8 @@ "history_param": 0.5, "coarsening_constant": 0.3, "refining_constant": 0.4, - "every_implicit_iteration": "True" + "every_implicit_iteration": "True", + "output_cpu_time": "True" } }, "diagnostics": { 
diff --git a/examples/micro-manager-python-config.json b/examples/micro-manager-python-config.json index 85f84931..136dc694 100644 --- a/examples/micro-manager-python-config.json +++ b/examples/micro-manager-python-config.json @@ -1,7 +1,7 @@ { "micro_file_name": "python-dummy/micro_dummy", "coupling_params": { - "config_file_name": "precice-config.xml", + "precice_config_file_name": "precice-config.xml", "macro_mesh_name": "macro-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} diff --git a/examples/precice-config-adaptivity.xml b/examples/precice-config-adaptivity.xml index 1e4180b9..eddc16e5 100644 --- a/examples/precice-config-adaptivity.xml +++ b/examples/precice-config-adaptivity.xml @@ -10,18 +10,20 @@ - + + - + + @@ -38,9 +40,10 @@ - + + diff --git a/examples/precice-config.xml b/examples/precice-config.xml index d89f648f..0faa764f 100644 --- a/examples/precice-config.xml +++ b/examples/precice-config.xml @@ -8,14 +8,14 @@ - + - + @@ -32,7 +32,7 @@ - + diff --git a/micro_manager/adaptivity/adaptivity.py b/micro_manager/adaptivity/adaptivity.py index b1240505..bf0fa3c0 100644 --- a/micro_manager/adaptivity/adaptivity.py +++ b/micro_manager/adaptivity/adaptivity.py @@ -10,7 +10,7 @@ class AdaptivityCalculator: - def __init__(self, configurator, logger) -> None: + def __init__(self, configurator) -> None: """ Class constructor. @@ -26,8 +26,6 @@ def __init__(self, configurator, logger) -> None: self._adaptivity_data_names = configurator.get_data_for_adaptivity() self._adaptivity_type = configurator.get_adaptivity_type() - self._logger = logger - self._coarse_tol = 0.0 self._ref_tol = 0.0 diff --git a/micro_manager/adaptivity/global_adaptivity.py b/micro_manager/adaptivity/global_adaptivity.py index 1b2800d3..bf7b7001 100644 --- a/micro_manager/adaptivity/global_adaptivity.py +++ b/micro_manager/adaptivity/global_adaptivity.py @@ -19,7 +19,6 @@ class GlobalAdaptivityCalculator(AdaptivityCalculator): def __init__( self, configurator, - logger, global_number_of_sims: int, global_ids: list, rank: int, @@ -43,7 +42,7 @@ def __init__( comm : MPI.COMM_WORLD Global communicator of MPI. 
""" - super().__init__(configurator, logger) + super().__init__(configurator) self._global_ids = global_ids self._comm = comm self._rank = rank @@ -127,19 +126,7 @@ def compute_adaptivity( similarity_dists, is_sim_active, sim_is_associated_to ) - self._logger.info( - "{} active simulations, {} inactive simulations".format( - np.count_nonzero( - is_sim_active[self._global_ids[0] : self._global_ids[-1] + 1] - ), - np.count_nonzero( - is_sim_active[self._global_ids[0] : self._global_ids[-1] + 1] - == False - ), - ) - ) - - return [similarity_dists, is_sim_active, sim_is_associated_to] + return similarity_dists, is_sim_active, sim_is_associated_to def get_active_sim_ids(self, is_sim_active: np.array) -> np.ndarray: """ @@ -203,6 +190,22 @@ def get_full_field_micro_output( return micro_sims_output + def log_metrics(self, logger, adaptivity_data: list, n: int) -> None: + """ """ + is_sim_active = adaptivity_data[1] + global_active_sims = np.count_nonzero(is_sim_active) + global_inactive_sims = np.count_nonzero(is_sim_active == False) + + logger.log_info_one_rank( + "{},{},{},{},{}".format( + n, + np.mean(global_active_sims), + np.mean(global_inactive_sims), + np.max(global_active_sims), + np.max(global_inactive_sims), + ) + ) + def _communicate_micro_output( self, adaptivity_data: list, diff --git a/micro_manager/adaptivity/local_adaptivity.py b/micro_manager/adaptivity/local_adaptivity.py index 161a70ac..c89f229f 100644 --- a/micro_manager/adaptivity/local_adaptivity.py +++ b/micro_manager/adaptivity/local_adaptivity.py @@ -10,7 +10,7 @@ class LocalAdaptivityCalculator(AdaptivityCalculator): - def __init__(self, configurator, logger) -> None: + def __init__(self, configurator, comm) -> None: """ Class constructor. @@ -18,10 +18,11 @@ def __init__(self, configurator, logger) -> None: ---------- configurator : object of class Config Object which has getter functions to get parameters defined in the configuration file. - logger : object of logging - Logger defined from the standard package logging + comm : MPI.COMM_WORLD + Global communicator of MPI. 
""" - super().__init__(configurator, logger) + super().__init__(configurator) + self._comm = comm def compute_adaptivity( self, @@ -75,14 +76,7 @@ def compute_adaptivity( similarity_dists, is_sim_active, sim_is_associated_to ) - self._logger.info( - "{} active simulations, {} inactive simulations".format( - np.count_nonzero(is_sim_active), - np.count_nonzero(is_sim_active == False), - ) - ) - - return [similarity_dists, is_sim_active, sim_is_associated_to] + return similarity_dists, is_sim_active, sim_is_associated_to def get_active_sim_ids(self, is_sim_active) -> np.ndarray: """ @@ -150,6 +144,27 @@ def get_full_field_micro_output( return micro_sims_output + def log_metrics(self, logger, adaptivity_list: list, n: int) -> None: + """ """ + is_sim_active = adaptivity_list[1] + + # MPI Gather is necessary as local adaptivity only stores local data + local_active_sims = np.count_nonzero(is_sim_active) + global_active_sims = self._comm.gather(local_active_sims) + + local_inactive_sims = np.count_nonzero(is_sim_active == False) + global_inactive_sims = self._comm.gather(local_inactive_sims) + + logger.log_info_one_rank( + "{},{},{},{},{}".format( + n, + np.mean(global_active_sims), + np.mean(global_inactive_sims), + np.max(global_active_sims), + np.max(global_inactive_sims), + ) + ) + def _update_inactive_sims( self, similarity_dists: np.ndarray, diff --git a/micro_manager/config.py b/micro_manager/config.py index b6eb7cd4..58a72be2 100644 --- a/micro_manager/config.py +++ b/micro_manager/config.py @@ -13,20 +13,20 @@ class Config: the config class in https://github.com/precice/fenics-adapter/tree/develop/fenicsadapter """ - def __init__(self, logger, config_filename): + def __init__(self, config_file_name): """ Constructor of the Config class. Parameters ---------- - config_filename : string + config_file_name : string Name of the JSON configuration file """ - self._logger = logger - + self._config_file_name = config_file_name + self._logger = None self._micro_file_name = None - self._config_file_name = None + self._precice_config_file_name = None self._macro_mesh_name = None self._read_data_names = dict() self._write_data_names = dict() @@ -38,6 +38,7 @@ def __init__(self, logger, config_filename): self._diagnostics_data_names = dict() self._output_micro_sim_time = False + self._output_micro_mem_use = False self._interpolate_crash = False @@ -49,6 +50,9 @@ def __init__(self, logger, config_filename): self._adaptivity_refining_constant = 0.5 self._adaptivity_every_implicit_iteration = False self._adaptivity_similarity_measure = "L1" + self._adaptivity_output_n = -2 + self._adaptivity_output_cpu_time = False + self._adaptivity_output_mem_usage = False # Snapshot information self._parameter_file_name = None @@ -57,19 +61,28 @@ def __init__(self, logger, config_filename): self._output_micro_sim_time = False - self.read_json(config_filename) + def set_logger(self, logger): + """ + Set the logger for the Config class. - def read_json(self, config_filename): + Parameters + ---------- + logger : object of logging + Logger defined from the standard package logging + """ + self._logger = logger + + def _read_json(self, config_file_name): """ Reads JSON configuration file. 
Parameters ---------- - config_filename : string + config_file_name : string Name of the JSON configuration file """ - self._folder = os.path.dirname(os.path.join(os.getcwd(), config_filename)) - path = os.path.join(self._folder, os.path.basename(config_filename)) + self._folder = os.path.dirname(os.path.join(os.getcwd(), config_file_name)) + path = os.path.join(self._folder, os.path.basename(config_file_name)) with open(path, "r") as read_file: self._data = json.load(read_file) @@ -96,7 +109,7 @@ "Write data dictionary as a value other than 'scalar' or 'vector'" ) except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "No write data names provided. Micro manager will only read data from preCICE." ) @@ -115,19 +128,31 @@ "Read data dictionary as a value other than 'scalar' or 'vector'" ) except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "No read data names provided. Micro manager will only write data to preCICE." ) self._micro_dt = self._data["simulation_params"]["micro_dt"] try: - if self._data["diagnostics"]["output_micro_sim_solve_time"]: + if self._data["diagnostics"]["output_micro_sim_solve_time"] == "True": self._output_micro_sim_time = True - self._write_data_names["micro_sim_time"] = False + self._write_data_names["solve_cpu_time"] = False + except BaseException: + self._logger.log_info_one_rank( + "Micro manager will not output time required to solve each micro simulation." + ) + + try: + if self._data["diagnostics"]["output_micro_sim_solve_mem_use"] == "True": + self._output_micro_mem_use = True + self._write_data_names["solve_mem_use"] = False + self._logger.log_info_one_rank( + "Calculating memory usage of the solve call of each micro simulation will slow down the Micro Manager. This option is intended for diagnostic purposes." + ) except BaseException: - self._logger.info( - "Micro manager will not output time required to solve each micro simulation in each time step." + self._logger.log_info_one_rank( + "Micro manager will not output memory usage for solving each micro simulation." ) def read_json_micro_manager(self): """ Reads Micro Manager relevant information from JSON configuration file and saves the data to the respective instance attributes. """ - self._config_file_name = os.path.join( - self._folder, self._data["coupling_params"]["config_file_name"] + self._read_json(self._config_file_name) # Read base information + + self._precice_config_file_name = os.path.join( + self._folder, self._data["coupling_params"]["precice_config_file_name"] ) self._macro_mesh_name = self._data["coupling_params"]["macro_mesh_name"] @@ -147,7 +174,7 @@ try: self._ranks_per_axis = self._data["simulation_params"]["decomposition"] except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "Domain decomposition is not specified, so the Micro Manager will expect to be run in serial." ) @@ -165,7 +192,7 @@ "Adaptivity settings are provided but adaptivity is turned off." ) except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "Micro Manager will not adaptively run micro simulations, but instead will run all micro simulations."
) @@ -188,12 +215,21 @@ self._data_for_adaptivity[dname] = exchange_data[dname] if self._data_for_adaptivity.keys() == self._write_data_names.keys(): - warn( + self._logger.log_info_one_rank( "Only micro simulation data is used for similarity computation in adaptivity. This would lead to the" " same set of active and inactive simulations for the entire simulation time. If this is not intended," " please include macro simulation data as well." ) + try: + self._adaptivity_output_n = self._data["simulation_params"][ + "adaptivity_settings" + ]["output_n"] + except BaseException: + self._logger.log_info_one_rank( + "No output interval for adaptivity provided. Adaptivity metrics will be output every time window." + ) + self._adaptivity_history_param = self._data["simulation_params"][ "adaptivity_settings" ]["history_param"] @@ -212,7 +248,7 @@ "adaptivity_settings" ]["similarity_measure"] else: - self._logger.info( + self._logger.log_info_one_rank( "No similarity measure provided, using L1 norm as default" ) self._adaptivity_similarity_measure = "L1" @@ -227,13 +263,41 @@ self._adaptivity_every_implicit_iteration = False if not self._adaptivity_every_implicit_iteration: - self._logger.info( + self._logger.log_info_one_rank( "Micro Manager will compute adaptivity once at the start of every time window" ) self._write_data_names["active_state"] = False self._write_data_names["active_steps"] = False + try: + if ( + self._data["simulation_params"]["adaptivity_settings"][ + "output_cpu_time" + ] + == "True" + ): + self._adaptivity_output_cpu_time = True + self._write_data_names["adaptivity_cpu_time"] = False + except BaseException: + self._logger.log_info_one_rank( + "Micro Manager will not output CPU time of the adaptivity computation." + ) + + try: + if ( + self._data["simulation_params"]["adaptivity_settings"][ + "output_mem_usage" + ] + == "True" + ): + self._adaptivity_output_mem_usage = True + self._write_data_names["adaptivity_mem_usage"] = False + except BaseException: + self._logger.log_info_one_rank( + "Micro Manager will not output memory usage of the adaptivity computation." + ) + if "interpolate_crash" in self._data["simulation_params"]: if self._data["simulation_params"]["interpolate_crash"] == "True": self._interpolate_crash = True @@ -253,19 +317,24 @@ "Diagnostics data dictionary as a value other than 'scalar' or 'vector'" ) except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "No diagnostics data is defined. Micro Manager will not output any diagnostics data." ) try: self._micro_output_n = self._data["diagnostics"]["micro_output_n"] except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "Output interval of micro simulations not specified, if output is available then it will be called " "in every time window." ) def read_json_snapshot(self): + """ + Reads Snapshot relevant information from JSON configuration file + """ + self._read_json(self._config_file_name) # Read base information + self._parameter_file_name = os.path.join( self._folder, self._data["coupling_params"]["parameter_file_name"] ) @@ -278,7 +347,7 @@ .replace(".py", "") ) except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "No post-processing file name provided. Snapshot computation will not perform any post-processing."
) self._postprocessing_file_name = None @@ -298,7 +367,7 @@ "Diagnostics data dictionary has a value other than 'scalar' or 'vector'" ) except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "No diagnostics data is defined. Snapshot computation will not output any diagnostics data." ) @@ -306,20 +375,20 @@ if self._data["snapshot_params"]["initialize_once"] == "True": self._initialize_once = True except BaseException: - self._logger.info( + self._logger.log_info_one_rank( "For each snapshot a new micro simulation object will be created" ) - def get_config_file_name(self): + def get_precice_config_file_name(self): """ - Get the name of the JSON configuration file. + Get the name of the preCICE XML configuration file. Returns ------- config_file_name : string - Name of the JSON configuration file provided to the Config class. + Name of the preCICE XML configuration file. """ - return self._config_file_name + return self._precice_config_file_name def get_macro_mesh_name(self): """ @@ -415,6 +484,17 @@ def write_micro_solve_time(self): """ return self._output_micro_sim_time + def write_micro_mem_use(self): + """ + Depending on user input, the Micro Manager will calculate the memory usage of the solve() call of every micro simulation + + Returns + ------- + output_micro_mem_use : bool + True if micro simulation memory usage is required. + """ + return self._output_micro_mem_use + def turn_on_adaptivity(self): """ Boolean stating whether adaptivity is on or not. @@ -450,6 +530,17 @@ def get_data_for_adaptivity(self): """ return self._data_for_adaptivity + def get_adaptivity_output_n(self): + """ + Get the output frequency of adaptivity metrics. + + Returns + ------- + adaptivity_output_n : int + Output frequency of adaptivity metrics, so output every N time windows + """ + return self._adaptivity_output_n + def get_adaptivity_hist_param(self): """ Get adaptivity history parameter. @@ -509,6 +600,17 @@ def is_adaptivity_required_in_every_implicit_iteration(self): """ return self._adaptivity_every_implicit_iteration + def output_adaptivity_cpu_time(self): + """ + Check if CPU time of the adaptivity computation needs to be output. + + Returns + ------- + adaptivity_cpu_time : bool + True if CPU time of the adaptivity computation needs to be output, False otherwise. + """ + return self._adaptivity_output_cpu_time + def get_micro_dt(self): """ Get the size of the micro time window. diff --git a/micro_manager/domain_decomposition.py index cfc7cbe7..b80b87d4 100644 --- a/micro_manager/domain_decomposition.py +++ b/micro_manager/domain_decomposition.py @@ -6,14 +6,12 @@ class DomainDecomposer: - def __init__(self, logger, dims, rank, size) -> None: + def __init__(self, dims, rank, size) -> None: """ Class constructor. Parameters ---------- - logger : object of logging - Logger defined from the standard package logging. dims : int Dimensions of the problem. rank : int MPI rank. size : int Total number of MPI processes.
""" - self._logger = logger self._rank = rank self._size = size self._dims = dims @@ -98,6 +95,4 @@ def decompose_macro_domain(self, macro_bounds: list, ranks_per_axis: list) -> li if rank_in_axis[d] + 1 == ranks_per_axis[d]: mesh_bounds[d * 2 + 1] = macro_bounds[d * 2 + 1] - self._logger.info("Bounding box limits are {}".format(mesh_bounds)) - return mesh_bounds diff --git a/micro_manager/interpolation.py b/micro_manager/interpolation.py index 9fa08d62..092e32ec 100644 --- a/micro_manager/interpolation.py +++ b/micro_manager/interpolation.py @@ -31,7 +31,7 @@ def get_nearest_neighbor_indices( Local indices of the k nearest neighbors in all local points. """ if len(coords) < k: - self._logger.info( + self._logger.log_info_any_rank( "Number of desired neighbors k = {} is larger than the number of available neighbors {}. Resetting k = {}.".format( k, len(coords), len(coords) ) diff --git a/micro_manager/micro_manager.py b/micro_manager/micro_manager.py index cf2a5b30..56b70239 100644 --- a/micro_manager/micro_manager.py +++ b/micro_manager/micro_manager.py @@ -23,6 +23,8 @@ from typing import Callable import numpy as np +import time + import precice from .micro_manager_base import MicroManager @@ -33,6 +35,8 @@ from .domain_decomposition import DomainDecomposer from .micro_simulation import create_simulation_class +from .tools.logging_wrapper import Logger + try: from .interpolation import Interpolation @@ -53,12 +57,29 @@ def __init__(self, config_file: str) -> None: Name of the JSON configuration file (provided by the user). """ super().__init__(config_file) - self._config.read_json_micro_manager() - # Define the preCICE Participant - self._participant = precice.Participant( - "Micro-Manager", self._config.get_config_file_name(), self._rank, self._size + + self._logger = Logger( + "MicroManagerCoupling", "micro-manager-coupling.log", self._rank ) + self._config.set_logger(self._logger) + self._config.read_json_micro_manager() + + # Data names of data to output to the snapshot database + self._write_data_names = self._config.get_write_data_names() + + # Data names of data to read as input parameter to the simulations + self._read_data_names = self._config.get_read_data_names() + + self._micro_dt = self._config.get_micro_dt() + + self._is_micro_solve_time_required = self._config.write_micro_solve_time() + + self._is_micro_solve_mem_use_required = self._config.write_micro_mem_use() + + if self._is_micro_solve_mem_use_required: + tracemalloc = importlib.import_module("tracemalloc") + self._macro_mesh_name = self._config.get_macro_mesh_name() self._macro_bounds = self._config.get_macro_domain_bounds() @@ -70,7 +91,7 @@ def __init__(self, config_file: str) -> None: self._interpolate_crashed_sims = self._config.interpolate_crashed_micro_sim() if self._interpolate_crashed_sims: if Interpolation is None: - self._logger.info( + self._logger.log_info_one_rank( "Interpolation is turned off as the required package is not installed." 
) self._interpolate_crashed_sims = False @@ -85,6 +106,14 @@ def __init__(self, config_file: str) -> None: self._is_adaptivity_on = self._config.turn_on_adaptivity() if self._is_adaptivity_on: + self._adaptivity_logger = Logger( + "Adaptivity", "adaptivity-metrics.csv", self._rank, csv_logger=True + ) + + self._adaptivity_logger.log_info_one_rank( + "Time Window,Avg Active Sims,Avg Inactive Sims,Max Active,Max Inactive" + ) + self._number_of_sims_for_adaptivity: int = 0 self._data_for_adaptivity: Dict[str, np.ndarray] = dict() @@ -94,6 +123,7 @@ def __init__(self, config_file: str) -> None: # Names of macro data to be used for adaptivity computation self._adaptivity_macro_data_names = dict() + # Names of micro data to be used for adaptivity computation self._adaptivity_micro_data_names = dict() for name, is_data_vector in self._adaptivity_data_names.items(): @@ -105,6 +135,18 @@ def __init__(self, config_file: str) -> None: self._adaptivity_in_every_implicit_step = ( self._config.is_adaptivity_required_in_every_implicit_iteration() ) + self._micro_sims_active_steps = None + + self._adaptivity_output_n = self._config.get_adaptivity_output_n() + self._output_adaptivity_cpu_time = self._config.output_adaptivity_cpu_time() + + # Define the preCICE Participant + self._participant = precice.Participant( + "Micro-Manager", + self._config.get_precice_config_file_name(), + self._rank, + self._size, + ) # ************** # Public methods @@ -154,6 +196,10 @@ def solve(self) -> None: # If micro simulations have been initialized, compute adaptivity before starting the coupling if self._micro_sims_init: + self._logger.log_info_one_rank( + "Micro simulations have been initialized, so adaptivity will be computed before the coupling begins." + ) + adaptivity_data = self._adaptivity_controller.compute_adaptivity( dt, self._micro_sims, @@ -163,6 +209,8 @@ def solve(self) -> None: while self._participant.is_coupling_ongoing(): + adaptivity_cpu_time = 0.0 + dt = min(self._participant.get_max_time_step_size(), self._micro_dt) # Write a checkpoint @@ -174,6 +222,7 @@ def solve(self) -> None: if self._is_adaptivity_on: if not self._adaptivity_in_every_implicit_step: + start_time = time.process_time() adaptivity_data = ( self._adaptivity_controller.compute_adaptivity( dt, @@ -182,6 +231,9 @@ def solve(self) -> None: self._data_for_adaptivity, ) ) + end_time = time.process_time() + + adaptivity_cpu_time = end_time - start_time # Only checkpoint the adaptivity configuration if adaptivity is computed # once in every time window @@ -198,6 +250,10 @@ def solve(self) -> None: micro_sims_output = micro_sim_solve(micro_sims_input, dt, adaptivity_data) + if self._output_adaptivity_cpu_time: + for i in range(self._local_number_of_sims): + micro_sims_output[i]["adaptivity_cpu_time"] = adaptivity_cpu_time + # Check if more than a certain percentage of the micro simulations have crashed and terminate if threshold is exceeded if self._interpolate_crashed_sims: crashed_sims_on_all_ranks = np.zeros(self._size, dtype=np.int64) @@ -215,7 +271,7 @@ def solve(self) -> None: ) if crash_ratio > self._crash_threshold: - self._logger.info( + self._logger.log_info_any_rank( "{:.1%} of the micro simulations have crashed exceeding the threshold of {:.1%}. " "Exiting simulation.".format(crash_ratio, self._crash_threshold) ) @@ -225,6 +281,7 @@ def solve(self) -> None: t += dt # increase internal time when time step is done. 
n += 1 # increase counter + self._participant.advance( dt ) # notify preCICE that time step of size dt is complete @@ -244,19 +301,22 @@ def solve(self) -> None: if ( self._participant.is_time_window_complete() ): # Time window has converged, now micro output can be generated - self._logger.info( - "Micro simulations {} - {} have converged at t = {}".format( - self._micro_sims[0].get_global_id(), - self._micro_sims[-1].get_global_id(), - t, - ) - ) - if self._micro_sims_have_output: if n % self._micro_n_out == 0: for sim in self._micro_sims: sim.output() + if ( + self._is_adaptivity_on + and n % self._adaptivity_output_n == 0 + and self._rank == 0 + ): + self._adaptivity_controller.log_metrics( + self._adaptivity_logger, adaptivity_data, n + ) + + self._logger.log_info_one_rank("Time window {} converged.".format(n)) + self._participant.finalize() def initialize(self) -> None: @@ -274,7 +334,6 @@ def initialize(self) -> None: ), "Provided macro mesh bounds are of incorrect dimension" if self._is_parallel: domain_decomposer = DomainDecomposer( - self._logger, self._participant.get_mesh_dimensions(self._macro_mesh_name), self._rank, self._size, @@ -299,13 +358,10 @@ def initialize(self) -> None: assert self._mesh_vertex_coords.size != 0, "Macro mesh has no vertices." self._local_number_of_sims, _ = self._mesh_vertex_coords.shape - self._logger.info( - "Number of local micro simulations = {}".format(self._local_number_of_sims) - ) if self._local_number_of_sims == 0: if self._is_parallel: - self._logger.info( + self._logger.log_info_any_rank( "Rank {} has no micro simulations and hence will not do any computation.".format( self._rank ) @@ -320,9 +376,36 @@ def initialize(self) -> None: # the correct global IDs self._comm.Allgatherv(np.array(self._local_number_of_sims), nms_all_ranks) + max_nms = np.max(nms_all_ranks) + min_nms = np.min(nms_all_ranks) + + if ( + max_nms != min_nms + ): # if the number of maximum and minimum micro simulations per rank are different + self._logger.log_info_one_rank( + "The following ranks have the maximum number of micro simulations ({}): {}".format( + max_nms, np.where(nms_all_ranks == max_nms)[0] + ) + ) + self._logger.log_info_one_rank( + "The following ranks have the minimum number of micro simulations ({}): {}".format( + min_nms, np.where(nms_all_ranks == min_nms)[0] + ) + ) + else: # if the number of maximum and minimum micro simulations per rank are the same + self._logger.log_info_one_rank( + "All ranks have the same number of micro simulations: {}".format( + max_nms + ) + ) + # Get global number of micro simulations self._global_number_of_sims: int = np.sum(nms_all_ranks) + self._logger.log_info_one_rank( + "Total number of micro simulations: {}".format(self._global_number_of_sims) + ) + if self._is_adaptivity_on: for name, is_data_vector in self._adaptivity_data_names.items(): if is_data_vector: @@ -367,23 +450,16 @@ def initialize(self) -> None: ) ) - self._logger.info( - "Micro simulations with global IDs {} - {} created.".format( - self._global_ids_of_local_sims[0], self._global_ids_of_local_sims[-1] - ) - ) - if self._is_adaptivity_on: if self._adaptivity_type == "local": self._adaptivity_controller: LocalAdaptivityCalculator = ( - LocalAdaptivityCalculator(self._config, self._logger) + LocalAdaptivityCalculator(self._config, self._comm) ) self._number_of_sims_for_adaptivity = self._local_number_of_sims elif self._adaptivity_type == "global": self._adaptivity_controller: GlobalAdaptivityCalculator = ( GlobalAdaptivityCalculator( self._config, - 
self._logger, self._global_number_of_sims, self._global_ids_of_local_sims, self._rank, @@ -428,7 +504,7 @@ def initialize(self) -> None: "The initialize() method of the Micro simulation has an incorrect number of arguments." ) except TypeError: - self._logger.info( + self._logger.log_info_one_rank( "The signature of initialize() method of the micro simulation cannot be determined. Trying to determine the signature by calling the method." ) # Try to get the signature of the initialize() method, if it is not written in Python @@ -436,7 +512,7 @@ def initialize(self) -> None: self._micro_sims[0].initialize() is_initial_data_required = False except TypeError: - self._logger.info( + self._logger.log_info_one_rank( "The initialize() method of the micro simulation has arguments. Attempting to call it again with initial data." ) try: # Try to call the initialize() method with initial data @@ -625,24 +701,24 @@ def _solve_micro_simulations( if not self._has_sim_crashed[count]: # Attempt to solve the micro simulation try: - start_time = time.time() + start_time = time.process_time() micro_sims_output[count] = sim.solve(micro_sims_input[count], dt) - end_time = time.time() + end_time = time.process_time() # Write solve time of the macro simulation if required and the simulation has not crashed if self._is_micro_solve_time_required: - micro_sims_output[count]["micro_sim_time"] = ( + micro_sims_output[count]["solve_cpu_time"] = ( end_time - start_time ) # If simulation crashes, log the error and keep the output constant at the previous iteration's output except Exception as error_message: - self._logger.error( + self._logger.log_error_any_rank( "Micro simulation at macro coordinates {} with input {} has experienced an error. " "See next entry on this rank for error message.".format( self._mesh_vertex_coords[count], micro_sims_input[count] ) ) - self._logger.error(error_message) + self._logger.log_error_any_rank(error_message) self._has_sim_crashed[count] = True # If interpolate is off, terminate after crash @@ -652,7 +728,9 @@ def _solve_micro_simulations( np.sum(self._has_sim_crashed), crashed_sims_on_all_ranks ) if sum(crashed_sims_on_all_ranks) > 0: - self._logger.info("Exiting simulation after micro simulation crash.") + self._logger.log_info_any_rank( + "Exiting simulation after micro simulation crash." + ) sys.exit() # Interpolate result for crashed simulation @@ -663,7 +741,7 @@ def _solve_micro_simulations( # Iterate over all crashed simulations to interpolate output if self._interpolate_crashed_sims: for unset_sim in unset_sims: - self._logger.info( + self._logger.log_info_any_rank( "Interpolating output for crashed simulation at macro vertex {}.".format( self._mesh_vertex_coords[unset_sim] ) @@ -681,7 +759,7 @@ def _solve_micro_simulations_with_adaptivity( adaptivity_data: list, ) -> list: """ - Solve all micro simulations and assemble the micro simulations outputs in a list of dicts format. + Adaptively solve micro simulations and assemble the micro simulations outputs in a list of dicts format. 
Parameters ---------- @@ -729,14 +807,14 @@ def _solve_micro_simulations_with_adaptivity( if not self._has_sim_crashed[active_id]: # Attempt to solve the micro simulation try: - start_time = time.time() + start_time = time.process_time() micro_sims_output[active_id] = self._micro_sims[active_id].solve( micro_sims_input[active_id], dt ) - end_time = time.time() + end_time = time.process_time() # Write solve time of the macro simulation if required and the simulation has not crashed if self._is_micro_solve_time_required: - micro_sims_output[active_id]["micro_sim_time"] = ( + micro_sims_output[active_id]["solve_cpu_time"] = ( end_time - start_time ) @@ -748,13 +826,13 @@ def _solve_micro_simulations_with_adaptivity( # If simulation crashes, log the error and keep the output constant at the previous iteration's output except Exception as error_message: - self._logger.error( + self._logger.log_error_any_rank( "Micro simulation at macro coordinates {} has experienced an error. " "See next entry on this rank for error message.".format( self._mesh_vertex_coords[active_id] ) ) - self._logger.error(error_message) + self._logger.log_error_any_rank(error_message) self._has_sim_crashed[active_id] = True # If interpolate is off, terminate after crash @@ -764,8 +842,11 @@ def _solve_micro_simulations_with_adaptivity( np.sum(self._has_sim_crashed), crashed_sims_on_all_ranks ) if sum(crashed_sims_on_all_ranks) > 0: - self._logger.info("Exiting simulation after micro simulation crash.") + self._logger.log_error_any_rank( + "Exiting simulation after micro simulation crash." + ) sys.exit() + # Interpolate result for crashed simulation unset_sims = [] for active_id in active_sim_ids: @@ -775,7 +856,7 @@ def _solve_micro_simulations_with_adaptivity( # Iterate over all crashed simulations to interpolate output if self._interpolate_crashed_sims: for unset_sim in unset_sims: - self._logger.info( + self._logger.log_info_any_rank( "Interpolating output for crashed simulation at macro vertex {}.".format( self._mesh_vertex_coords[unset_sim] ) @@ -797,7 +878,7 @@ def _solve_micro_simulations_with_adaptivity( ] = self._micro_sims_active_steps[inactive_id] if self._is_micro_solve_time_required: - micro_sims_output[inactive_id]["micro_sim_time"] = 0 + micro_sims_output[inactive_id]["solve_cpu_time"] = 0 # Collect micro sim output for adaptivity calculation for i in range(self._local_number_of_sims): @@ -879,7 +960,7 @@ def _interpolate_output_for_crashed_sim( micro_sims_active_values.append(micro_sims_output[i].copy()) # Find nearest neighbors if len(micro_sims_active_input_lists) == 0: - self._logger.error( + self._logger.log_error_any_rank( "No active neighbors available for interpolation at macro vertex {}. 
Value cannot be interpolated".format( self._mesh_vertex_coords[unset_sim] ) @@ -900,13 +981,13 @@ def _interpolate_output_for_crashed_sim( if self._is_adaptivity_on: interpol_space.append(micro_sims_active_input_lists[neighbor].copy()) interpol_values.append(micro_sims_active_values[neighbor].copy()) - interpol_values[-1].pop("micro_sim_time", None) + interpol_values[-1].pop("solve_cpu_time", None) interpol_values[-1].pop("active_state", None) interpol_values[-1].pop("active_steps", None) else: interpol_space.append(micro_sims_active_input_lists[neighbor].copy()) interpol_values.append(micro_sims_active_values[neighbor].copy()) - interpol_values[-1].pop("micro_sim_time", None) + interpol_values[-1].pop("solve_cpu_time", None) # Interpolate for each parameter output_interpol = dict() @@ -920,7 +1001,7 @@ def _interpolate_output_for_crashed_sim( ) # Reintroduce removed information if self._is_micro_solve_time_required: - output_interpol["micro_sim_time"] = 0 + output_interpol["solve_cpu_time"] = 0 if self._is_adaptivity_on: output_interpol["active_state"] = 1 output_interpol["active_steps"] = self._micro_sims_active_steps[unset_sim] diff --git a/micro_manager/micro_manager_base.py b/micro_manager/micro_manager_base.py index 9e8d54b4..0de8f829 100644 --- a/micro_manager/micro_manager_base.py +++ b/micro_manager/micro_manager_base.py @@ -8,7 +8,6 @@ """ from mpi4py import MPI -import logging from abc import ABC, abstractmethod from .config import Config @@ -52,20 +51,6 @@ def __init__(self, config_file): self._rank = self._comm.Get_rank() self._size = self._comm.Get_size() - self._logger = logging.getLogger(__name__) - self._logger.setLevel(level=logging.INFO) - - # Create file handler which logs messages - fh = logging.FileHandler("micro-manager.log") - fh.setLevel(logging.INFO) - - # Create formatter and add it to handlers - formatter = logging.Formatter( - "[" + str(self._rank) + "] %(name)s - %(levelname)s - %(message)s" - ) - fh.setFormatter(formatter) - self._logger.addHandler(fh) # add the handlers to the logger - self._is_parallel = self._size > 1 self._micro_sims_have_output = False @@ -73,18 +58,7 @@ def __init__(self, config_file): self._global_number_of_sims = 0 self._is_rank_empty = False - self._logger.info("Provided configuration file: {}".format(config_file)) - self._config = Config(self._logger, config_file) - - # Data names of data to output to the snapshot database - self._write_data_names = self._config.get_write_data_names() - - # Data names of data to read as input parameter to the simulations - self._read_data_names = self._config.get_read_data_names() - - self._micro_dt = self._config.get_micro_dt() - - self._is_micro_solve_time_required = self._config.write_micro_solve_time() + self._config = Config(config_file) def initialize(self): """ diff --git a/micro_manager/snapshot/snapshot.py b/micro_manager/snapshot/snapshot.py index e94456d0..7777873f 100644 --- a/micro_manager/snapshot/snapshot.py +++ b/micro_manager/snapshot/snapshot.py @@ -19,6 +19,8 @@ from micro_manager.micro_manager import MicroManager from .dataset import ReadWriteHDF from micro_manager.micro_simulation import create_simulation_class +from micro_manager.tools.logging_wrapper import Logger + sys.path.append(os.getcwd()) @@ -34,8 +36,24 @@ def __init__(self, config_file: str) -> None: Name of the JSON configuration file (provided by the user). 
""" super().__init__(config_file) + + self._logger = Logger( + "MicroManagerSnapshot", "micro-manager-snapshot.log", self._rank + ) + + self._config.set_logger(self._logger) self._config.read_json_snapshot() + # Data names of data to output to the snapshot database + self._write_data_names = self._config.get_write_data_names() + + # Data names of data to read as input parameter to the simulations + self._read_data_names = self._config.get_read_data_names() + + self._micro_dt = self._config.get_micro_dt() + + self._is_micro_solve_time_required = self._config.write_micro_solve_time() + # Path to the parameter file containing input parameters for micro simulations self._parameter_file = self._config.get_parameter_file_name() # Get name of pos-processing script @@ -95,12 +113,12 @@ def solve(self) -> None: micro_sims_output ) else: - self._logger.info( + self._logger.log_info_one_rank( "No post-processing script with the provided path found. Skipping post-processing." ) self._post_processing_file_name = None except Exception: - self._logger.info( + self._logger.log_info_one_rank( "No post-processing script with the provided path found. Skipping post-processing." ) self._post_processing_file_name = None @@ -113,7 +131,9 @@ def solve(self) -> None: ) # Log error and write macro data to database if simulation has crashed else: - self._logger.info("Skipping snapshot storage for crashed simulation.") + self._logger.log_info_one_rank( + "Skipping snapshot storage for crashed simulation." + ) self._data_storage.write_output_to_hdf( self._output_file_path, micro_sims_input, @@ -132,7 +152,7 @@ def solve(self) -> None: # Merge output files if self._is_parallel: - self._logger.info( + self._logger.log_info_any_rank( "Snapshots have been computed and stored. Merging output files" ) self._data_storage.set_status(self._output_file_path, "reading/deleting") @@ -146,7 +166,7 @@ def solve(self) -> None: else: self._data_storage.set_status(self._output_file_path, "finished") if self._rank == 0: - self._logger.info("Snapshot computation completed.") + self._logger.log_info_one_rank("Snapshot computation completed.") def initialize(self) -> None: """ @@ -199,15 +219,17 @@ def initialize(self) -> None: self._output_subdirectory, self._file_name ) self._data_storage.create_file(self._output_file_path) - self._logger.info("Output file created: {}".format(self._output_file_path)) + self._logger.log_error_any_rank( + "Output file created: {}".format(self._output_file_path) + ) self._local_number_of_sims = len(self._macro_parameters) - self._logger.info( + self._logger.log_info_any_rank( "Number of local micro simulations = {}".format(self._local_number_of_sims) ) if self._local_number_of_sims == 0: if self._is_parallel: - self._logger.info( + self._logger.log_info_any_rank( "Rank {} has no micro simulations and hence will not do any computation.".format( self._rank ) @@ -265,17 +287,17 @@ def _solve_micro_simulation(self, micro_sims_input: dict) -> dict | None: simulations. The return type is None if the simulation has crashed. 
""" try: - start_time = time.time() + start_time = time.process_time() micro_sims_output = self._micro_sims.solve(micro_sims_input, self._micro_dt) - end_time = time.time() + end_time = time.process_time() if self._is_micro_solve_time_required: - micro_sims_output["micro_sim_time"] = end_time - start_time + micro_sims_output["solve_cpu_time"] = end_time - start_time return micro_sims_output # Handle simulation crash except Exception as e: - self._logger.error( + self._logger.log_error_any_rank( "Micro simulation with input {} has crashed. See next entry on this rank for error message".format( micro_sims_input ) diff --git a/micro_manager/tools/__init__.py b/micro_manager/tools/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/micro_manager/tools/logging_wrapper.py b/micro_manager/tools/logging_wrapper.py new file mode 100644 index 00000000..8ef7c6be --- /dev/null +++ b/micro_manager/tools/logging_wrapper.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python3 +""" +Provides a logging wrapper for the Micro Manager classes. +""" +import logging + + +class Logger: + """ + Provides a logging wrapper for the Micro Manager classes. + """ + + def __init__(self, name, log_file, rank=0, level=logging.INFO, csv_logger=False): + """ + Set up a logger. + + Parameters + ---------- + name : string + Name of the logger. + log_file : string + Name of the log file. + rank : int, optional + Rank of the logger (default is 0). + level : int, optional + Logging level (default is logging.INFO). + csv_logger : bool, optional + If True, the logger will log in CSV format (default is False). + """ + + self._rank = rank + + handler = logging.FileHandler(log_file) + handler.setLevel(level) + + if csv_logger: + formatter = logging.Formatter("%(message)s") + else: + formatter = logging.Formatter( + "[" + + str(self._rank) + + "] %(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + + handler.setFormatter(formatter) + + self._logger = logging.getLogger(name) + self._logger.setLevel(level) + self._logger.addHandler(handler) + + def get_logger(self): + """ + Get the logger. + + Returns + ------- + logger : object of logging + Logger defined from the standard package logging + """ + return self._logger + + def log_info_one_rank(self, message): + """ + Log a message. Only the rank 0 logs the message. + + Parameters + ---------- + message : string + Message to log. + """ + if self._rank == 0: + self._logger.info(message) + + def log_info_any_rank(self, message): + """ + Log a message. All ranks log the message. + + Parameters + ---------- + message : string + Message to log. + """ + self._logger.info(message) + + def log_error_any_rank(self, message): + """ + Log an error message. Only the rank 0 logs the message. + + Parameters + ---------- + message : string + Message to log. 
+ """ + self._logger.error(message) diff --git a/pyproject.toml b/pyproject.toml index cee97d2a..5ed18012 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ Repository = "https://github.com/precice/micro-manager" micro-manager-precice = "micro_manager:main" [tool.setuptools] -packages=["micro_manager", "micro_manager.adaptivity", "micro_manager.snapshot"] +packages=["micro_manager", "micro_manager.adaptivity", "micro_manager.snapshot", "micro_manager.tools"] [tool.setuptools-git-versioning] enabled = true diff --git a/tests/integration/test_unit_cube/clean-test.sh b/tests/integration/test_unit_cube/clean-test.sh index 8af464d8..60aa7a13 100755 --- a/tests/integration/test_unit_cube/clean-test.sh +++ b/tests/integration/test_unit_cube/clean-test.sh @@ -9,3 +9,4 @@ rm -fv *.err rm -fv output/*.vtu rm -fv output/*.pvtu rm -r -fv __pycache__ +rm -fv *.csv diff --git a/tests/integration/test_unit_cube/micro-manager-config-global-adaptivity-parallel.json b/tests/integration/test_unit_cube/micro-manager-config-global-adaptivity-parallel.json index 5938af6e..bd2c738b 100644 --- a/tests/integration/test_unit_cube/micro-manager-config-global-adaptivity-parallel.json +++ b/tests/integration/test_unit_cube/micro-manager-config-global-adaptivity-parallel.json @@ -1,7 +1,7 @@ { "micro_file_name": "micro_dummy", "coupling_params": { - "config_file_name": "precice-config.xml", + "precice_config_file_name": "precice-config.xml", "macro_mesh_name": "macro-cube-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} @@ -17,7 +17,11 @@ "history_param": 0.5, "coarsening_constant": 0.3, "refining_constant": 0.4, - "every_implicit_iteration": "True" + "every_implicit_iteration": "True", + "output_cpu_time": "True" } + }, + "diagnostics": { + "output_micro_sim_solve_time": "True" } } diff --git a/tests/integration/test_unit_cube/micro-manager-config-global-adaptivity.json b/tests/integration/test_unit_cube/micro-manager-config-global-adaptivity.json index f3c5361e..fb47972a 100644 --- a/tests/integration/test_unit_cube/micro-manager-config-global-adaptivity.json +++ b/tests/integration/test_unit_cube/micro-manager-config-global-adaptivity.json @@ -1,7 +1,7 @@ { "micro_file_name": "micro_dummy", "coupling_params": { - "config_file_name": "precice-config.xml", + "precice_config_file_name": "precice-config.xml", "macro_mesh_name": "macro-cube-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} diff --git a/tests/integration/test_unit_cube/micro-manager-config-local-adaptivity.json b/tests/integration/test_unit_cube/micro-manager-config-local-adaptivity.json index ad1d2be1..a22f3213 100644 --- a/tests/integration/test_unit_cube/micro-manager-config-local-adaptivity.json +++ b/tests/integration/test_unit_cube/micro-manager-config-local-adaptivity.json @@ -1,7 +1,7 @@ { "micro_file_name": "micro_dummy", "coupling_params": { - "config_file_name": "precice-config.xml", + "precice_config_file_name": "precice-config.xml", "macro_mesh_name": "macro-cube-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} diff --git a/tests/integration/test_unit_cube/micro-manager-config-parallel-1.json b/tests/integration/test_unit_cube/micro-manager-config-parallel-1.json 
index 2a56740b..49ff2fa1 100644 --- a/tests/integration/test_unit_cube/micro-manager-config-parallel-1.json +++ b/tests/integration/test_unit_cube/micro-manager-config-parallel-1.json @@ -1,7 +1,7 @@ { "micro_file_name": "micro_dummy", "coupling_params": { - "config_file_name": "precice-config.xml", + "precice_config_file_name": "precice-config.xml", "macro_mesh_name": "macro-cube-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} diff --git a/tests/integration/test_unit_cube/micro-manager-config-parallel-2.json b/tests/integration/test_unit_cube/micro-manager-config-parallel-2.json index 478e0f0c..265b5b31 100644 --- a/tests/integration/test_unit_cube/micro-manager-config-parallel-2.json +++ b/tests/integration/test_unit_cube/micro-manager-config-parallel-2.json @@ -1,7 +1,7 @@ { "micro_file_name": "micro_dummy", "coupling_params": { - "config_file_name": "precice-config.xml", + "precice_config_file_name": "precice-config.xml", "macro_mesh_name": "macro-cube-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} diff --git a/tests/integration/test_unit_cube/precice-config.xml b/tests/integration/test_unit_cube/precice-config.xml index 6e9f2c11..91395c01 100644 --- a/tests/integration/test_unit_cube/precice-config.xml +++ b/tests/integration/test_unit_cube/precice-config.xml @@ -8,18 +8,20 @@ - + + - + + @@ -36,9 +38,10 @@ - + + diff --git a/tests/unit/micro-manager-config.json b/tests/unit/micro-manager-config.json index c4d886c3..07e030c3 100644 --- a/tests/unit/micro-manager-config.json +++ b/tests/unit/micro-manager-config.json @@ -1,7 +1,7 @@ { "micro_file_name": "test_micro_manager", "coupling_params": { - "config_file_name": "dummy-config.xml", + "precice_config_file_name": "dummy-config.xml", "macro_mesh_name": "dummy-macro-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} diff --git a/tests/unit/micro-manager-config_crash.json b/tests/unit/micro-manager-config_crash.json index 45305fa9..f95e14b1 100644 --- a/tests/unit/micro-manager-config_crash.json +++ b/tests/unit/micro-manager-config_crash.json @@ -1,7 +1,7 @@ { "micro_file_name": "test_micro_simulation_crash_handling", "coupling_params": { - "config_file_name": "dummy-config.xml", + "precice_config_file_name": "dummy-config.xml", "macro_mesh_name": "dummy-macro-mesh", "read_data_names": {"macro-scalar-data": "scalar", "macro-vector-data": "vector"}, "write_data_names": {"micro-scalar-data": "scalar", "micro-vector-data": "vector"} diff --git a/tests/unit/test_adaptivity_parallel.py b/tests/unit/test_adaptivity_parallel.py index c83ae91d..156e847b 100644 --- a/tests/unit/test_adaptivity_parallel.py +++ b/tests/unit/test_adaptivity_parallel.py @@ -31,7 +31,7 @@ def test_update_inactive_sims_global_adaptivity(self): configurator = MagicMock() configurator.get_adaptivity_similarity_measure = MagicMock(return_value="L1") adaptivity_controller = GlobalAdaptivityCalculator( - configurator, MagicMock(), 5, global_ids, rank=self._rank, comm=self._comm + configurator, 5, global_ids, rank=self._rank, comm=self._comm ) # Force the activation of sim #0 and #4 @@ -109,7 +109,7 @@ def test_update_all_active_sims_global_adaptivity(self): configurator.get_adaptivity_coarsening_const = 
         configurator.get_adaptivity_similarity_measure = MagicMock(return_value="L2rel")
         adaptivity_controller = GlobalAdaptivityCalculator(
-            configurator, MagicMock(), 5, global_ids, rank=self._rank, comm=self._comm
+            configurator, 5, global_ids, rank=self._rank, comm=self._comm
         )
 
         adaptivity_controller._adaptivity_data_names = {
@@ -174,7 +174,7 @@ def test_communicate_micro_output(self):
         configurator = MagicMock()
         configurator.get_adaptivity_similarity_measure = MagicMock(return_value="L1")
         adaptivity_controller = GlobalAdaptivityCalculator(
-            configurator, MagicMock(), 5, global_ids, rank=self._rank, comm=self._comm
+            configurator, 5, global_ids, rank=self._rank, comm=self._comm
         )
 
         adaptivity_controller._communicate_micro_output(adaptivity_data, sim_output)
diff --git a/tests/unit/test_adaptivity_serial.py b/tests/unit/test_adaptivity_serial.py
index 13155351..e2ac83d6 100644
--- a/tests/unit/test_adaptivity_serial.py
+++ b/tests/unit/test_adaptivity_serial.py
@@ -68,7 +68,7 @@ def test_get_similarity_dists(self):
         """
         configurator = MagicMock()
         configurator.get_adaptivity_similarity_measure = MagicMock(return_value="L1")
-        adaptivity_controller = AdaptivityCalculator(configurator, logger=MagicMock())
+        adaptivity_controller = AdaptivityCalculator(configurator)
         adaptivity_controller._hist_param = 0.5
         adaptivity_controller._adaptivity_data_names = [
             "micro-scalar-data",
@@ -102,7 +102,7 @@ def test_update_active_sims(self):
         """
         configurator = MagicMock()
         configurator.get_adaptivity_similarity_measure = MagicMock(return_value="L1")
-        adaptivity_controller = AdaptivityCalculator(configurator, logger=MagicMock())
+        adaptivity_controller = AdaptivityCalculator(configurator)
         adaptivity_controller._refine_const = self._refine_const
         adaptivity_controller._coarse_const = self._coarse_const
         adaptivity_controller._adaptivity_data_names = [
@@ -127,8 +127,7 @@ def test_adaptivity_norms(self):
         """
         Test functionality for calculating similarity criteria between pairs of simulations using different norms in class AdaptivityCalculator.
""" - logger = MagicMock() - calc = AdaptivityCalculator(Config(logger, "micro-manager-config.json"), logger) + calc = AdaptivityCalculator(Config("micro-manager-config.json")) fake_data = np.array([[1], [2], [3]]) self.assertTrue( @@ -209,7 +208,7 @@ def test_associate_active_to_inactive(self): """ configurator = MagicMock() configurator.get_adaptivity_similarity_measure = MagicMock(return_value="L1") - adaptivity_controller = AdaptivityCalculator(configurator, logger=MagicMock()) + adaptivity_controller = AdaptivityCalculator(configurator) adaptivity_controller._refine_const = self._refine_const adaptivity_controller._coarse_const = self._coarse_const adaptivity_controller._adaptivity_data_names = [ @@ -236,9 +235,7 @@ def test_update_inactive_sims_local_adaptivity(self): """ configurator = MagicMock() configurator.get_adaptivity_similarity_measure = MagicMock(return_value="L1") - adaptivity_controller = LocalAdaptivityCalculator( - configurator, logger=MagicMock() - ) + adaptivity_controller = LocalAdaptivityCalculator(configurator, MagicMock()) adaptivity_controller._refine_const = self._refine_const adaptivity_controller._coarse_const = self._coarse_const adaptivity_controller._adaptivity_data_names = [ diff --git a/tests/unit/test_domain_decomposition.py b/tests/unit/test_domain_decomposition.py index ffab9474..ef92a934 100644 --- a/tests/unit/test_domain_decomposition.py +++ b/tests/unit/test_domain_decomposition.py @@ -8,7 +8,6 @@ class TestDomainDecomposition(TestCase): def setUp(self) -> None: - self._logger = MagicMock() self._macro_bounds_3d = [ -1, 1, @@ -25,7 +24,7 @@ def test_rank5_outof_10_3d(self): rank = 5 size = 10 ranks_per_axis = [1, 2, 5] - domain_decomposer = DomainDecomposer(self._logger, 3, rank, size) + domain_decomposer = DomainDecomposer(3, rank, size) domain_decomposer._dims = 3 mesh_bounds = domain_decomposer.decompose_macro_domain( self._macro_bounds_3d, ranks_per_axis @@ -40,7 +39,7 @@ def test_rank10_out_of_32_3d(self): rank = 10 size = 32 ranks_per_axis = [4, 1, 8] - domain_decomposer = DomainDecomposer(self._logger, 3, rank, size) + domain_decomposer = DomainDecomposer(3, rank, size) domain_decomposer._dims = 3 mesh_bounds = domain_decomposer.decompose_macro_domain( self._macro_bounds_3d, ranks_per_axis @@ -55,7 +54,7 @@ def test_rank7_out_of_16_3d(self): rank = 7 size = 16 ranks_per_axis = [8, 2, 1] - domain_decomposer = DomainDecomposer(self._logger, 3, rank, size) + domain_decomposer = DomainDecomposer(3, rank, size) domain_decomposer._dims = 3 mesh_bounds = domain_decomposer.decompose_macro_domain( self._macro_bounds_3d, ranks_per_axis diff --git a/tests/unit/test_micro_manager.py b/tests/unit/test_micro_manager.py index b43850cd..24afea87 100644 --- a/tests/unit/test_micro_manager.py +++ b/tests/unit/test_micro_manager.py @@ -34,7 +34,7 @@ def setUp(self): self.fake_write_data_names = { "micro-scalar-data": False, "micro-vector-data": True, - "micro_sim_time": False, + "solve_cpu_time": False, "active_state": False, "active_steps": False, } @@ -42,7 +42,7 @@ def setUp(self): { "micro-scalar-data": 1, "micro-vector-data": np.array([0, 1, 2]), - "micro_sim_time": 0, + "solve_cpu_time": 0, "active_state": 0, "active_steps": 0, } @@ -119,9 +119,12 @@ def test_config(self): """ Test if the functions in the Config class work. 
""" - config = micro_manager.Config(MagicMock(), "micro-manager-config.json") + config = micro_manager.Config("micro-manager-config.json") + config.set_logger(MagicMock()) config.read_json_micro_manager() - self.assertEqual(config._config_file_name.split("/")[-1], "dummy-config.xml") + self.assertEqual( + config._precice_config_file_name.split("/")[-1], "dummy-config.xml" + ) self.assertEqual(config._micro_file_name, "test_micro_manager") self.assertEqual(config._macro_mesh_name, "dummy-macro-mesh") self.assertEqual(config._micro_output_n, 10) diff --git a/tests/unit/test_snapshot_computation.py b/tests/unit/test_snapshot_computation.py index 68bde45e..d72f4d47 100644 --- a/tests/unit/test_snapshot_computation.py +++ b/tests/unit/test_snapshot_computation.py @@ -136,7 +136,8 @@ def test_config(self): """ Test if the functions in the SnapshotConfig class work. """ - config = Config(MagicMock(), "snapshot-config.json") + config = Config("snapshot-config.json") + config.set_logger(MagicMock()) config.read_json_snapshot() self.assertEqual(