From 8cbc4bd42c69eddd8931bec7a175e418ea66cae8 Mon Sep 17 00:00:00 2001 From: Mathieu Doucet Date: Mon, 8 Apr 2024 12:27:39 -0400 Subject: [PATCH] clean up --- reduction/lr_reduction/DeadTimeCorrection.py | 16 ------- reduction/lr_reduction/event_reduction.py | 31 +++++++------ .../scaling_factors/LRDirectBeamSort.py | 43 +++++++++++-------- .../scaling_factors/LRScalingFactors.py | 17 +++++--- .../lr_reduction/scaling_factors/workflow.py | 43 ++++++++++++------- reduction/lr_reduction/utils.py | 11 +++++ 6 files changed, 90 insertions(+), 71 deletions(-) diff --git a/reduction/lr_reduction/DeadTimeCorrection.py b/reduction/lr_reduction/DeadTimeCorrection.py index 1bbe993..a719f46 100644 --- a/reduction/lr_reduction/DeadTimeCorrection.py +++ b/reduction/lr_reduction/DeadTimeCorrection.py @@ -7,22 +7,6 @@ import numpy as np import scipy -def call(InputWorkspace, InputErrorEventsWorkspace=None, DeadTime=4.2, TOFStep=100., Paralyzable=False, TOFRange=[0, 0], OutputWorkspace='correction'): - """ - Function to make the algorithm call similar to a normal Mantid call - """ - algo = SingleReadoutDeadTimeCorrection() - algo.PyInit() - algo.setProperty("InputWorkspace", InputWorkspace) - algo.setProperty("InputErrorEventsWorkspace", InputErrorEventsWorkspace) - algo.setProperty("DeadTime", DeadTime) - algo.setProperty("TOFStep", TOFStep) - algo.setProperty("Paralyzable", Paralyzable) - algo.setProperty("TOFRange", TOFRange) - algo.setProperty("OutputWorkspace", OutputWorkspace) - algo.PyExec() - return algo.getProperty('OutputWorkspace').value - class SingleReadoutDeadTimeCorrection(PythonAlgorithm): diff --git a/reduction/lr_reduction/event_reduction.py b/reduction/lr_reduction/event_reduction.py index 57d6c5a..ca4209b 100644 --- a/reduction/lr_reduction/event_reduction.py +++ b/reduction/lr_reduction/event_reduction.py @@ -8,6 +8,7 @@ from . import background from . 
import DeadTimeCorrection +from lr_reduction.utils import mantid_algorithm_exec def get_wl_range(ws): @@ -246,26 +247,28 @@ def get_dead_time_correction(self): run_number = self._ws_sc.getRun().getProperty("run_number").value error_ws = api.LoadErrorEventsNexus("REF_L_%s" % run_number) - corr_ws = DeadTimeCorrection.call(InputWorkspace=self._ws_sc, - InputErrorEventsWorkspace=error_ws, - DeadTime=self.DEAD_TIME, - TOFStep=self.DEAD_TIME_TOF_STEP, - Paralyzable=self.paralyzable, - TOFRange=[tof_min, tof_max], - OutputWorkspace="corr") + corr_ws = mantid_algorithm_exec(DeadTimeCorrection.SingleReadoutDeadTimeCorrection, + InputWorkspace=self._ws_sc, + InputErrorEventsWorkspace=error_ws, + DeadTime=self.DEAD_TIME, + TOFStep=self.DEAD_TIME_TOF_STEP, + Paralyzable=self.paralyzable, + TOFRange=[tof_min, tof_max], + OutputWorkspace="corr") corr_sc = corr_ws.readY(0) wl_bins = corr_ws.readX(0) / self.constant # Direct beam workspace run_number = self._ws_db.getRun().getProperty("run_number").value error_ws = api.LoadErrorEventsNexus("REF_L_%s" % run_number) - corr_ws = DeadTimeCorrection.call(InputWorkspace=self._ws_db, - InputErrorEventsWorkspace=error_ws, - DeadTime=self.DEAD_TIME, - TOFStep=self.DEAD_TIME_TOF_STEP, - Paralyzable=self.paralyzable, - TOFRange=[tof_min, tof_max], - OutputWorkspace="corr") + corr_ws = mantid_algorithm_exec(DeadTimeCorrection.SingleReadoutDeadTimeCorrection, + InputWorkspace=self._ws_db, + InputErrorEventsWorkspace=error_ws, + DeadTime=self.DEAD_TIME, + TOFStep=self.DEAD_TIME_TOF_STEP, + Paralyzable=self.paralyzable, + TOFRange=[tof_min, tof_max], + OutputWorkspace="corr") corr_db = corr_ws.readY(0) # Flip the correction since we are going from TOF to Q diff --git a/reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py b/reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py index 1629e66..b12431d 100644 --- a/reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py +++ b/reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py 
@@ -16,7 +16,8 @@ THI_TOLERANCE = 0.002 -from . import LRScalingFactors +from lr_reduction.scaling_factors import LRScalingFactors +from lr_reduction.utils import mantid_algorithm_exec class CompareTwoNXSDataForSFcalculator(object): @@ -35,6 +36,11 @@ class CompareTwoNXSDataForSFcalculator(object): resultComparison = 0 def __init__(self, nxsdataToCompareWith, nxsdataToPosition): + """ + Compare two runs to decide in which order they should be processed + :param workspace nxsdataToCompareWith: new run to compare with + :param workspace nxsdataToPosition: second run to compare with + """ self.nexusToCompareWithRun = nxsdataToCompareWith.getRun() self.nexusToPositionRun = nxsdataToPosition.getRun() @@ -95,6 +101,10 @@ def result(self): def sorter_function(r1, r2): """ Sorter function used by with the 'sorted' call to sort the direct beams. + + :param workspace r1: first workspace to compare with + :param workspace r2: second workspace to compare with + """ return CompareTwoNXSDataForSFcalculator(r2, r1).result() @@ -266,23 +276,20 @@ def _compute_scaling_factors(self, lr_data_sorted): deadtime = self.getProperty("DeadTime").value deadtime_step = self.getProperty("DeadTimeTOFStep").value - algo = LRScalingFactors.LRScalingFactors() - algo.PyInit() - algo.setProperty("DirectBeamRuns", direct_beam_runs) - algo.setProperty("TOFRange", tof_range) - algo.setProperty("TOFSteps", tof_steps) - algo.setProperty("SignalPeakPixelRange", peak_ranges) - algo.setProperty("SignalBackgroundPixelRange", bck_ranges) - algo.setProperty("LowResolutionPixelRange", x_ranges) - algo.setProperty("IncidentMedium", incident_medium) - algo.setProperty("SlitTolerance", slit_tolerance) - algo.setProperty("ScalingFactorFile", scaling_file) - algo.setProperty("DirectBeamRuns", direct_beam_runs) - algo.setProperty("UseDeadTimeCorrection", use_deadtime) - algo.setProperty("ParalyzableDeadTime", paralyzable) - algo.setProperty("DeadTime", deadtime) - algo.setProperty("DeadTimeTOFStep", deadtime_step) - 
algo.PyExec() + mantid_algorithm_exec(LRScalingFactors.LRScalingFactors, + DirectBeamRuns=direct_beam_runs, + TOFRange=tof_range, + TOFSteps=tof_steps, + SignalPeakPixelRange=peak_ranges, + SignalBackgroundPixelRange=bck_ranges, + LowResolutionPixelRange=x_ranges, + IncidentMedium=incident_medium, + SlitTolerance=slit_tolerance, + ScalingFactorFile=scaling_file, + UseDeadTimeCorrection=use_deadtime, + ParalyzableDeadTime=paralyzable, + DeadTime=deadtime, + DeadTimeTOFStep=deadtime_step) # log output summary logger.notice(summary) diff --git a/reduction/lr_reduction/scaling_factors/LRScalingFactors.py b/reduction/lr_reduction/scaling_factors/LRScalingFactors.py index 630befa..9436216 100644 --- a/reduction/lr_reduction/scaling_factors/LRScalingFactors.py +++ b/reduction/lr_reduction/scaling_factors/LRScalingFactors.py @@ -12,6 +12,7 @@ from mantid.kernel import * from lr_reduction import DeadTimeCorrection +from lr_reduction.utils import mantid_algorithm_exec class LRScalingFactors(PythonAlgorithm): @@ -462,13 +463,15 @@ def compute_dead_time_correction(self, ws, tof_min, tof_max, tof_step): deadtime = self.getProperty("DeadTime").value deadtime_step = self.getProperty("DeadTimeTOFStep").value error_ws = LoadErrorEventsNexus(ws.getRun().getProperty("run_number").value) - corr_ws = DeadTimeCorrection.call(InputWorkspace=ws, - InputErrorEventsWorkspace=error_ws, - Paralyzable=paralyzable, - DeadTime=deadtime, - TOFStep=deadtime_step, - TOFRange=[tof_min, tof_max], - OutputWorkspace="corr") + + corr_ws = mantid_algorithm_exec(DeadTimeCorrection.SingleReadoutDeadTimeCorrection, + InputWorkspace=ws, + InputErrorEventsWorkspace=error_ws, + Paralyzable=paralyzable, + DeadTime=deadtime, + TOFStep=deadtime_step, + TOFRange=[tof_min, tof_max], + OutputWorkspace="corr") # Rebin to the workspace we need corr_ws = Rebin(InputWorkspace=corr_ws, Params=[tof_min, tof_step, tof_max], diff --git a/reduction/lr_reduction/scaling_factors/workflow.py 
b/reduction/lr_reduction/scaling_factors/workflow.py index 57ced23..e7fd7fa 100644 --- a/reduction/lr_reduction/scaling_factors/workflow.py +++ b/reduction/lr_reduction/scaling_factors/workflow.py @@ -3,7 +3,8 @@ """ import os -from . import LRDirectBeamSort +from lr_reduction.scaling_factors import LRDirectBeamSort +from lr_reduction.utils import mantid_algorithm_exec def process_scaling_factors(ws, output_dir, tof_step=200., order_by_runs=True, @@ -14,6 +15,18 @@ def process_scaling_factors(ws, output_dir, tof_step=200., order_by_runs=True, Compute scaling factors given a DB run, assumed to be the last one of a set. :param workspace ws: Mantid workspace for one of the direct beams to use. + :param output_dir: path to the output directory + :param tof_step: TOF binning for the scaling factor calculation + :param order_by_runs: if True, the runs will be ordered by run number instead of meta data + :param incident_medium: name of the incident medium + :param slit_tolerance: tolerance to use when matching slits between runs + :param wait: if True, scaling factors will only be processed if the workspace + given corresponds to the last run of the complete set + :param postfix: string to add at the end of the output file + :param use_deadtime: if True, a dead time correction will be applied + :param paralyzable: if True, a paralyzable dead time correction will be applied + :param deadtime: value of the dead time + :param deadtime_tof_step: TOF binning to use when computing the dead time """ # Read in the sequence information meta_data_run = ws.getRun() @@ -40,20 +53,18 @@ def process_scaling_factors(ws, output_dir, tof_step=200., order_by_runs=True, output_cfg = os.path.join(output_dir, "sf_%s_%s%s.cfg" % (first_run_of_set, file_id, postfix)) - algo = LRDirectBeamSort.LRDirectBeamSort() - algo.PyInit() - algo.setProperty("RunList", list(range(first_run_of_set, first_run_of_set + sequence_total))) - algo.setProperty("UseLowResCut", True) - 
algo.setProperty("ComputeScalingFactors", True) - algo.setProperty("TOFSteps", tof_step) - algo.setProperty("IncidentMedium", incident_medium) - algo.setProperty("SlitTolerance", slit_tolerance) - algo.setProperty("OrderDirectBeamsByRunNumber", order_by_runs) - algo.setProperty("UseDeadTimeCorrection", use_deadtime) - algo.setProperty("ParalyzableDeadTime", paralyzable) - algo.setProperty("DeadTime", deadtime) - algo.setProperty("DeadTimeTOFStep", deadtime_tof_step) - algo.setProperty("ScalingFactorFile", output_cfg) - algo.PyExec() + mantid_algorithm_exec(LRDirectBeamSort.LRDirectBeamSort, + RunList=list(range(first_run_of_set, first_run_of_set + sequence_total)), + UseLowResCut=True, + ComputeScalingFactors=True, + TOFSteps=tof_step, + IncidentMedium=incident_medium, + SlitTolerance=slit_tolerance, + OrderDirectBeamsByRunNumber=order_by_runs, + UseDeadTimeCorrection=use_deadtime, + ParalyzableDeadTime=paralyzable, + DeadTime=deadtime, + DeadTimeTOFStep=deadtime_tof_step, + ScalingFactorFile=output_cfg) return True diff --git a/reduction/lr_reduction/utils.py b/reduction/lr_reduction/utils.py index f2d03ae..a8c37a8 100644 --- a/reduction/lr_reduction/utils.py +++ b/reduction/lr_reduction/utils.py @@ -8,6 +8,17 @@ from mantid.kernel import ConfigService +def mantid_algorithm_exec(algorithm_class, **kwargs): + algorithm_instance = algorithm_class() + assert algorithm_instance.PyInit, f"{algorithm_class} is not a Mantid Python algorithm" + algorithm_instance.PyInit() + for name, value in kwargs.items(): + algorithm_instance.setProperty(name, value) + algorithm_instance.PyExec() + if 'OutputWorkspace' in kwargs: + return algorithm_instance.getProperty('OutputWorkspace').value + + @contextmanager def amend_config( new_config: dict = None, data_dir: Union[str, list] = None, data_dir_insert_mode: str = "prepend"