Commit 8cbc4bd

clean up

mdoucet committed Apr 8, 2024
1 parent 297d963 commit 8cbc4bd
Showing 6 changed files with 90 additions and 71 deletions.

reduction/lr_reduction/DeadTimeCorrection.py (0 additions, 16 deletions)
@@ -7,22 +7,6 @@
 import numpy as np
 import scipy
 
-def call(InputWorkspace, InputErrorEventsWorkspace=None, DeadTime=4.2, TOFStep=100., Paralyzable=False, TOFRange=[0, 0], OutputWorkspace='correction'):
-    """
-    Function to make the algorithm call similar to a normal Mantid call
-    """
-    algo = SingleReadoutDeadTimeCorrection()
-    algo.PyInit()
-    algo.setProperty("InputWorkspace", InputWorkspace)
-    algo.setProperty("InputErrorEventsWorkspace", InputErrorEventsWorkspace)
-    algo.setProperty("DeadTime", DeadTime)
-    algo.setProperty("TOFStep", TOFStep)
-    algo.setProperty("Paralyzable", Paralyzable)
-    algo.setProperty("TOFRange", TOFRange)
-    algo.setProperty("OutputWorkspace", OutputWorkspace)
-    algo.PyExec()
-    return algo.getProperty('OutputWorkspace').value
-
 
 
 class SingleReadoutDeadTimeCorrection(PythonAlgorithm):

reduction/lr_reduction/event_reduction.py (17 additions, 14 deletions)
@@ -8,6 +8,7 @@
 
 from . import background
 from . import DeadTimeCorrection
+from lr_reduction.utils import mantid_algorithm_exec
 
 
 def get_wl_range(ws):
@@ -246,26 +247,28 @@ def get_dead_time_correction(self):
 
         run_number = self._ws_sc.getRun().getProperty("run_number").value
         error_ws = api.LoadErrorEventsNexus("REF_L_%s" % run_number)
-        corr_ws = DeadTimeCorrection.call(InputWorkspace=self._ws_sc,
-                                          InputErrorEventsWorkspace=error_ws,
-                                          DeadTime=self.DEAD_TIME,
-                                          TOFStep=self.DEAD_TIME_TOF_STEP,
-                                          Paralyzable=self.paralyzable,
-                                          TOFRange=[tof_min, tof_max],
-                                          OutputWorkspace="corr")
+        corr_ws = mantid_algorithm_exec(DeadTimeCorrection.SingleReadoutDeadTimeCorrection,
+                                        InputWorkspace=self._ws_sc,
+                                        InputErrorEventsWorkspace=error_ws,
+                                        DeadTime=self.DEAD_TIME,
+                                        TOFStep=self.DEAD_TIME_TOF_STEP,
+                                        Paralyzable=self.paralyzable,
+                                        TOFRange=[tof_min, tof_max],
+                                        OutputWorkspace="corr")
         corr_sc = corr_ws.readY(0)
         wl_bins = corr_ws.readX(0) / self.constant
 
         # Direct beam workspace
         run_number = self._ws_db.getRun().getProperty("run_number").value
         error_ws = api.LoadErrorEventsNexus("REF_L_%s" % run_number)
-        corr_ws = DeadTimeCorrection.call(InputWorkspace=self._ws_db,
-                                          InputErrorEventsWorkspace=error_ws,
-                                          DeadTime=self.DEAD_TIME,
-                                          TOFStep=self.DEAD_TIME_TOF_STEP,
-                                          Paralyzable=self.paralyzable,
-                                          TOFRange=[tof_min, tof_max],
-                                          OutputWorkspace="corr")
+        corr_ws = mantid_algorithm_exec(DeadTimeCorrection.SingleReadoutDeadTimeCorrection,
+                                        InputWorkspace=self._ws_db,
+                                        InputErrorEventsWorkspace=error_ws,
+                                        DeadTime=self.DEAD_TIME,
+                                        TOFStep=self.DEAD_TIME_TOF_STEP,
+                                        Paralyzable=self.paralyzable,
+                                        TOFRange=[tof_min, tof_max],
+                                        OutputWorkspace="corr")
         corr_db = corr_ws.readY(0)
 
         # Flip the correction since we are going from TOF to Q

reduction/lr_reduction/scaling_factors/LRDirectBeamSort.py (25 additions, 18 deletions)
@@ -16,7 +16,8 @@
 
 THI_TOLERANCE = 0.002
 
-from . import LRScalingFactors
+from lr_reduction.scaling_factors import LRScalingFactors
+from lr_reduction.utils import mantid_algorithm_exec
 
 
 class CompareTwoNXSDataForSFcalculator(object):
@@ -35,6 +36,11 @@ class CompareTwoNXSDataForSFcalculator(object):
     resultComparison = 0
 
     def __init__(self, nxsdataToCompareWith, nxsdataToPosition):
+        """
+        Compare two runs to decide in which order they should be processed.
+        :param workspace nxsdataToCompareWith: run to compare against
+        :param workspace nxsdataToPosition: run to be positioned relative to the first
+        """
         self.nexusToCompareWithRun = nxsdataToCompareWith.getRun()
         self.nexusToPositionRun = nxsdataToPosition.getRun()
 
@@ -95,6 +101,10 @@ def result(self):
 def sorter_function(r1, r2):
     """
     Sorter function used with the 'sorted' call to sort the direct beams.
+    :param workspace r1: first workspace to compare
+    :param workspace r2: second workspace to compare
     """
     return CompareTwoNXSDataForSFcalculator(r2, r1).result()
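
`sorter_function` is an old-style two-argument comparator, so on Python 3 it has to be wrapped with `functools.cmp_to_key` before it can be passed to `sorted`. A minimal sketch of that call; the `lr_data` list of loaded direct-beam workspaces is an assumed placeholder, not a name taken from this diff:

```python
from functools import cmp_to_key

# Order direct-beam workspaces with the pairwise comparator above;
# cmp_to_key adapts the (r1, r2) -> -1/0/1 comparator for sorted().
lr_data_sorted = sorted(lr_data, key=cmp_to_key(sorter_function))
```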

@@ -266,23 +276,20 @@ def _compute_scaling_factors(self, lr_data_sorted):
         deadtime = self.getProperty("DeadTime").value
         deadtime_step = self.getProperty("DeadTimeTOFStep").value
 
-        algo = LRScalingFactors.LRScalingFactors()
-        algo.PyInit()
-        algo.setProperty("DirectBeamRuns", direct_beam_runs)
-        algo.setProperty("TOFRange", tof_range)
-        algo.setProperty("TOFSteps", tof_steps)
-        algo.setProperty("SignalPeakPixelRange", peak_ranges)
-        algo.setProperty("SignalBackgroundPixelRange", bck_ranges)
-        algo.setProperty("LowResolutionPixelRange", x_ranges)
-        algo.setProperty("IncidentMedium", incident_medium)
-        algo.setProperty("SlitTolerance", slit_tolerance)
-        algo.setProperty("ScalingFactorFile", scaling_file)
-        algo.setProperty("DirectBeamRuns", direct_beam_runs)
-        algo.setProperty("UseDeadTimeCorrection", use_deadtime)
-        algo.setProperty("ParalyzableDeadTime", paralyzable)
-        algo.setProperty("DeadTime", deadtime)
-        algo.setProperty("DeadTimeTOFStep", deadtime_step)
-        algo.PyExec()
+        mantid_algorithm_exec(LRScalingFactors.LRScalingFactors,
+                              DirectBeamRuns=direct_beam_runs,
+                              TOFRange=tof_range,
+                              TOFSteps=tof_steps,
+                              SignalPeakPixelRange=peak_ranges,
+                              SignalBackgroundPixelRange=bck_ranges,
+                              LowResolutionPixelRange=x_ranges,
+                              IncidentMedium=incident_medium,
+                              SlitTolerance=slit_tolerance,
+                              ScalingFactorFile=scaling_file,
+                              UseDeadTimeCorrection=use_deadtime,
+                              ParalyzableDeadTime=paralyzable,
+                              DeadTime=deadtime,
+                              DeadTimeTOFStep=deadtime_step)
 
         # log output summary
         logger.notice(summary)

reduction/lr_reduction/scaling_factors/LRScalingFactors.py (10 additions, 7 deletions)
@@ -12,6 +12,7 @@
 from mantid.kernel import *
 
 from lr_reduction import DeadTimeCorrection
+from lr_reduction.utils import mantid_algorithm_exec
 
 
 class LRScalingFactors(PythonAlgorithm):
@@ -462,13 +463,15 @@ def compute_dead_time_correction(self, ws, tof_min, tof_max, tof_step):
         deadtime = self.getProperty("DeadTime").value
         deadtime_step = self.getProperty("DeadTimeTOFStep").value
         error_ws = LoadErrorEventsNexus(ws.getRun().getProperty("run_number").value)
-        corr_ws = DeadTimeCorrection.call(InputWorkspace=ws,
-                                          InputErrorEventsWorkspace=error_ws,
-                                          Paralyzable=paralyzable,
-                                          DeadTime=deadtime,
-                                          TOFStep=deadtime_step,
-                                          TOFRange=[tof_min, tof_max],
-                                          OutputWorkspace="corr")
+
+        corr_ws = mantid_algorithm_exec(DeadTimeCorrection.SingleReadoutDeadTimeCorrection,
+                                        InputWorkspace=ws,
+                                        InputErrorEventsWorkspace=error_ws,
+                                        Paralyzable=paralyzable,
+                                        DeadTime=deadtime,
+                                        TOFStep=deadtime_step,
+                                        TOFRange=[tof_min, tof_max],
+                                        OutputWorkspace="corr")
 
         # Rebin to the workspace we need
         corr_ws = Rebin(InputWorkspace=corr_ws, Params=[tof_min, tof_step, tof_max],

reduction/lr_reduction/scaling_factors/workflow.py (27 additions, 16 deletions)
@@ -3,7 +3,8 @@
 """
 import os
 
-from . import LRDirectBeamSort
+from lr_reduction.scaling_factors import LRDirectBeamSort
+from lr_reduction.utils import mantid_algorithm_exec
 
 
 def process_scaling_factors(ws, output_dir, tof_step=200., order_by_runs=True,
@@ -14,6 +15,18 @@ def process_scaling_factors(ws, output_dir, tof_step=200., order_by_runs=True,
     Compute scaling factors given a DB run, assumed to be the last
     one of a set.
     :param workspace ws: Mantid workspace for one of the direct beams to use.
+    :param output_dir: path to the output directory
+    :param tof_step: TOF binning for the scaling factor calculation
+    :param order_by_runs: if True, the runs will be ordered by run number instead of by metadata
+    :param incident_medium: name of the incident medium
+    :param slit_tolerance: tolerance to use when matching slits between runs
+    :param wait: if True, scaling factors will only be processed if the workspace
+        given corresponds to the last run of the complete set
+    :param postfix: string to add at the end of the output file
+    :param use_deadtime: if True, a dead time correction will be applied
+    :param paralyzable: if True, a paralyzable dead time correction will be applied
+    :param deadtime: value of the dead time
+    :param deadtime_tof_step: TOF binning to use when computing the dead time correction
     """
     # Read in the sequence information
     meta_data_run = ws.getRun()
@@ -40,20 +53,18 @@
 
     output_cfg = os.path.join(output_dir, "sf_%s_%s%s.cfg" % (first_run_of_set, file_id, postfix))
 
-    algo = LRDirectBeamSort.LRDirectBeamSort()
-    algo.PyInit()
-    algo.setProperty("RunList", list(range(first_run_of_set, first_run_of_set + sequence_total)))
-    algo.setProperty("UseLowResCut", True)
-    algo.setProperty("ComputeScalingFactors", True)
-    algo.setProperty("TOFSteps", tof_step)
-    algo.setProperty("IncidentMedium", incident_medium)
-    algo.setProperty("SlitTolerance", slit_tolerance)
-    algo.setProperty("OrderDirectBeamsByRunNumber", order_by_runs)
-    algo.setProperty("UseDeadTimeCorrection", use_deadtime)
-    algo.setProperty("ParalyzableDeadTime", paralyzable)
-    algo.setProperty("DeadTime", deadtime)
-    algo.setProperty("DeadTimeTOFStep", deadtime_tof_step)
-    algo.setProperty("ScalingFactorFile", output_cfg)
-    algo.PyExec()
+    mantid_algorithm_exec(LRDirectBeamSort.LRDirectBeamSort,
+                          RunList=list(range(first_run_of_set, first_run_of_set + sequence_total)),
+                          UseLowResCut=True,
+                          ComputeScalingFactors=True,
+                          TOFSteps=tof_step,
+                          IncidentMedium=incident_medium,
+                          SlitTolerance=slit_tolerance,
+                          OrderDirectBeamsByRunNumber=order_by_runs,
+                          UseDeadTimeCorrection=use_deadtime,
+                          ParalyzableDeadTime=paralyzable,
+                          DeadTime=deadtime,
+                          DeadTimeTOFStep=deadtime_tof_step,
+                          ScalingFactorFile=output_cfg)
 
     return True
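
A hedged usage sketch of this entry point; the run number, output path, and incident-medium value are placeholders, and the keyword names follow the docstring above:

```python
from mantid import simpleapi as api
from lr_reduction.scaling_factors import workflow

# Load one direct-beam run of the set (run number is a placeholder);
# the "REF_L_<run>" pattern matches the loads used elsewhere in this commit.
ws = api.LoadEventNexus("REF_L_198409")

# Writes sf_<first_run_of_set>_<file_id><postfix>.cfg under output_dir
# once the last run of the complete set has been seen.
workflow.process_scaling_factors(ws, output_dir="/tmp/sf_output",
                                 incident_medium="air",
                                 use_deadtime=True, paralyzable=True)
```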

reduction/lr_reduction/utils.py (11 additions, 0 deletions)
@@ -8,6 +8,17 @@
 from mantid.kernel import ConfigService
 
 
+def mantid_algorithm_exec(algorithm_class, **kwargs):
+    algorithm_instance = algorithm_class()
+    assert hasattr(algorithm_instance, "PyInit"), f"{algorithm_class} is not a Mantid Python algorithm"
+    algorithm_instance.PyInit()
+    for name, value in kwargs.items():
+        algorithm_instance.setProperty(name, value)
+    algorithm_instance.PyExec()
+    if 'OutputWorkspace' in kwargs:
+        return algorithm_instance.getProperty('OutputWorkspace').value
+
+
 @contextmanager
 def amend_config(
     new_config: dict = None, data_dir: Union[str, list] = None, data_dir_insert_mode: str = "prepend"
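
The new helper centralizes the `PyInit`/`setProperty`/`PyExec` boilerplate that each call site previously spelled out, and it returns the output workspace only when an `OutputWorkspace` property is passed (otherwise it returns `None`). A minimal usage sketch; the run number is a placeholder and the dead-time values are the defaults from the removed `call()` wrapper:

```python
from mantid import simpleapi as api
from lr_reduction import DeadTimeCorrection
from lr_reduction.utils import mantid_algorithm_exec

# Placeholder run number; the "REF_L_<run>" pattern matches the loads above
ws = api.LoadEventNexus("REF_L_198409")
error_ws = api.LoadErrorEventsNexus("REF_L_198409")

# One call replaces the manual PyInit/setProperty/PyExec sequence;
# the corrected workspace is returned because OutputWorkspace is given
corr_ws = mantid_algorithm_exec(DeadTimeCorrection.SingleReadoutDeadTimeCorrection,
                                InputWorkspace=ws,
                                InputErrorEventsWorkspace=error_ws,
                                DeadTime=4.2,
                                TOFStep=100.0,
                                TOFRange=[0, 0],
                                OutputWorkspace="corr")
```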
