diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..308e1e2 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,7 @@ +# Maintain dependencies for GitHub Actions +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8ec3aca..ebecd7f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -18,8 +18,8 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] - ctapipe-version: ["v0.19.2",] + python-version: ["3.9", "3.10", "3.11"] + ctapipe-version: ["v0.19.2"] defaults: run: @@ -68,4 +68,4 @@ jobs: run: | pytest --cov=ctapipe_io_magic --cov-report=xml - - uses: codecov/codecov-action@v3 + - uses: codecov/codecov-action@v4 diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index 873a5cd..1697ed4 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -10,23 +10,19 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - # make sure we have version info - - run: git fetch --tags - - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: "3.8" + python-version: "3.9" - name: Install dependencies run: | python --version - pip install -U pip setuptools wheel setuptools_scm[toml] - python setup.py sdist bdist_wheel + pip install -U build + python -m build - name: Publish package - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: user: __token__ password: ${{ secrets.pypi_password }} diff --git a/.github/workflows/release-drafter.yml b/.github/workflows/release-drafter.yml index 17fdb96..99fe14d 100644 --- a/.github/workflows/release-drafter.yml +++ b/.github/workflows/release-drafter.yml @@ -9,6 +9,6 @@ jobs: update_release_draft: runs-on: ubuntu-latest steps: - - uses: release-drafter/release-drafter@v5 + - uses: release-drafter/release-drafter@v6 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/ctapipe_io_magic/__init__.py b/ctapipe_io_magic/__init__.py index d35cab6..837e1c3 100644 --- a/ctapipe_io_magic/__init__.py +++ b/ctapipe_io_magic/__init__.py @@ -46,6 +46,11 @@ ) from .mars_datalevels import MARSDataLevel +from .exceptions import ( + MissingInputFilesError, + FailedFileCheckError, + MissingDriveReportError, +) from .version import __version__ from .constants import ( @@ -59,7 +64,14 @@ DATA_MAGIC_LST_TRIGGER, ) -__all__ = ["MAGICEventSource", "MARSDataLevel", "__version__"] +__all__ = [ + "MAGICEventSource", + "MARSDataLevel", + "MissingInputFilesError", + "FailedFileCheckError", + "MissingDriveReportError", + "__version__", +] logger = logging.getLogger(__name__) @@ -152,15 +164,6 @@ def load_camera_geometry(): return CameraGeometry.from_table(f) -class MissingDriveReportError(Exception): - """ - Exception raised when a subrun does not have drive reports. - """ - - def __init__(self, message): - self.message = message - - class MAGICEventSource(EventSource): """ EventSource for MAGIC calibrated data. 
@@ -243,6 +246,8 @@ def __init__(self, input_url=None, config=None, parent=None, **kwargs): reg_comp_mc = re.compile(regex_mc) ls = Path(path).iterdir() + + self.file_list = [] self.file_list_drive = [] for file_path in ls: @@ -252,6 +257,12 @@ def __init__(self, input_url=None, config=None, parent=None, **kwargs): ): self.file_list_drive.append(file_path) + if not len(self.file_list_drive): + raise MissingInputFilesError( + f"No input files found in {path}. Exiting. " + f"Check your input: {input_url}." + ) + self.file_list_drive.sort() if self.process_run: @@ -261,6 +272,12 @@ def __init__(self, input_url=None, config=None, parent=None, **kwargs): # Retrieving the list of run numbers corresponding to the data files self.files_ = [uproot.open(rootf) for rootf in self.file_list] + + is_check_valid = self.check_files() + + if not is_check_valid: + raise FailedFileCheckError("Validity check for the files failed. Exiting.") + run_info = self.parse_run_info() self.run_id = run_info[0][0] @@ -472,6 +489,51 @@ def get_run_info_from_name(file_name): return run_number, is_mc, telescope, datalevel + def check_files(self): + """Check that the input files contain the needed trees.""" + + needed_trees = ["RunHeaders", "Events"] + num_files = len(self.files_) + + if ( + num_files == 1 + and "Drive" not in self.files_[0].keys(cycle=False) + and "OriginalMC" not in self.files_[0].keys(cycle=False) + ): + logger.error("Cannot proceed without Drive information for a single file.") + return False + + if ( + num_files == 1 + and "Trigger" not in self.files_[0].keys(cycle=False) + and "OriginalMC" not in self.files_[0].keys(cycle=False) + ): + logger.error( + "Cannot proceed without Trigger information for a single file." + ) + return False + + num_invalid_files = 0 + + for rootf in self.files_: + for tree in needed_trees: + if tree not in rootf.keys(cycle=False): + logger.warning( + f"File {rootf.file_path} does not have the tree {tree}." + ) + if tree == "RunHeaders" or tree == "Events": + logger.error( + f"File {rootf.file_path} does not have a {tree} tree. " + f"Please check the file and try again. If the file " + f"cannot be recovered, exclude it from the analysis." + ) + num_invalid_files += 1 + + if num_invalid_files > 0: + return False + else: + return True + def parse_run_info(self): """ Parses run info from the TTrees in the ROOT file @@ -588,6 +650,13 @@ def parse_data_info(self): is_sumt = [] is_hast = [] + stereo_prev = None + sumt_prev = None + hast_prev = None + + has_prescaler_info = True + has_trigger_table_info = True + if not self.is_simulation: prescaler_mono_nosumt = [1, 1, 0, 1, 0, 0, 0, 0] prescaler_mono_sumt = [0, 1, 0, 1, 0, 1, 0, 0] @@ -601,66 +670,139 @@ def parse_data_info(self): L3_table_sumt = "L3T_SUMSUM_100_SYNC" for rootf in self.files_: - trigger_tree = rootf["Trigger"] - L3T_tree = rootf["L3T"] + has_trigger_info = True + try: + trigger_tree = rootf["Trigger"] + except uproot.exceptions.KeyInFileError: + logger.warning(f"No Trigger tree found in {rootf.file_path}.") + has_trigger_info = False + + has_l3_info = True - # here we take the 2nd element (if possible) because sometimes - # the first trigger report has still the old prescaler values from a previous run try: - prescaler_array = trigger_tree[ - "MTriggerPrescFact.fPrescFact" - ].array(library="np") - except AssertionError: - logger.warning( - "No prescaler info found. Will assume standard stereo data."
- ) - stereo = True - sumt = False - hast = False - return stereo, sumt, hast + L3T_tree = rootf["L3T"] + except uproot.exceptions.KeyInFileError: + logger.warning(f"No L3T tree found in {rootf.file_path}.") + has_l3_info = False - prescaler_size = prescaler_array.size - if prescaler_size > 1: - prescaler = list(prescaler_array[1]) - else: - prescaler = list(prescaler_array[0]) + # here we take the 2nd element (if possible) because sometimes + # the first trigger report has still the old prescaler values from a previous run + if has_trigger_info: + try: + prescaler_array = trigger_tree[ + "MTriggerPrescFact.fPrescFact" + ].array(library="np") + except AssertionError: + logger.warning( + f"No prescaler factors branch found in {rootf.file_path}." + ) + has_prescaler_info = False - if ( - prescaler == prescaler_mono_nosumt - or prescaler == prescaler_mono_sumt - ): - stereo = False - hast = False - elif prescaler == prescaler_stereo: - stereo = True - hast = False - elif prescaler == prescaler_hast: - stereo = True - hast = True - else: - stereo = True - hast = False + if not has_trigger_info or not has_prescaler_info: + if stereo_prev is not None and hast_prev is not None: + logger.warning( + "Assuming previous subrun information for trigger settings." + ) + stereo = stereo_prev + hast = hast_prev + else: + logger.warning("Assuming standard stereo data.") + stereo = True + hast = False + + if has_prescaler_info: + prescaler = None + prescaler_size = prescaler_array.size + if prescaler_size > 1: + prescaler = list(prescaler_array[1]) + elif prescaler_size == 1: + prescaler = list(prescaler_array[0]) + else: + logger.warning(f"No prescaler info found in {rootf.file_path}.") + if stereo_prev is not None and hast_prev is not None: + logger.warning( + "Assuming previous subrun information for trigger settings." + ) + stereo = stereo_prev + hast = hast_prev + else: + logger.warning("Assuming standard stereo data.") + stereo = True + hast = False + + if prescaler is not None: + if ( + prescaler == prescaler_mono_nosumt + or prescaler == prescaler_mono_sumt + ): + stereo = False + hast = False + elif prescaler == prescaler_stereo: + stereo = True + hast = False + elif prescaler == prescaler_hast: + stereo = True + hast = True + else: + logger.warning( + f"A prescaler different from the default mono, stereo or hast settings was found: {prescaler}. Please check your data." + ) + stereo = True + hast = False sumt = False if stereo: # here we take the 2nd element for the same reason as above # L3Table is empty for mono data i.e. taken with one telescope only # if both telescopes take data with no L3, L3Table is filled anyway - L3Table_array = L3T_tree["MReportL3T.fTablename"].array( - library="np" - ) - L3Table_size = L3Table_array.size - if L3Table_size > 1: - L3Table = L3Table_array[1] - else: - L3Table = L3Table_array[0] + if has_l3_info: + try: + L3Table_array = L3T_tree["MReportL3T.fTablename"].array( + library="np" + ) + except AssertionError: + logger.warning( + f"No trigger table branch found in {rootf.file_path}." + ) + has_trigger_table_info = False - if L3Table == L3_table_sumt: - sumt = True - elif L3Table == L3_table_nosumt: - sumt = False - else: - sumt = False + if not has_l3_info or not has_trigger_table_info: + if sumt_prev is not None: + logger.warning( + "Assuming trigger table information from the previous subrun."
- ) + sumt = sumt_prev + else: + logger.warning("Assuming standard trigger data.") + sumt = False + + if has_trigger_table_info: + L3Table = None + L3Table_size = L3Table_array.size + if L3Table_size > 1: + L3Table = L3Table_array[1] + elif L3Table_size == 1: + L3Table = L3Table_array[0] + else: + logger.warning( + f"No trigger table info found in {rootf.file_path}." + ) + if sumt_prev is not None: + logger.warning( + "Assuming trigger table information from the previous subrun." + ) + sumt = sumt_prev + else: + logger.warning("Assuming standard trigger data.") + sumt = False + + if L3Table is not None: + if L3Table == L3_table_sumt: + sumt = True + elif L3Table == L3_table_nosumt: + sumt = False + else: + sumt = False else: if prescaler == prescaler_mono_sumt: sumt = True @@ -669,6 +811,10 @@ def parse_data_info(self): is_sumt.append(sumt) is_hast.append(hast) + stereo_prev = stereo + sumt_prev = sumt + hast_prev = hast + else: for rootf in self.files_: # looking into MC data, when SumT is simulated, trigger pattern of all events is set to 32 (bit 5), except the first (set to 0) @@ -1329,13 +1475,11 @@ def get_event_time_difference(self): if self.is_hast: event_cut = ( f"(MTriggerPattern.fPrescaled == {data_trigger_pattern})" - f" | (MTriggerPattern.fPrescaled == {DATA_TOPOLOGICAL_TRIGGER})" - f" | (MTriggerPattern.fPrescaled == {DATA_MAGIC_LST_TRIGGER})" + f" | (MTriggerPattern.fPrescaled == {DATA_TOPOLOGICAL_TRIGGER})" + f" | (MTriggerPattern.fPrescaled == {DATA_MAGIC_LST_TRIGGER})" ) else: - event_cut = ( - f"(MTriggerPattern.fPrescaled == {data_trigger_pattern})", - ) + event_cut = (f"(MTriggerPattern.fPrescaled == {data_trigger_pattern})",) for uproot_file in self.files_: event_info = uproot_file["Events"].arrays( @@ -1677,9 +1821,9 @@ def _event_generator(self): if not self.use_pedestals: badrmspixel_mask = self._get_badrmspixel_mask(event) - event.mon.tel[ - tel_id - ].pixel_status.pedestal_failing_pixels = badrmspixel_mask + event.mon.tel[tel_id].pixel_status.pedestal_failing_pixels = ( + badrmspixel_mask + ) # Set the telescope pointing container: event.pointing.array_azimuth = event_data["pointing_az"][i_event].to( @@ -1899,9 +2043,9 @@ def _load_data(self): if self.use_sumt_events: mc_trigger_pattern = MC_SUMT_TRIGGER_PATTERN if self.use_mc_mono_events or not self.is_stereo: - events_cut[ - "cosmic_events" - ] = f"(MTriggerPattern.fPrescaled == {mc_trigger_pattern})" + events_cut["cosmic_events"] = ( + f"(MTriggerPattern.fPrescaled == {mc_trigger_pattern})" + ) else: events_cut["cosmic_events"] = ( f"(MTriggerPattern.fPrescaled == {mc_trigger_pattern})" @@ -1921,13 +2065,13 @@ def _load_data(self): f" | (MTriggerPattern.fPrescaled == {DATA_MAGIC_LST_TRIGGER})" ) else: - events_cut[ - "cosmic_events" - ] = f"(MTriggerPattern.fPrescaled == {data_trigger_pattern})" + events_cut["cosmic_events"] = ( + f"(MTriggerPattern.fPrescaled == {data_trigger_pattern})" + ) # Only for cosmic events because MC data do not have pedestal events: - events_cut[ - "pedestal_events" - ] = f"(MTriggerPattern.fPrescaled == {PEDESTAL_TRIGGER_PATTERN})" + events_cut["pedestal_events"] = ( + f"(MTriggerPattern.fPrescaled == {PEDESTAL_TRIGGER_PATTERN})" + ) logger.info(f"Cosmic events selection: {events_cut['cosmic_events']}") @@ -1988,10 +2132,12 @@ def _load_data(self): daq_ids = common_info["MRawEvtHeader.fDAQEvtNumber"] calib_data[event_type]["event_number"] = np.array( [ - f"{subrun_id}{daq_ids[event_idx]:07}" - if common_info["MTriggerPattern.fPrescaled"][event_idx] - == DATA_TOPOLOGICAL_TRIGGER -
else stereo_ids[event_idx] + ( + f"{subrun_id}{daq_ids[event_idx]:07}" + if common_info["MTriggerPattern.fPrescaled"][event_idx] + == DATA_TOPOLOGICAL_TRIGGER + else stereo_ids[event_idx] + ) for event_idx in range( common_info["MTriggerPattern.fPrescaled"].size ) diff --git a/ctapipe_io_magic/exceptions.py b/ctapipe_io_magic/exceptions.py new file mode 100644 index 0000000..f8132ff --- /dev/null +++ b/ctapipe_io_magic/exceptions.py @@ -0,0 +1,22 @@ +class MissingInputFilesError(Exception): + """ + Exception raised when there are no input files. + """ + + pass + + +class FailedFileCheckError(Exception): + """ + Exception raised when the file check fails. + """ + + pass + + +class MissingDriveReportError(Exception): + """ + Exception raised when a subrun does not have drive reports. + """ + + pass diff --git a/ctapipe_io_magic/tests/test_magic_event_source.py b/ctapipe_io_magic/tests/test_magic_event_source.py index 154551a..fbac975 100644 --- a/ctapipe_io_magic/tests/test_magic_event_source.py +++ b/ctapipe_io_magic/tests/test_magic_event_source.py @@ -13,6 +13,33 @@ test_calibrated_real_dir / "20210314_M2_05095172.002_Y_CrabNebula-W0.40+035.root", ] +test_calibrated_real_only_events = [ + test_calibrated_real_dir + / "missing_trees/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035_only_events.root", +] + +test_calibrated_real_only_drive = [ + test_calibrated_real_dir + / "missing_trees/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035_only_drive.root", +] + +test_calibrated_real_only_runh = [ + test_calibrated_real_dir + / "missing_trees/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035_only_runh.root", +] + +test_calibrated_real_only_trigger = [ + test_calibrated_real_dir + / "missing_trees/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035_only_trigger.root", +] + +test_calibrated_real_without_prescaler_trigger = [ + test_calibrated_real_dir + / "missing_prescaler_trigger/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035.root", + test_calibrated_real_dir + / "missing_prescaler_trigger/20210314_M1_05095172.002_Y_CrabNebula-W0.40+035_no_prescaler_trigger.root", +] + test_calibrated_real_hast = [ test_calibrated_real_dir / "20230324_M1_05106879.001_Y_1ES0806+524-W0.40+000.root", test_calibrated_real_dir / "20230324_M1_05106879.002_Y_1ES0806+524-W0.40+000.root", @@ -32,6 +59,13 @@ test_calibrated_real + test_calibrated_simulated + test_calibrated_real_hast ) +test_calibrated_missing_trees = ( + test_calibrated_real_only_events + + test_calibrated_real_only_drive + + test_calibrated_real_only_runh + + test_calibrated_real_only_trigger +) + data_dict = dict() data_dict["20210314_M1_05095172.001_Y_CrabNebula-W0.40+035.root"] = dict() @@ -335,9 +369,18 @@ def test_number_of_events(dataset): count_2_tel_m1_m2 += 1 assert count_3_tel == data_dict[source.input_url.name]["n_events_3_tel"] - assert count_2_tel_m1_lst == data_dict[source.input_url.name]["n_events_2_tel_m1_lst"] - assert count_2_tel_m2_lst == data_dict[source.input_url.name]["n_events_2_tel_m2_lst"] - assert count_2_tel_m1_m2 == data_dict[source.input_url.name]["n_events_2_tel_m1_m2"] + assert ( + count_2_tel_m1_lst + == data_dict[source.input_url.name]["n_events_2_tel_m1_lst"] + ) + assert ( + count_2_tel_m2_lst + == data_dict[source.input_url.name]["n_events_2_tel_m2_lst"] + ) + assert ( + count_2_tel_m1_m2 + == data_dict[source.input_url.name]["n_events_2_tel_m1_m2"] + ) # if '_M1_' in dataset.name: # assert run['data'].n_cosmics_stereo_events_m1 == data_dict[source.input_url.name]['n_events_stereo'] @@ -478,24 +521,41 @@ def
test_focal_length_choice(dataset): ) -# def test_eventseeker(): -# dataset = get_dataset_path("20131004_M1_05029747.003_Y_MagicCrab-W0.40+035.root") -# -# with MAGICEventSource(input_url=dataset) as source: -# seeker = EventSeeker(source) -# event = seeker.get_event_index(0) -# assert event.count == 0 -# assert event.index.event_id == 29795 -# -# event = seeker.get_event_index(2) -# assert event.count == 2 -# assert event.index.event_id == 29798 -# -# def test_eventcontent(): -# dataset = get_dataset_path("20131004_M1_05029747.003_Y_MagicCrab-W0.40+035.root") -# -# with MAGICEventSource(input_url=dataset) as source: -# seeker = EventSeeker(source) -# event = seeker.get_event_index(0) -# assert event.dl1.tel[1].image[0] == -0.53125 -# assert event.dl1.tel[1].peak_time[0] == 49.125 +@pytest.mark.parametrize("dataset", test_calibrated_missing_trees) +def test_check_files(dataset): + from ctapipe_io_magic import MAGICEventSource, FailedFileCheckError + + with pytest.raises(FailedFileCheckError): + MAGICEventSource(input_url=dataset, process_run=False) + + +def test_check_missing_files(): + from ctapipe_io_magic import MAGICEventSource, MissingInputFilesError + + with pytest.raises(MissingInputFilesError): + MAGICEventSource( + input_url="20501312_M1_05095172.001_Y_FakeSource-W0.40+035.root", + process_run=False, + ) + + +def test_broken_subruns_missing_prescaler_trigger(): + from ctapipe_io_magic import MAGICEventSource + + input_file = test_calibrated_real_dir / "missing_prescaler_trigger/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035.root" + + MAGICEventSource(input_url=input_file, process_run=True) + + +def test_broken_subruns_missing_arrays(): + from ctapipe_io_magic import MAGICEventSource + + input_file = ( + test_calibrated_real_dir + / "missing_arrays/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035.root" + ) + + MAGICEventSource( + input_url=input_file, + process_run=True, + ) diff --git a/download_test_data.sh b/download_test_data.sh index 88f5fb0..5e1dd5f 100755 --- a/download_test_data.sh +++ b/download_test_data.sh @@ -10,6 +10,16 @@ echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20230324_M echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20230324_M1_05106879.002_Y_1ES0806+524-W0.40+000.root" >> test_data_real.txt echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20230324_M2_05106879.001_Y_1ES0806+524-W0.40+000.root" >> test_data_real.txt echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20230324_M2_05106879.002_Y_1ES0806+524-W0.40+000.root" >> test_data_real.txt +echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035_only_events.root" > test_data_real_missing_trees.txt +echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035_only_drive.root" >> test_data_real_missing_trees.txt +echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035_only_runh.root" >> test_data_real_missing_trees.txt +echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035_only_trigger.root" >> test_data_real_missing_trees.txt + +echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035.root" > test_data_real_missing_prescaler_trigger.txt +echo
"https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20210314_M1_05095172.002_Y_CrabNebula-W0.40+035_no_prescaler_trigger.root" >> test_data_real_missing_prescaler_trigger.txt + +echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20210314_M1_05095172.001_Y_CrabNebula-W0.40+035.root" > test_data_real_missing_arrays.txt +echo "https://www.magic.iac.es/mcp-testdata/test_data/real/calibrated/20210314_M1_05095172.002_Y_CrabNebula-W0.40+035_no_arrays.root" >> test_data_real_missing_arrays.txt echo "https://www.magic.iac.es/mcp-testdata/test_data/simulated/calibrated/GA_M1_za35to50_8_824318_Y_w0.root" > test_data_simulated.txt echo "https://www.magic.iac.es/mcp-testdata/test_data/simulated/calibrated/GA_M1_za35to50_8_824319_Y_w0.root" >> test_data_simulated.txt @@ -26,26 +36,26 @@ if [ -z "$TEST_DATA_PASSWORD" ]; then echo fi -if ! wget \ - -i test_data_real.txt \ - --user="$TEST_DATA_USER" \ - --password="$TEST_DATA_PASSWORD" \ - --no-check-certificate \ - --no-verbose \ - --timestamping \ - --directory-prefix=test_data/real/calibrated; then - echo "Problem in downloading the test data set (calibrated) for real data." -fi - -if ! wget \ - -i test_data_simulated.txt \ - --user="$TEST_DATA_USER" \ - --password="$TEST_DATA_PASSWORD" \ - --no-check-certificate \ - --no-verbose \ - --timestamping \ - --directory-prefix=test_data/simulated/calibrated; then - echo "Problem in downloading the test data set (calibrated) for simulated data." +declare -A TEST_FILES_DOWNLOAD + +TEST_FILES_DOWNLOAD[test_data_real]="test_data/real/calibrated" +TEST_FILES_DOWNLOAD[test_data_real_missing_trees]="test_data/real/calibrated/missing_trees" +TEST_FILES_DOWNLOAD[test_data_real_missing_prescaler_trigger]="test_data/real/calibrated/missing_prescaler_trigger" +TEST_FILES_DOWNLOAD[test_data_real_missing_arrays]="test_data/real/calibrated/missing_arrays" +TEST_FILES_DOWNLOAD[test_data_simulated]="test_data/simulated/calibrated" + +for key in "${!TEST_FILES_DOWNLOAD[@]}" +do + if ! wget \ + -i "${key}.txt" \ + --user="$TEST_DATA_USER" \ + --password="$TEST_DATA_PASSWORD" \ + --no-check-certificate \ + --no-verbose \ + --timestamping \ + --directory-prefix="${TEST_FILES_DOWNLOAD[${key}]}"; then + echo "Problem in downloading the test data set from ${key}.txt." fi +done -rm -f test_data_real.txt test_data_simulated.txt +rm -f test_data_real.txt test_data_simulated.txt test_data_real_missing_trees.txt test_data_real_missing_prescaler_trigger.txt test_data_real_missing_arrays.txt