From 53e8796ceda985e11173e86333b3131b9d1f630d Mon Sep 17 00:00:00 2001 From: alex-rakowski Date: Thu, 9 Nov 2023 11:12:25 -0800 Subject: [PATCH] initial bare exception fix --- py4DSTEM/braggvectors/diskdetection.py | 2 +- py4DSTEM/braggvectors/diskdetection_aiml.py | 42 ++++++++++++------- .../braggvectors/diskdetection_aiml_cuda.py | 10 +++-- py4DSTEM/braggvectors/diskdetection_cuda.py | 3 +- py4DSTEM/datacube/virtualimage.py | 3 +- py4DSTEM/io/filereaders/read_K2.py | 3 +- py4DSTEM/io/legacy/read_utils.py | 3 +- .../process/classification/featurization.py | 2 +- py4DSTEM/process/polar/polar_peaks.py | 11 ++--- py4DSTEM/process/utils/multicorr.py | 3 +- py4DSTEM/utils/configuration_checker.py | 5 ++- 11 files changed, 53 insertions(+), 34 deletions(-) diff --git a/py4DSTEM/braggvectors/diskdetection.py b/py4DSTEM/braggvectors/diskdetection.py index e23b10a15..59a8a55e7 100644 --- a/py4DSTEM/braggvectors/diskdetection.py +++ b/py4DSTEM/braggvectors/diskdetection.py @@ -221,7 +221,7 @@ def find_Bragg_disks( mode = "dp" elif data.ndim == 3: mode = "dp_stack" - except: + except Exception: er = f"entry {data} for `data` could not be parsed" raise Exception(er) diff --git a/py4DSTEM/braggvectors/diskdetection_aiml.py b/py4DSTEM/braggvectors/diskdetection_aiml.py index 67df18074..4a59fd59e 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml.py @@ -103,12 +103,12 @@ def find_Bragg_disks_aiml_single_DP( """ try: import crystal4D - except: - raise ImportError("Import Error: Please install crystal4D before proceeding") + except ModuleNotFoundError: + raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") try: import tensorflow as tf - except: - raise ImportError( + except ModuleNotFoundError: + raise ModuleNotFoundError( "Please install tensorflow before proceeding - please check " + "https://www.tensorflow.org/install" + "for more information" @@ -256,8 +256,8 @@ def 
find_Bragg_disks_aiml_selected( try: import crystal4D - except: - raise ImportError("Import Error: Please install crystal4D before proceeding") + except ModuleNotFoundError: + raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") assert len(Rx) == len(Ry) peaks = [] @@ -433,8 +433,8 @@ def find_Bragg_disks_aiml_serial( try: import crystal4D - except: - raise ImportError("Import Error: Please install crystal4D before proceeding") + except ModuleNotFoundError: + raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding") # Make the peaks PointListArray # dtype = [('qx',float),('qy',float),('intensity',float)] @@ -643,8 +643,8 @@ def find_Bragg_disks_aiml( """ try: import crystal4D - except: - raise ImportError("Please install crystal4D before proceeding") + except ModuleNotFoundError: + raise ModuleNotFoundError("Please install crystal4D before proceeding") def _parse_distributed(distributed): import os @@ -840,7 +840,8 @@ def _integrate_disks(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1): disks.append(np.average(disk)) try: disks = disks / max(disks) - except: + # TODO work out what exception would go here + except Exception: pass return (maxima_x, maxima_y, disks) @@ -878,8 +879,8 @@ def _get_latest_model(model_path=None): try: import tensorflow as tf - except: - raise ImportError( + except ModuleNotFoundError: + raise ModuleNotFoundError( "Please install tensorflow before proceeding - please check " + "https://www.tensorflow.org/install" + "for more information" @@ -891,8 +892,12 @@ def _get_latest_model(model_path=None): if model_path is None: try: os.mkdir("./tmp") - except: + except FileExistsError: pass + except Exception as e: + # TODO work out if I want to pass or raise + pass + # raise e # download the json file with the meta data download_file_from_google_drive("FCU-Net", "./tmp/model_metadata.json") with open("./tmp/model_metadata.json") as f: @@ -905,9 +910,13 @@ def 
_get_latest_model(model_path=None): with open("./tmp/model_metadata_old.json") as f_old: metaold = json.load(f_old) file_id_old = metaold["file_id"] - except: + # TODO Double check this is correct Error + except FileNotFoundError: + file_id_old = file_id + except Exception: file_id_old = file_id + if os.path.exists(file_path) and file_id == file_id_old: print( "Latest model weight is already available in the local system. Loading the model... \n" ) @@ -921,7 +930,8 @@ def _get_latest_model(model_path=None): download_file_from_google_drive(file_id, filename) try: shutil.unpack_archive(filename, "./tmp", format="zip") - except: + # TODO Work out what specific exception + except Exception: pass model_path = file_path os.rename("./tmp/model_metadata.json", "./tmp/model_metadata_old.json") diff --git a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py index c5f89b9fd..bbe0d37d4 100644 --- a/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_aiml_cuda.py @@ -22,8 +22,8 @@ try: import tensorflow as tf -except: - raise ImportError( +except ModuleNotFoundError: + raise ModuleNotFoundError( "Please install tensorflow before proceeding - please check " + "https://www.tensorflow.org/install" + "for more information" @@ -637,7 +637,8 @@ def upsampled_correlation_cp(imageCorr, upsampleFactor, xyShift): ) dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) - except: + # TODO I think this is just the Index Error + except (IndexError, Exception): dx, dy = ( 0, 0, @@ -733,6 +734,7 @@ def _integrate_disks_cp(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1) disks.append(np.average(disk)) try: disks = disks / max(disks) - except: + # TODO work out what exception to use + except Exception: pass return (maxima_x, maxima_y, disks) diff --git a/py4DSTEM/braggvectors/diskdetection_cuda.py 
b/py4DSTEM/braggvectors/diskdetection_cuda.py index 4bbb7f488..55e782028 100644 --- a/py4DSTEM/braggvectors/diskdetection_cuda.py +++ b/py4DSTEM/braggvectors/diskdetection_cuda.py @@ -618,7 +618,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift): dy = (icc[1, 2] - icc[1, 0]) / ( 4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0] ) - except: + # TODO Work out what exception to use + except (IndexError, Exception): dx, dy = ( 0, 0, diff --git a/py4DSTEM/datacube/virtualimage.py b/py4DSTEM/datacube/virtualimage.py index 50a297914..51415e9fb 100644 --- a/py4DSTEM/datacube/virtualimage.py +++ b/py4DSTEM/datacube/virtualimage.py @@ -414,7 +414,8 @@ def position_detector( try: image = self.tree(k) break - except: + # TODO check what error is produced when passing nonexistent key to tree + except (KeyError, AttributeError): pass if image is None: image = self[0, 0] diff --git a/py4DSTEM/io/filereaders/read_K2.py b/py4DSTEM/io/filereaders/read_K2.py index 61405a437..d316a5e7b 100644 --- a/py4DSTEM/io/filereaders/read_K2.py +++ b/py4DSTEM/io/filereaders/read_K2.py @@ -124,7 +124,8 @@ def __init__( # this may be wrong for binned data... in which case the reader doesn't work anyway! 
Q_Nx = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.height"] Q_Ny = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.width"] - except: + # TODO check this is the correct error type + except ValueError: print("Warning: diffraction pattern shape not detected!") print("Assuming 1920x1792 as the diffraction pattern size!") Q_Nx = 1792 diff --git a/py4DSTEM/io/legacy/read_utils.py b/py4DSTEM/io/legacy/read_utils.py index 7cd48cde7..27ee5cb7a 100644 --- a/py4DSTEM/io/legacy/read_utils.py +++ b/py4DSTEM/io/legacy/read_utils.py @@ -100,7 +100,8 @@ def get_N_dataobjects(filepath, topgroup="4DSTEM_experiment"): N_pla = len(f[topgroup]["data/pointlistarrays"].keys()) try: N_coords = len(f[topgroup]["data/coordinates"].keys()) - except: + # TODO work out what exception will be raised + except (ValueError, AttributeError, Exception): N_coords = 0 N_do = N_dc + N_cdc + N_ds + N_rs + N_pl + N_pla + N_coords return N_dc, N_cdc, N_ds, N_rs, N_pl, N_pla, N_coords, N_do diff --git a/py4DSTEM/process/classification/featurization.py b/py4DSTEM/process/classification/featurization.py index 38b4e1412..26a63a62d 100644 --- a/py4DSTEM/process/classification/featurization.py +++ b/py4DSTEM/process/classification/featurization.py @@ -477,7 +477,7 @@ def get_class_DPs(self, datacube, method, thresh): datacube.data.shape[2], datacube.data.shape[3], ) - except: + except ValueError: raise ValueError( "Datacube must have same R_Nx and R_Ny dimensions as Featurization instance." 
) diff --git a/py4DSTEM/process/polar/polar_peaks.py b/py4DSTEM/process/polar/polar_peaks.py index be9ae989e..7cc453544 100644 --- a/py4DSTEM/process/polar/polar_peaks.py +++ b/py4DSTEM/process/polar/polar_peaks.py @@ -602,8 +602,8 @@ def refine_peaks_local( self.peaks[rx, ry]["qr"][a0] = p0[2] / q_step self.peaks[rx, ry]["sigma_annular"][a0] = p0[3] / t_step self.peaks[rx, ry]["sigma_radial"][a0] = p0[4] / q_step - - except: + # TODO work out what error is raised + except Exception: pass else: @@ -643,8 +643,8 @@ def refine_peaks_local( self.peaks[rx, ry]["qr"][a0] = p0[2] / q_step self.peaks[rx, ry]["sigma_annular"][a0] = p0[3] / t_step self.peaks[rx, ry]["sigma_radial"][a0] = p0[4] / q_step - - except: + # TODO work out what exception is raised + except Exception: pass @@ -1044,7 +1044,8 @@ def fit_image(basis, *coefs): ), name="peaks_polar", ) - except: + # TODO work out what exception is raised + except Exception: # if fitting has failed, we will still output the last iteration # TODO - add a flag for unconverged fits coefs_peaks = np.reshape(coefs_all[(3 * num_rings + 3) :], (5, num_peaks)).T diff --git a/py4DSTEM/process/utils/multicorr.py b/py4DSTEM/process/utils/multicorr.py index bc07390bb..6760407a6 100644 --- a/py4DSTEM/process/utils/multicorr.py +++ b/py4DSTEM/process/utils/multicorr.py @@ -99,7 +99,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift, device="cpu"): ) dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1]) dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]) - except: + # TODO work out what error is raised + except (IndexError, Exception): dx, dy = ( 0, 0, diff --git a/py4DSTEM/utils/configuration_checker.py b/py4DSTEM/utils/configuration_checker.py index 26b0b89d5..c1bb98c47 100644 --- a/py4DSTEM/utils/configuration_checker.py +++ b/py4DSTEM/utils/configuration_checker.py @@ -304,7 +304,7 @@ def import_tester(m: str) -> bool: # try and import the module try: 
importlib.import_module(m) - except: + except ModuleNotFoundError: state = False return state @@ -391,7 +391,8 @@ def check_cupy_gpu(gratuitously_verbose: bool, **kwargs): try: d = cp.cuda.Device(i) hasattr(d, "attributes") - except: + # TODO work out what error is raised + except (AttributeError, Exception): num_gpus_detected = i break