Skip to content

Commit

Permalink
initial bare exception fix
Browse files Browse the repository at this point in the history
  • Loading branch information
alex-rakowski committed Nov 9, 2023
1 parent ea1e7ed commit 53e8796
Show file tree
Hide file tree
Showing 11 changed files with 53 additions and 34 deletions.
2 changes: 1 addition & 1 deletion py4DSTEM/braggvectors/diskdetection.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ def find_Bragg_disks(
mode = "dp"
elif data.ndim == 3:
mode = "dp_stack"
except:
except Exception:
er = f"entry {data} for `data` could not be parsed"
raise Exception(er)

Expand Down
42 changes: 26 additions & 16 deletions py4DSTEM/braggvectors/diskdetection_aiml.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,12 +103,12 @@ def find_Bragg_disks_aiml_single_DP(
"""
try:
import crystal4D
except:
raise ImportError("Import Error: Please install crystal4D before proceeding")
except ModuleNotFoundError:
raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding")
try:
import tensorflow as tf
except:
raise ImportError(
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Please install tensorflow before proceeding - please check "
+ "https://www.tensorflow.org/install"
+ "for more information"
Expand Down Expand Up @@ -256,8 +256,8 @@ def find_Bragg_disks_aiml_selected(

try:
import crystal4D
except:
raise ImportError("Import Error: Please install crystal4D before proceeding")
except ModuleNotFoundError:
raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding")

assert len(Rx) == len(Ry)
peaks = []
Expand Down Expand Up @@ -433,8 +433,8 @@ def find_Bragg_disks_aiml_serial(

try:
import crystal4D
except:
raise ImportError("Import Error: Please install crystal4D before proceeding")
except ModuleNotFoundError:
raise ModuleNotFoundError("Import Error: Please install crystal4D before proceeding")

# Make the peaks PointListArray
# dtype = [('qx',float),('qy',float),('intensity',float)]
Expand Down Expand Up @@ -643,8 +643,8 @@ def find_Bragg_disks_aiml(
"""
try:
import crystal4D
except:
raise ImportError("Please install crystal4D before proceeding")
except ModuleNotFoundError:
raise ModuleNotFoundError("Please install crystal4D before proceeding")

def _parse_distributed(distributed):
import os
Expand Down Expand Up @@ -840,7 +840,8 @@ def _integrate_disks(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1):
disks.append(np.average(disk))
try:
disks = disks / max(disks)
except:
# TODO work out what exception would go here
except Exception:
pass
return (maxima_x, maxima_y, disks)

Expand Down Expand Up @@ -878,8 +879,8 @@ def _get_latest_model(model_path=None):

try:
import tensorflow as tf
except:
raise ImportError(
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Please install tensorflow before proceeding - please check "
+ "https://www.tensorflow.org/install"
+ "for more information"
Expand All @@ -891,8 +892,12 @@ def _get_latest_model(model_path=None):
if model_path is None:
try:
os.mkdir("./tmp")
except:
except FileExistsError:
pass
except Exception as e:
# TODO work out if I want to pass or raise
pass
# raise e
# download the json file with the meta data
download_file_from_google_drive("FCU-Net", "./tmp/model_metadata.json")
with open("./tmp/model_metadata.json") as f:
Expand All @@ -905,9 +910,13 @@ def _get_latest_model(model_path=None):
with open("./tmp/model_metadata_old.json") as f_old:
metaold = json.load(f_old)
file_id_old = metaold["file_id"]
except:
# TODO Double check this is correct Error
except FileNotFoundError:
file_id_old = file_id
except Exception:
file_id_old = file_id


if os.path.exists(file_path) and file_id == file_id_old:
print(
"Latest model weight is already available in the local system. Loading the model... \n"
Expand All @@ -921,7 +930,8 @@ def _get_latest_model(model_path=None):
download_file_from_google_drive(file_id, filename)
try:
shutil.unpack_archive(filename, "./tmp", format="zip")
except:
# TODO Work work what specific exception
except Exception:
pass
model_path = file_path
os.rename("./tmp/model_metadata.json", "./tmp/model_metadata_old.json")
Expand Down
10 changes: 6 additions & 4 deletions py4DSTEM/braggvectors/diskdetection_aiml_cuda.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,8 @@

try:
import tensorflow as tf
except:
raise ImportError(
except ModuleNotFoundError:
raise ModuleNotFoundError(
"Please install tensorflow before proceeding - please check "
+ "https://www.tensorflow.org/install"
+ "for more information"
Expand Down Expand Up @@ -637,7 +637,8 @@ def upsampled_correlation_cp(imageCorr, upsampleFactor, xyShift):
)
dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1])
dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0])
except:
# NOTE: `except A or B:` evaluates `A or B` first and catches only A;
# the parabolic refinement indexing here raises IndexError
except IndexError:
dx, dy = (
0,
0,
Expand Down Expand Up @@ -733,6 +734,7 @@ def _integrate_disks_cp(DP, maxima_x, maxima_y, maxima_int, int_window_radius=1)
disks.append(np.average(disk))
try:
disks = disks / max(disks)
except:
# TODO work out what exception to use
except Exception:
pass
return (maxima_x, maxima_y, disks)
3 changes: 2 additions & 1 deletion py4DSTEM/braggvectors/diskdetection_cuda.py
Original file line number Diff line number Diff line change
Expand Up @@ -618,7 +618,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift):
dy = (icc[1, 2] - icc[1, 0]) / (
4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0]
)
except:
# NOTE: `except A or B:` catches only A (`or` returns its first truthy operand);
# the 3x3 indexing here raises IndexError
except IndexError:
dx, dy = (
0,
0,
Expand Down
3 changes: 2 additions & 1 deletion py4DSTEM/datacube/virtualimage.py
Original file line number Diff line number Diff line change
Expand Up @@ -414,7 +414,8 @@ def position_detector(
try:
image = self.tree(k)
break
except:
# NOTE: `except A or B:` catches only A — a tuple is needed to catch both
except (KeyError, AttributeError):
pass
if image is None:
image = self[0, 0]
Expand Down
3 changes: 2 additions & 1 deletion py4DSTEM/io/filereaders/read_K2.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,8 @@ def __init__(
# this may be wrong for binned data... in which case the reader doesn't work anyway!
Q_Nx = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.height"]
Q_Ny = gtg.allTags[".SI Image Tags.Acquisition.Parameters.Detector.width"]
except:
# TODO check this is the correct error type
except ValueError:
print("Warning: diffraction pattern shape not detected!")
print("Assuming 1920x1792 as the diffraction pattern size!")
Q_Nx = 1792
Expand Down
3 changes: 2 additions & 1 deletion py4DSTEM/io/legacy/read_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,7 +100,8 @@ def get_N_dataobjects(filepath, topgroup="4DSTEM_experiment"):
N_pla = len(f[topgroup]["data/pointlistarrays"].keys())
try:
N_coords = len(f[topgroup]["data/coordinates"].keys())
except:
# NOTE: `except A or B or C:` catches only A; a missing h5py group raises
# KeyError, so it must be in the tuple for the N_coords = 0 fallback to work
except (KeyError, ValueError, AttributeError):
N_coords = 0
N_do = N_dc + N_cdc + N_ds + N_rs + N_pl + N_pla + N_coords
return N_dc, N_cdc, N_ds, N_rs, N_pl, N_pla, N_coords, N_do
2 changes: 1 addition & 1 deletion py4DSTEM/process/classification/featurization.py
Original file line number Diff line number Diff line change
Expand Up @@ -477,7 +477,7 @@ def get_class_DPs(self, datacube, method, thresh):
datacube.data.shape[2],
datacube.data.shape[3],
)
except:
except ValueError:
raise ValueError(
"Datacube must have same R_Nx and R_Ny dimensions as Featurization instance."
)
Expand Down
11 changes: 6 additions & 5 deletions py4DSTEM/process/polar/polar_peaks.py
Original file line number Diff line number Diff line change
Expand Up @@ -602,8 +602,8 @@ def refine_peaks_local(
self.peaks[rx, ry]["qr"][a0] = p0[2] / q_step
self.peaks[rx, ry]["sigma_annular"][a0] = p0[3] / t_step
self.peaks[rx, ry]["sigma_radial"][a0] = p0[4] / q_step

except:
# TODO work out what error is raised
except Exception:
pass

else:
Expand Down Expand Up @@ -643,8 +643,8 @@ def refine_peaks_local(
self.peaks[rx, ry]["qr"][a0] = p0[2] / q_step
self.peaks[rx, ry]["sigma_annular"][a0] = p0[3] / t_step
self.peaks[rx, ry]["sigma_radial"][a0] = p0[4] / q_step

except:
# TODO work out what exception is raised
except Exception:
pass


Expand Down Expand Up @@ -1044,7 +1044,8 @@ def fit_image(basis, *coefs):
),
name="peaks_polar",
)
except:
# TODO work out what exception is raised
except Exception:
# if fitting has failed, we will still output the last iteration
# TODO - add a flag for unconverged fits
coefs_peaks = np.reshape(coefs_all[(3 * num_rings + 3) :], (5, num_peaks)).T
Expand Down
3 changes: 2 additions & 1 deletion py4DSTEM/process/utils/multicorr.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,8 @@ def upsampled_correlation(imageCorr, upsampleFactor, xyShift, device="cpu"):
)
dx = (icc[2, 1] - icc[0, 1]) / (4 * icc[1, 1] - 2 * icc[2, 1] - 2 * icc[0, 1])
dy = (icc[1, 2] - icc[1, 0]) / (4 * icc[1, 1] - 2 * icc[1, 2] - 2 * icc[1, 0])
except:
# NOTE: `except A or B:` catches only A; the 3x3 indexing here raises IndexError
except IndexError:
dx, dy = (
0,
0,
Expand Down
5 changes: 3 additions & 2 deletions py4DSTEM/utils/configuration_checker.py
Original file line number Diff line number Diff line change
Expand Up @@ -304,7 +304,7 @@ def import_tester(m: str) -> bool:
# try and import the module
try:
importlib.import_module(m)
except:
except ModuleNotFoundError:
state = False

return state
Expand Down Expand Up @@ -391,7 +391,8 @@ def check_cupy_gpu(gratuitously_verbose: bool, **kwargs):
try:
d = cp.cuda.Device(i)
hasattr(d, "attributes")
except:
# NOTE: `except A or B:` catches only A; an out-of-range device index raises a
# cupy CUDA runtime error (not AttributeError), so catch broadly here
except Exception:
num_gpus_detected = i
break

Expand Down

0 comments on commit 53e8796

Please sign in to comment.