diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..c941c52
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2020 Maastricht University
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..23af8c0
--- /dev/null
+++ b/README.md
@@ -0,0 +1,7 @@
+## H5 file format description
+
+* `/clusters`: ToT and ToA pixel matrices, one `(2, n_pixels, n_pixels)` entry per cluster
+* `/trajectories/0-n`: simulated electron trajectory, one dataset per cluster
+* `/predictions/*`: one `(n, 2)` dataset of predicted incident positions per method
+* `/incidents`: ground-truth incident positions, `(n, 2)`
+* `/edges`: optional fitted edge lines (`[slope, intercept]`) for edge-scan data
\ No newline at end of file
diff --git a/browser.py b/browser.py
new file mode 100644
index 0000000..27e1d16
--- /dev/null
+++ b/browser.py
@@ -0,0 +1,229 @@
+import sys
+import h5py
+import tkinter as Tk
+from matplotlib.backends.backend_tkagg import (
+    FigureCanvasTkAgg, NavigationToolbar2Tk
+)
+import matplotlib.pyplot as plt
+# from keras.models import load_model
+
+from trajectories import plot_3dtrajectory
+from pixels import plot_pixels
+
+
+# from deeplearning import VisualiseConvLayer
+
+def _quit():
+    root.quit()
+    root.destroy()
+
+
+def go():
+    global slider, index_val
+
+    slider.set(int(index_val.get()))
+
+
+def set_plot(i):
+    global idx, slider, index_val
+
+    index_val.set(i)
+    idx = int(i)
+    show_plots(idx)
+
+
+def prev_plot():
+    global idx, slider
+
+    idx = idx - 1
+    slider.set(idx)
+
+
+def next_plot():
+    global idx, slider
+
+    idx = idx + 1
+    slider.set(idx)
+
+
+def get_pixel(i):
+    global pixels
+
+    if pixels is None:
+        return None
+
+    return pixels[i]
+
+
+def get_incident(i):
+    global incidents
+
+    if incidents is None:
+        return None
+
+    return incidents[i]
+
+
+def get_edges(i):
+    global edges
+
+    if edges is None:
+        return None
+
+    return edges[i]
+
+
+def get_trajectory(i):
+    global trajectories
+
+    if trajectories is None:
+        return None
+
+    return trajectories[str(i)][()]
+
+
+def get_prediction(i):
+    global predictions
+
+    if predictions is None:
+        return None
+
+    pred = dict()
+    for label in predictions:
+        pred[label] = predictions[label][i]
+
+    return pred
+
+
+# def get_actv(model, i, pixel):
+#     weights = VisualiseConvLayer.getWeightsLayer(model, 0)
+#     actvs = VisualiseConvLayer.get_activations(model, pixel.reshape(1, 2, 10, 10), layer_name='separable_conv2d_1')
+#
+#     return weights, actvs
+
+def show_plots(i):
+    global canvas, canvas_pix, sensor_height
+
+    pixel = get_pixel(i)
+    trajectory = get_trajectory(i)
+    prediction = get_prediction(i)
+    incident = get_incident(i)
+    # weights, actvs = get_actv(i, pixel)
+    edges = get_edges(i)
+
+    if trajectory is not None:
+        traj_fig.clear()
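+        # plot_3dtrajectory draws into the same Matplotlib figure that is embedded
+        # in the Tk window; canvas.draw() below pushes the update onto the screen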
+        plot_3dtrajectory.plot(traj_fig, trajectory, sensor_height)
+        canvas.draw()
+
+    if pixel is not None:
+        # Get pixels
+        pix_fig.clear()
+        plot_pixels.plot(pix_fig, pixel, incident, prediction, edges)
+        canvas_pix.draw()
+
+    # if actvs is not None:
+    #     # Get pixels
+    #     traj_fig.clear()
+    #     VisualiseConvLayer.display_activations(actvs, weights, traj_fig)
+    #     canvas.draw()
+
+
+# File
+filename = sys.argv[1]
+f = h5py.File(filename, 'r')
+
+pixels, predictions, trajectories, incidents, edges = None, None, None, None, None
+if 'clusters' in f:
+    pixels = f['clusters'][()]
+if 'trajectories' in f:
+    trajectories = f['trajectories']
+if 'predictions' in f:
+    predictions = f['predictions']
+if 'incidents' in f:
+    incidents = f['incidents'][()]
+if 'edges' in f:
+    edges = f['edges'][()]
+
+sensor_height = f.attrs['sensor_height'] if 'sensor_height' in f.attrs else 300000
+
+# Setup Tk
+root = Tk.Tk()
+root.wm_title("Trajectory Browser")
+graphs = Tk.Frame(root)
+
+# Setup info screen
+
+info = Tk.Frame(graphs)
+info.grid(row=0, column=0, sticky='N')
+Tk.Label(master=info, text="Information:", height=2).pack(side=Tk.TOP)
+
+attrs = f.attrs
+Tk.Label(master=info, text="Sensor height: %s" % attrs.get('sensor_height', 'N/A'), anchor='w').pack(side=Tk.TOP)
+Tk.Label(master=info, text="Sensor material: %s" % attrs.get('sensor_material', 'N/A'), anchor='w').pack(side=Tk.TOP,
+                                                                                                         fill='both')
+Tk.Label(master=info, text="Beam energy: %s" % attrs.get('beam_energy', 'N/A'), anchor='w').pack(side=Tk.TOP,
+                                                                                                 fill='both')
+Tk.Label(master=info, text="Source: %s" % attrs.get('data_source', 'N/A'), anchor='w').pack(side=Tk.TOP, fill='both')
+
+# Setup graphs
+
+# actv_fig = plt.figure()
+# canvas_actv = FigureCanvasTkAgg(actv_fig, master=graphs)
+# canvas_actv.draw()
+# toolbar_frame_actv = Tk.Frame(graphs)
+# toolbar = NavigationToolbar2Tk(canvas_actv, toolbar_frame_actv)
+# toolbar.update()
+# canvas_actv.get_tk_widget().grid(row=0, column=3)
+# toolbar_frame_actv.grid(row=1, column=3, sticky=Tk.W)
+
+traj_fig = plt.figure()
+canvas = FigureCanvasTkAgg(traj_fig, master=graphs)
+canvas.draw()
+toolbar_frame_traj = Tk.Frame(graphs)
+toolbar = NavigationToolbar2Tk(canvas, toolbar_frame_traj)
+toolbar.update()
+canvas.get_tk_widget().grid(row=0, column=2)
+toolbar_frame_traj.grid(row=1, column=2, sticky=Tk.W)
+
+pix_fig = plt.figure()
+canvas_pix = FigureCanvasTkAgg(pix_fig, master=graphs)
+canvas_pix.draw()
+toolbar_frame_pix = Tk.Frame(graphs)
+toolbar = NavigationToolbar2Tk(canvas_pix, toolbar_frame_pix)
+toolbar.update()
+canvas_pix.get_tk_widget().grid(row=0, column=1)
+toolbar_frame_pix.grid(row=1, column=1, sticky=Tk.W)
+
+# Controls
+controls = Tk.Frame(root)
+
+index_val = Tk.StringVar()
+# grid() returns None, so keep widget creation and layout separate, and avoid
+# shadowing the go() callback and the built-ins next/prev
+index_entry = Tk.Entry(master=controls, textvariable=index_val)
+index_entry.grid(row=0, column=2, sticky='S')
+go_btn = Tk.Button(master=controls, text='Go', command=go)
+go_btn.grid(row=0, column=3, sticky='S')
+prev_btn = Tk.Button(master=controls, text='Prev', command=prev_plot)
+prev_btn.grid(row=1, column=1, sticky='S')
+slider = Tk.Scale(
+    master=controls,
+    from_=0, to=(pixels.shape[0] - 1 if pixels is not None else 0),
+    orient=Tk.HORIZONTAL, length=400,
+    command=set_plot,
+    showvalue=0
+)
+slider.grid(row=1, column=2)
+next_btn = Tk.Button(master=controls, text='Next', command=next_plot)
+next_btn.grid(row=1, column=3, sticky='S')
+
+# Tk final setup
+root.protocol("WM_DELETE_WINDOW", _quit)
+
+graphs.pack(side=Tk.TOP)
+controls.pack(side=Tk.BOTTOM)
+
+# Optional second CLI argument selects the starting cluster index; sys.argv
+# holds strings, so test its length
+if len(sys.argv) > 2 and int(sys.argv[2]) > 0:
+    idx = int(sys.argv[2])
+else:
+    idx = 0
+
+set_plot(idx)
+slider.set(idx)
+
+Tk.mainloop()
diff --git
a/deeplearning/testing.py b/deeplearning/testing.py
new file mode 100644
index 0000000..b83ba2a
--- /dev/null
+++ b/deeplearning/testing.py
@@ -0,0 +1,130 @@
+import argparse
+import os
+import sys
+
+import h5py
+import numpy as np
+from keras.models import load_model
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
+from lib.constants import *
+
+
+def parse_arguments():
+    parser = argparse.ArgumentParser(
+        description=__doc__,  # printed with -h/--help
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+
+    parser.add_argument('FILE', help="Input .h5 dataset")
+    parser.add_argument("--model", metavar='FILE', help="Path to model")
+    parser.add_argument("--tot", default=False, action='store_true', help="Predict on ToT only")
+    parser.add_argument("--toa", default=False, action='store_true', help="Predict on ToA only")
+    parser.add_argument("--name", default='CNN', help="Name of the prediction dataset to write")
+    parser.add_argument("--experimental", default=False, action='store_true',
+                        help="Set to true to use experimental data")
+
+    settings = parser.parse_args()
+
+    return settings
+
+
+def predict(predictpath, model_path, tot, toa, predic):
+    f = h5py.File(predictpath, "a")
+    pixels = f['clusters'][()]
+    n = len(pixels)
+
+    # One input channel for ToT-only or ToA-only models, two otherwise
+    shape = 1 if (tot or toa) else 2
+
+    x_test = np.zeros((n, shape, n_pixels, n_pixels))
+
+    for i in range(n):
+        if not toa and not tot:
+            x_test[i, 0] = pixels[i, 0][0:n_pixels, 0:n_pixels]
+            x_test[i, 1] = np.nan_to_num(pixels[i, 1])[0:n_pixels, 0:n_pixels]
+        elif toa:
+            x_test[i, 0] = np.nan_to_num(pixels[i, 1])[0:n_pixels, 0:n_pixels]
+        elif tot:
+            x_test[i, 0] = pixels[i, 0][0:n_pixels, 0:n_pixels]
+
+    model = load_model(model_path)
+
+    # keras.utils.plot_model(model, to_file='test.ps', show_shapes=True, rankdir='TB')
+    # exit(0)
+
+    pred = model.predict(x_test, batch_size=n, verbose=1)
+
+    if "predictions" not in f:
+        predictions = f.create_group("predictions")
+    else:
+        predictions = f["predictions"]
+
+    # The network predicts in pixel units; scale back to detector units (pixel_size = 55000)
+    pred = pred * pixel_size
+
+    if predic in predictions:
+        del predictions[predic]
+
+    predictions.create_dataset(predic, data=pred)
+    f.close()
+
+
+def predict3(predictpath, model_path, tot, toa, predic):
+    f = h5py.File(predictpath, "a")
+    pixels = f['clusters'][()]
+    edges = f['edges'][()]
+    n = len(pixels)
+
+    shape = 1 if (tot or toa) else 2
+
+    x_test = np.zeros((n, shape, n_pixels, n_pixels))
+
+    for i in range(n):
+        if not toa and not tot:
+            x_test[i, 0] = pixels[i, 0][0:n_pixels, 0:n_pixels]
+            x_test[i, 1] = np.nan_to_num(pixels[i, 1])[0:n_pixels, 0:n_pixels]
+        elif toa:
+            x_test[i, 0] = np.nan_to_num(pixels[i, 1])[0:n_pixels, 0:n_pixels]
+        elif tot:
+            x_test[i, 0] = pixels[i, 0][0:n_pixels, 0:n_pixels]
+
+    # NOTE: loss_truth_mask is the custom loss these models were trained with; it
+    # must be imported or defined in this module for load_model to resolve it
+    model = load_model(model_path, custom_objects={"loss_truth_mask": loss_truth_mask})
+
+    pred = model.predict(x_test, batch_size=1000, verbose=1)
+
+    if "predictions" not in f:
+        predictions = f.create_group("predictions")
+    else:
+        predictions = f["predictions"]
+
+    pred = pred * pixel_size
+
+    if predic in predictions:
+        del predictions[predic]
+
+    predictions.create_dataset(predic, data=pred)
+    f.close()
+
+
+def main():
+    config = parse_arguments()
+
+    if config.experimental:
+        predict3(config.FILE, config.model, config.tot, config.toa, config.name)
+    else:
+        predict(config.FILE, config.model, config.tot, config.toa, config.name)
+
+
+if __name__ == "__main__":
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        sys.exit(0)
diff --git a/deeplearning/training.py b/deeplearning/training.py
new file mode 100644
index 0000000..682f9b4
--- /dev/null
+++ b/deeplearning/training.py
@@ -0,0 +1,201 @@
+import argparse
+import os
+import sys
+
+import keras
+from keras.layers import Dense, Dropout, Flatten, SeparableConv2D
+from keras.models import Sequential, load_model
+from keras.optimizers import Adam
+import h5py
+import numpy as np
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
+from lib.constants import *
+
+
+def parse_arguments():
+    parser = argparse.ArgumentParser(
+        description=__doc__,  # printed with -h/--help
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+
+    parser.add_argument('FILE', help="Input .h5 dataset")
+    parser.add_argument("-e", "--epochs", default=200, metavar='N', type=int, help="Number of epochs")
+    parser.add_argument("-b", "--batch_size", default=1000, metavar='N', type=int, help="Batch size for training")
+    parser.add_argument("--model", metavar='FILE', help="Path to model")
+    parser.add_argument("--tot", default=False, action='store_true', help="Train on ToT only")
+    parser.add_argument("--toa", default=False, action='store_true', help="Train on ToA only")
+    parser.add_argument("--append_model", default=False, action='store_true', help="Continue training model")
+    parser.add_argument("--dropout", default=0.25, type=float, help="Dropout rate")
+    parser.add_argument("--dense_size", default=512, type=int, help="Dense layer size")
+    parser.add_argument("--train_rate", default=0.7, metavar='N', type=float,
+                        help="Fraction of the data used for training (rest is test)")
+    parser.add_argument("--classic", default=False, action='store_true', help="Set to true to use a classic dense NN")
+    parser.add_argument("--minimal_cnn", default=False, action='store_true', help="Set to true to use a minimal CNN")
+    settings = parser.parse_args()
+
+    return settings
+
+
+def new_model(dropout, dense_size, tot, toa, path, loss="logcosh"):
+    # One input channel for ToT-only or ToA-only models, two otherwise
+    shape = 1 if (tot or toa) else 2
+
+    model = Sequential()
+    model.add(SeparableConv2D(64, (2, 2), padding='same', input_shape=(shape, n_pixels, n_pixels),
+                              data_format="channels_first", activation="relu"))
+    model.add(Dropout(dropout))
+    model.add(Flatten())
+    model.add(Dense(dense_size, activation="relu", kernel_initializer="uniform"))
+    model.add(Dropout(dropout))
+    model.add(Dense(dense_size, activation="relu", kernel_initializer="uniform"))
+    model.add(Dropout(dropout))
+    model.add(Dense(dense_size, activation="relu", kernel_initializer="uniform"))
+    model.add(Dropout(dropout))
+    model.add(Dense(2, activation="relu"))
+    adam = Adam(lr=0.001)
+    model.compile(loss=loss, optimizer=adam, metrics=['accuracy', 'mse'])
+    model.save(path)
+
+
+def minimal_cnn_model(dropout, dense_size, tot, toa, path, loss="logcosh"):
+    shape = 1 if (tot or toa) else 2
+
+    model = Sequential()
+    model.add(SeparableConv2D(64, (2, 2), padding='same', input_shape=(shape, n_pixels, n_pixels),
+                              data_format="channels_first", activation="relu"))
+    model.add(Dropout(dropout))
+    model.add(Flatten())
+    model.add(Dense(2, activation="relu"))
+
+    adam = Adam(lr=0.001)
+    model.compile(loss=loss, optimizer=adam, metrics=['accuracy', 'mse'])
+    model.save(path)
+
+
+def classic_model(dropout, dense_size, tot, toa, path, loss="logcosh"):
+    model = Sequential()
+    shape = 1 if (tot or toa) else 2
+
+    model.add(Dense(256, activation="relu", input_shape=(shape, n_pixels, n_pixels)))
+    model.add(Flatten())
+    model.add(Dense(128, activation="relu"))
+    model.add(Dense(64, activation="relu"))
+    model.add(Dense(32, activation="relu"))
+    model.add(Dense(16, activation="relu"))
+    model.add(Dense(8, activation="relu"))
+    model.add(Dense(4, activation="relu"))
+    model.add(Dense(2, activation="relu"))
+    adam = Adam(lr=0.001)
+
+    model.compile(loss=loss, optimizer=adam, metrics=['accuracy', 'mse'])
+    model.save(path)
+
+
+def train_data(pixels, coords, tot, toa, train_rate):
+    shape = 1 if (tot or toa) else 2
+
+    # Replace the NaNs (empty ToA pixels) with zeros
+    pixels = np.nan_to_num(pixels)
+
+    n = len(pixels)
+    indexList = range(n)
+    train_indices = np.random.choice(n, size=int(train_rate * n), replace=False)
+    test_indices = list(set(indexList).difference(train_indices))
+
+    x_train, y_train = np.zeros((len(train_indices), shape, n_pixels, n_pixels)), np.zeros((len(train_indices), 2))
+    x_test, y_test = np.zeros((len(test_indices), shape, n_pixels, n_pixels)), np.zeros((len(test_indices), 2))
+
+    if tot:
+        x_train[:, 0] = pixels[train_indices, 0]
+    elif toa:
+        x_train[:, 0] = pixels[train_indices, 1]
+    else:
+        x_train[:, 0] = pixels[train_indices, 0]
+        x_train[:, 1] = pixels[train_indices, 1]
+
+    # Targets are incident positions expressed in pixel units
+    y_train[:, 0] = coords[train_indices, 0] / pixel_size
+    y_train[:, 1] = coords[train_indices, 1] / pixel_size
+
+    if tot:
+        x_test[:, 0] = pixels[test_indices, 0]
+    elif toa:
+        x_test[:, 0] = pixels[test_indices, 1]
+    else:
+        x_test[:, 0] = pixels[test_indices, 0]
+        x_test[:, 1] = pixels[test_indices, 1]
+
+    y_test[:, 0] = coords[test_indices, 0] / pixel_size
+    y_test[:, 1] = coords[test_indices, 1] / pixel_size
+
+    return x_train, y_train, x_test, y_test
+
+
+def train(training_path, epochs, batch_size, tot, toa, model_path, train_rate):
+    model = load_model(model_path)
+    f = h5py.File(training_path, "r")
+    pix = f['clusters'][()]
+    coords = f['incidents'][()]
+
+    x_train, y_train, x_test, y_test = train_data(pix, coords, tot, toa, train_rate)
+    name = os.path.splitext(os.path.basename(model_path))[0]
+
+    history = keras.callbacks.TensorBoard(
+        log_dir="./tb/%s" % name, histogram_freq=25, batch_size=32, write_graph=True, write_grads=False,
+        write_images=False,
+        embeddings_freq=0, embeddings_layer_names=None, embeddings_metadata=None
+    )
+
+    model.fit(x_train, y_train, batch_size=batch_size,
+              epochs=epochs,
+              validation_data=(x_test, y_test),
+              shuffle=True,
+              # callbacks=[history]
+              )
+
+    score = model.evaluate(x_test, y_test, batch_size=200)
+    print("Test loss and metrics: %s" % score)
+
+    model.save(model_path)
+    model.summary()
+
+
+def main():
+    config = parse_arguments()
+
+    if not config.append_model:
+        if config.classic:
+            classic_model(config.dropout, config.dense_size, config.tot, config.toa, config.model)
+        elif config.minimal_cnn:
+            minimal_cnn_model(config.dropout, config.dense_size, config.tot, config.toa, config.model)
+        else:
+            new_model(config.dropout, config.dense_size, config.tot, config.toa, config.model)
+
+    train(config.FILE, config.epochs, config.batch_size, config.tot, config.toa, config.model, config.train_rate)
+
+
+if __name__ == "__main__":
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        sys.exit(0)
diff --git a/grid-foils/find-spots.py b/grid-foils/find-spots.py
new file mode 100644
index 0000000..af9442a
--- /dev/null
+++ b/grid-foils/find-spots.py
@@ -0,0 +1,70 @@
+import sys
+import numpy as np
+from PIL import Image
+from scipy import fftpack
+import matplotlib.pyplot as plt
+import scipy.ndimage.filters as filters
+import scipy.ndimage as ndimage
+
+img_file = sys.argv[1]
+
+# Read image
+src_im = Image.open(img_file)
+frame = np.array(src_im)
+
+##############
+# FFT of image
+# uint8 max (255)
+i8 = np.iinfo(np.uint8)
+
+# Zero-pad the image to 1024x1024; this helps the FFT and matches what ImageJ does
+frame_padded = np.pad(frame, ((0, 1024 - frame.shape[0]), (0, 1024 - frame.shape[1])), 'constant')
+
+# Take the fourier transform of the image.
+F1 = fftpack.fft2(frame_padded)
+# Now shift the quadrants around so that low spatial frequencies are in
+# the center of the 2D fourier transformed image.
+F2 = fftpack.fftshift(F1)
+# Calculate a 2D power spectrum
+psd2D = np.abs(F2) ** 2
+# Take the log10
+ps = np.log10(psd2D)
+
+# Fit in uint8 (this is what ImageJ also does)
+ps = i8.max * ps.astype(np.float64) / ps.max()
+
+#####################
+# Find suitable spots
+neighborhood_size = 15
+threshold = 80
+
+data_max = filters.maximum_filter(ps, neighborhood_size)
+maxima = (ps == data_max)
+data_min = filters.minimum_filter(ps, neighborhood_size)
+diff = ((data_max - data_min) > threshold)
+maxima[diff == 0] = 0
+
+labeled, num_objects = ndimage.label(maxima)
+xy = np.array(ndimage.center_of_mass(ps, labeled, range(1, num_objects + 1)))
+
+# Apply some filtering to get the best spots
+xy_filt = xy[np.bitwise_and(xy[:, 1] < 1000, xy[:, 1] > 550)]
+
+# Take a random but uniform sample (never more than the number of spots found)
+choices = xy_filt[np.random.choice(len(xy_filt), size=min(50, len(xy_filt)), replace=False)]
+np.set_printoptions(linewidth=np.inf)
+
+print("np.array([")
+for choice in choices:
+    print("    [%d, %d]," % (choice[0], choice[1]))
+print("])")
+
+
+fig, axes = plt.subplots(nrows=1)
+axes.imshow(ps, vmin=ps.min(), vmax=ps.max())
+axes.plot(xy_filt[:, 1], xy_filt[:, 0], 'ro')
+axes.plot(choices[:, 1], choices[:, 0], 'go')
+
+
+plt.show()
diff --git a/grid-foils/prep-image.py b/grid-foils/prep-image.py
new file mode 100644
index 0000000..a81a1ae
--- /dev/null
+++ b/grid-foils/prep-image.py
@@ -0,0 +1,119 @@
+import PIL
+import sys
+import numpy as np
+import os
+from PIL import Image, ImageFont
+from PIL import ImageDraw
+from scipy import fftpack
+import spot_locations
+
+if sys.argv[1] == "300":
+    spots = spot_locations.spots_300
+else:
+    spots = spot_locations.spots_200
+
+img_file = sys.argv[2]
+base = os.path.basename(img_file)
+img_file_no_ext = os.path.splitext(base)[0]
+
+# uint8 max (255)
+i8 = np.iinfo(np.uint8)
+
+# Read image
+src_im = Image.open(img_file)
+frame = np.array(src_im)
+
+##################
+# Convert to uint8
+if frame.max() > i8.max:
+    print("WARNING: Cannot fit in uint8. Scaling values to uint8 max.")
+    data = i8.max * frame.astype(np.float64) / frame.max()
+    frame_uint8 = data.astype(np.uint8)
+else:
+    frame_uint8 = frame.astype(dtype=np.uint8)
+
+# Save as uint8 image
+im_real = Image.fromarray(frame_uint8)
+# im.save(img_file_no_ext + '-uint8.tif')
+
+##############
+# FFT of image
+
+# Zero-pad the image to 1024x1024; this helps the FFT and matches what ImageJ does
+frame_padded = np.pad(frame, ((0, 1024 - frame.shape[0]), (0, 1024 - frame.shape[1])), 'constant')
+
+# Take the fourier transform of the image.
+F1 = fftpack.fft2(frame_padded)
+# Now shift the quadrants around so that low spatial frequencies are in
+# the center of the 2D fourier transformed image.
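+# (fftshift only reorders the quadrants for display; the spectrum itself is unchanged)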
+F2 = fftpack.fftshift(F1) +# Calculate a 2D power spectrum +psd2D = np.abs(F2) ** 2 +# Take the 10 log +ps = np.log10(psd2D) + +# Fit in uint8 (this is what ImageJ also does) +ps = i8.max * ps.astype(np.float64) / ps.max() + +# Invert the image. To make +ps = i8.max - ps + +# Threshold FFT +min1 = np.percentile(ps, 1) +max1 = np.percentile(ps, 9) +print("Min: %d" % min1) +print("Max: %d" % max1) +ps = np.clip(ps, min1, max1) + +# Rescale values +ps = np.interp(ps, (min1, max1), (0, i8.max)) + +im_fft = Image.fromarray(ps.astype(dtype=np.uint8)) + +################# +# Put all images together +margin = 2 +half = 258 +full = 516 + +# This RESIZES (or bins) the FFT to make it APPEAR to be the same size as the image +im_fft = im_fft.resize((516, 516), PIL.Image.NEAREST) +#im_fft.save(img_file_no_ext + '-fft.tif') + +# Create white image +new_im = Image.new('L', (full + margin, full)) +draw = ImageDraw.Draw(new_im) +draw.rectangle([(0, 0), new_im.size], fill=255) + +# Take insert +insert = im_fft.resize((258, 256), PIL.Image.NEAREST, (half + 130, half + 130, half + 130 + 64, half + 130 + 64)) + +# Draw spots +draw = ImageDraw.Draw(im_fft) +# Recalculate spot positions after resizing +spots = spots * 516/1024 +for spot in spots: + draw.ellipse((spot[1] - 5, spot[0] - 5, spot[1] + 5, spot[0] + 5), outline=0) + +# Draw insert +draw = ImageDraw.Draw(im_fft) +draw.rectangle([(130 + half, 130 + half), (130 + half + 64, 130 + half + 64)], outline=(0)) + +# Crops +im_fft_crop = im_fft.crop((half, 0, full, full)) +im_real_crop = im_real.crop((0, 0, half, half)) + +# Combine +new_im.paste(im_fft_crop, (half + margin, 0, full + margin, full)) +new_im.paste(im_real_crop, (0, 0, half, half)) +new_im.paste(insert, (0, half + margin, half, full)) + +# Text +fnt = ImageFont.truetype('Pillow/Tests/fonts/FreeMono.ttf', 20) +d = ImageDraw.Draw(new_im) +#d.text((5, 5), "real", font=fnt, fill=128) +d.text((half + 5, 5), "fft", font=fnt, fill=0) +d.text((5, half + 5), "zoom", font=fnt, fill=0) + +new_im.save(img_file_no_ext + '-prepped.tif') +# new_im.show() diff --git a/grid-foils/spot_locations.py b/grid-foils/spot_locations.py new file mode 100644 index 0000000..04b54ff --- /dev/null +++ b/grid-foils/spot_locations.py @@ -0,0 +1,190 @@ +import numpy as np + +# 300kv 20190812_eric GrateAU_50Mhit_THL100_THC6_300kV_iKrum10_000005 +spots_300 = np.array([ + [368, 744], + # [110, 746], + [295, 625], + [824, 577], + # [371, 836], + # [932, 968], + [871, 666], + [852, 580], + [581, 659], + [422, 779], + [747, 735], + [695, 923], + [795, 573], + [186, 557], + [623, 553], + [355, 855], + [559, 601], + [459, 951], + [235, 646], + [212, 838], + [499, 622], + [324, 629], + [275, 790], + [781, 683], + [448, 560], + [724, 676], + [674, 615], + # [512, 885], + [444, 837], + # [512, 960], + [884, 556], + [88, 656], + [463, 924], + [521, 680], + [311, 738], + [230, 924], + [607, 690], + [591, 577], + [617, 608], + [486, 731], + [584, 881], + [622, 803], + [498, 871], + [568, 769], + [673, 865], + [482, 759], + [467, 646], + [518, 707], + [85, 794], + [562, 824], +]) + +# 200kv - 20190826 - foil_000001.tpx3 +spots_200 = np.array([ + [552, 875], + [113, 763], + [779, 696], + # [780, 771], + [508, 752], + [451, 601], + [479, 691], + [447, 751], + # [836, 846], + [122, 566], + [298, 809], + [418, 811], + [90, 656], + [271, 568], + [391, 570], + [538, 753], + [268, 809], + [747, 816], + [630, 634], + [806, 877], + [1020, 580], + [211, 628], + [329, 689], + [731, 939], + [691, 605], + # [723, 799], + [541, 603], + [62, 565], + [270, 
689], + [570, 663], + # [881, 767], + [900, 638], + [211, 598], + # [83, 792], + [177, 866], + [297, 839], + [117, 836], + [390, 630], + [656, 934], + [717, 815], + [419, 751], + [751, 576], + [449, 691], + [59, 746], + [718, 755], + # [1021, 895], + [239, 748], + [477, 872], + [781, 576], + [777, 817], +]) + +# 200kv - ultra_au_225x - foil_000001.tpx3 +# spots = np.array([ +# [262, 292], +# [350, 298], +# [286, 340], +# [261, 412], +# [134, 496], +# [451, 285], +# [405, 462], +# [434, 424], +# [277, 373], +# [76, 452], +# [289, 493], +# [478, 367], +# [304, 475], +# [511, 360], +# [227, 416], +# [203, 368], +# [214, 315], +# [437, 304], +# [54, 284], +# [280, 426], +# [39, 303], +# [465, 386], +# #[401, 497], +# [199, 334], +# [57, 438], +# [299, 441], +# [271, 359], +# [52, 404], +# [67, 385], +# [338, 471], +# [148, 478], +# [344, 384], +# [86, 400], +# [12, 343], +# [154, 391], +# [174, 406], +# [301, 321], +# [403, 308], +# [267, 325], +# [348, 418], +# [182, 473], +# [146, 324], +# [88, 280], +# [410, 375], +# [73, 299], +# [282, 306], +# [127, 309], +# [369, 312], +# [58, 318], +# ]) +# 300kv - 20180605_eric - TOA-TOT-norm_100_00THL_20s_000000.tpx3 +# spots = np.array([ +# [135, 366], +# [361, 374], +# [283, 396], +# [297, 302], +# [187, 393], +# [129, 378], +# [264, 323], +# [90, 327], +# [283, 365], +# [103, 427], +# [309, 410], +# # [445, 278], +# [148, 371], +# [245, 277], +# [265, 301], +# [44, 350], +# [238, 388], +# [239, 295], +# [297, 284], +# [277, 409], +# [219, 331], +# [232, 432], +# [64, 344], +# [419, 358], +# [437, 384], +# ]) \ No newline at end of file diff --git a/grid-foils/spots_comparison.py b/grid-foils/spots_comparison.py new file mode 100644 index 0000000..e4c3644 --- /dev/null +++ b/grid-foils/spots_comparison.py @@ -0,0 +1,80 @@ +import sys +import numpy as np +from PIL import Image +from scipy import fftpack +import matplotlib.pyplot as plt +import spot_locations + +plt.rcParams.update({ + "font.size": 12, + "font.family": 'sans-serif', + "svg.fonttype": 'none' +}) + +if sys.argv[1] == "300": + spots = spot_locations.spots_300 +else: + spots = spot_locations.spots_200 + +frames = list() + +for idx, filename in enumerate(sys.argv[2:4]): + # Read image + src_im = Image.open(filename) + frame = np.array(src_im) + + if idx > 0: + # Normalize counts in image to the square of the counts + print(np.sum(frames[0]) / np.sum(frame)) + frame = frame * ( np.sum(frames[0] ) / np.sum(frame)) + + frames.append(frame) + + +def img_spot_values(frame, s): + # This zero pads the image to 1024x1024, this helps the FFT transform and is also done by ImageJ + frame_padded = np.pad(frame, ((0, 1024 - frame.shape[0]), (0, 1024 - frame.shape[1])), 'constant') + + # Take the fourier transform of the image. + F1 = fftpack.fft2(frame_padded) + # Now shift the quadrants around so that low spatial frequencies are in + # the center of the 2D fourier transformed image. 
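+    # The raw |F|^2 values are kept below (no log scaling): the enhancement computed
+    # later is a ratio of spot powers, and a log would turn that ratio into a difference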
+    F2 = fftpack.fftshift(F1)
+    # Calculate a 2D power spectrum
+    ps = np.abs(F2) ** 2
+
+    # Take the log10 (normally only done for display purposes, not needed here)
+    # ps = np.log10(psd2D)
+
+    values = []
+
+    for spot in s:
+        # print("x: %d, y: %d --> %d" % (spot[1], spot[0], ps[spot[0], spot[1]]))
+        values.append(ps[spot[0], spot[1]])
+
+    return np.array(values)
+
+
+fig, axes = plt.subplots(nrows=1, figsize=(5, 5))
+ref = img_spot_values(frames[0], spots)
+imp = img_spot_values(frames[1], spots)
+
+improvement = imp / ref
+
+for index, val in np.ndenumerate(improvement):
+    print('{} => {}'.format(spots[index], val))
+
+frequency = np.sqrt((spots[:, 0] - 512) ** 2 + (spots[:, 1] - 512) ** 2) / 512
+
+axes.scatter(frequency, improvement)
+axes.set_xlabel('Fraction of Nyquist')
+axes.set_ylabel('MTF enhancement')
+
+axes.grid()
+axes.set_xlim((0, 1))
+
+if len(sys.argv) > 4:
+    plt.savefig(sys.argv[4], dpi=300, bbox_inches='tight', pad_inches=0.1)
+
+plt.show()
diff --git a/lib/__init__.py b/lib/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/lib/constants.py b/lib/constants.py
new file mode 100644
index 0000000..259f149
--- /dev/null
+++ b/lib/constants.py
@@ -0,0 +1,9 @@
+n_pixels = 10
+pixel_size = 55000
+
+# Column indices into the raw hit arrays
+# TODO: Is this the most elegant way?
+CI_CHIP = 0
+CI_X = 1
+CI_Y = 2
+CI_SPIDR_TIME = 3
+CI_cTOA = 4
\ No newline at end of file
diff --git a/pixels/__init__.py b/pixels/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pixels/combine_freq_tot.py b/pixels/combine_freq_tot.py
new file mode 100644
index 0000000..0308fc1
--- /dev/null
+++ b/pixels/combine_freq_tot.py
@@ -0,0 +1,19 @@
+import sys
+import h5py
+import numpy as np
+
+# The last argument is the output file; all preceding arguments are input chunks
+w = h5py.File(sys.argv[-1], 'w')
+
+freq_tot = np.zeros((512 * 512, 1024), dtype='uint32')
+
+for idx, filename in enumerate(sys.argv[1:-1]):
+    f = h5py.File(filename, 'r')
+
+    freq_chunk = f['freq_tot'][()]
+    freq_tot += freq_chunk
+
+    f.close()
+
+w.create_dataset('freq_tot', (512 * 512, 1024), dtype='uint32', data=freq_tot)
+
+w.close()
\ No newline at end of file
diff --git a/pixels/filter_clusters.py b/pixels/filter_clusters.py
new file mode 100644
index 0000000..9b9eef9
--- /dev/null
+++ b/pixels/filter_clusters.py
@@ -0,0 +1,66 @@
+import sys
+import h5py
+import argparse
+import numpy as np
+
+
+def parse_arguments():
+    parser = argparse.ArgumentParser(
+        description=__doc__,  # printed with -h/--help
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+
+    parser.add_argument('FILE', help="Input .h5 dataset")
+    parser.add_argument("--minToT", default=0, metavar='N', type=int, help="Minimum ToT value")
+    parser.add_argument("--maxToT", default=700, metavar='N', type=int, help="Maximum ToT value")
+    parser.add_argument("--minCl", default=0, metavar='N', type=int, help="Minimum cluster size")
+    parser.add_argument("--maxCl", default=700, metavar='N', type=int, help="Maximum cluster size")
+    parser.add_argument("--save", help="Save location")
+
+    settings = parser.parse_args()
+
+    return settings
+
+
+def load_and_filter(doc, minToT, maxToT, minCl, maxCl, save):
+    f = h5py.File(doc, 'r')
+
+    clusters = f['clusters']
+    pixels = f['clusters'][:, 0, :]
+    incidents = f['incidents']
+    trajectories = f['trajectories']
+    size = list()
+    tot = list()
+    for pixel in pixels:
+        tot.append(np.sum(pixel))
+        size.append(np.count_nonzero(pixel))
+
+    filteredIndices = list()
+    for i in range(len(tot)):
+        if minToT <= tot[i] <= maxToT and minCl <= size[i] <= maxCl:
+            filteredIndices.append(i)
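+
+    # A vectorized alternative (assumes numpy >= 1.19 for tuple axes in count_nonzero):
+    #   tot = pixels.sum(axis=(1, 2))
+    #   size = np.count_nonzero(pixels, axis=(1, 2))
+    #   filteredIndices = np.where((tot >= minToT) & (tot <= maxToT) &
+    #                              (size >= minCl) & (size <= maxCl))[0]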
+    print('kept %d of %d clusters (%.1f%%) after the ToT and cluster-size cuts'
+          % (len(filteredIndices), len(clusters), 100.0 * len(filteredIndices) / len(clusters)))
+
+    newClusters = clusters[filteredIndices]
+    newIncidents = incidents[filteredIndices]
+    create_new_file(save, newClusters, newIncidents, trajectories, filteredIndices)
+
+
+def create_new_file(save_location, Xclusters, Xincidents, Xtrajectories, indices):
+    f = h5py.File(save_location, 'w')
+    f.create_dataset("clusters", (np.shape(Xclusters)))
+    f['clusters'][...] = Xclusters
+    f.create_dataset("incidents", (np.shape(Xincidents)))
+    f['incidents'][...] = Xincidents
+    g = f.create_group("/trajectories")
+    for i in range(len(indices)):
+        g.create_dataset(str(indices[i]), data=Xtrajectories[str(indices[i])])
+
+
+def main():
+    config = parse_arguments()
+    load_and_filter(config.FILE, config.minToT, config.maxToT, config.minCl, config.maxCl, config.save)
+
+
+if __name__ == "__main__":
+    try:
+        sys.exit(main())
+    except KeyboardInterrupt:
+        sys.exit(0)
diff --git a/pixels/parse_g4medipix.py b/pixels/parse_g4medipix.py
new file mode 100644
index 0000000..e2408f6
--- /dev/null
+++ b/pixels/parse_g4medipix.py
@@ -0,0 +1,78 @@
+import numpy as np
+import h5py
+import sys
+import os
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
+from lib.constants import *
+
+filename = sys.argv[1]
+
+f = h5py.File(filename, 'a')
+
+used_n_pixels = f.attrs['n_pixels']
+sensor_height = f.attrs['sensor_height']
+
+if 'incidents' in f:
+    del f['incidents']
+
+f.create_dataset("incidents", (len(f['g4medipix']), 2))
+
+pixels = f['g4medipix']
+
+if 'clusters' in f:
+    del f['clusters']
+
+f.create_dataset("clusters", (len(f['g4medipix']), 2, n_pixels, n_pixels))
+
+for idx in pixels:
+    # Read dataset
+    pixel = pixels[idx][()]
+
+    tot = pixel[0, :]
+    toa = pixel[1, :]
+
+    if np.count_nonzero(tot) == 0:
+        continue
+
+    # Calculate the shift to be taken from the ToT values
+    y, x = np.nonzero(tot)
+    shift_x = min(x)
+    shift_y = min(y)
+
+    # Apply shift to tot and toa matrices
+    tot = np.roll(tot, -shift_x, axis=1)
+    tot = np.roll(tot, -shift_y, axis=0)
+    toa = np.roll(toa, -shift_x, axis=1)
+    toa = np.roll(toa, -shift_y, axis=0)
+
+    # G4Medipix outputs a few zero values around clusters; convert those to NaN
+    toa[toa == 0] = np.nan
+
+    # Shift the lowest ToA value down to 0, since no absolute time offset is known,
+    # just like in the real data
+    toa = toa - np.nanmin(toa)
+
+    # Store values in resized matrix again
+    f['clusters'][int(idx), 0, :] = tot[0:n_pixels, 0:n_pixels]
+    f['clusters'][int(idx), 1, :] = toa[0:n_pixels, 0:n_pixels]
+
+    trajectory = f['trajectories'][str(idx)][()]
+
+    # G4medipix sets its origin in the middle of four pixels
+    trajectory[:, 1] = trajectory[:, 1] + (-1 + used_n_pixels / 2 - shift_x) * pixel_size
+    trajectory[:, 0] = trajectory[:, 0] + (-1 + used_n_pixels / 2 - shift_y) * pixel_size
+    # Invert trajectory, and increase with half sensor height
+    trajectory[:, 2] = trajectory[:, 2] * -1 + sensor_height / 2
+
+    # Store trajectory back
+    del f['trajectories'][str(idx)]
+    # Sort by time
+    f['trajectories'][str(idx)] = trajectory[trajectory[:, 4].argsort()]
+
+    # Calculate incident position
+    f['incidents'][int(idx)] = [trajectory[0, 0], trajectory[0, 1]]
+
+# Store new cluster size
+f.attrs['n_pixels'] = n_pixels
+
+del f['g4medipix']
diff --git a/pixels/plot_pixels.py b/pixels/plot_pixels.py
new file mode 100644
index 0000000..15eb8fc
--- /dev/null
+++ b/pixels/plot_pixels.py
@@ -0,0 +1,127 @@
+import h5py
+import matplotlib.pyplot as plt
+import sys
+import matplotlib
+import numpy as np
+from matplotlib import cm
+import matplotlib.lines as mlines
+from lib.constants import *
+
+
+def plot_incident_electron(ax, incident):
+    ax.plot(incident[1] / pixel_size, incident[0] / pixel_size, 's', color='red', label='Incident', markersize=10)
+
+
+def plot_prediction(ax, val, color, label):
+    ax.plot(val[1] / pixel_size, val[0] / pixel_size, 'o', color=color, label=label, markersize=10)
+
+
+def plot_predictions(ax, predictions):
+    colors = cm.rainbow(np.linspace(0, 1, len(predictions)))
+
+    for idx, pred in enumerate(predictions):
+        plot_prediction(ax, predictions[pred], colors[idx], pred)
+
+
+def plot(fig, pixel, incident, predictions, edge):
+    # Plot ToT
+    ax = fig.add_subplot(211)
+
+    # Convert to float32, as float16 cannot be displayed by imshow
+    # (https://github.com/matplotlib/matplotlib/issues/15432)
+    tot = pixel[0, :].astype(np.float32)
+
+    plt.imshow(tot, aspect='equal', interpolation='none', cmap='Greys_r', vmin=0, vmax=100,
+               extent=[0, n_pixels, n_pixels, 0])
+
+    # Plot edge line (edge = [slope, intercept])
+    if edge is not None:
+        # [xmin, xmax], [ymin, ymax]
+        y0 = 0
+        y1 = 10
+        x0 = (y0 - edge[1]) / edge[0]
+        x1 = (y1 - edge[1]) / edge[0]
+        l = mlines.Line2D([x0, x1], [y0, y1], color='green')
+        ax.add_line(l)
+
+    # Plot incident electrons
+    if incident is not None:
+        plot_incident_electron(ax, incident)
+
+    # Plot predictions if they exist
+    if predictions is not None:
+        plot_predictions(ax, predictions)
+
+    # Add legend
+    if predictions is not None or incident is not None:
+        plt.legend(bbox_to_anchor=(-1.2, 1), loc=2, numpoints=1)
+
+    # Hide axis
+    ax.xaxis.set_visible(False)
+    ax.yaxis.set_visible(False)
+    # Set title
+    plt.title('Time over Threshold')
+    # Add colorbar
+    cbar = plt.colorbar()
+    cbar.set_label('Clock ticks (A.U)', rotation=270)
+
+    # Plot ToA
+    ax = fig.add_subplot(212)
+
+    # Convert to float32, as float16 cannot be displayed by imshow
+    # (https://github.com/matplotlib/matplotlib/issues/15432)
+    toa = pixel[1, :].astype(np.float32)
+
+    # Plot edge line
+    if edge is not None:
+        # [xmin, xmax], [ymin, ymax]
+        y0 = 0
+        y1 = 10
+        x0 = (y0 - edge[1]) / edge[0]
+        x1 = (y1 - edge[1]) / edge[0]
+        l = mlines.Line2D([x0, x1], [y0, y1], color='green')
+        ax.add_line(l)
+
+    # Give pixels with NaN a different color
+    matplotlib.cm.Greys_r.set_bad('lightblue', 1.)
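+    # NaN pixels (no hit) are treated as "bad" values by imshow and drawn in light
+    # blue, so they can be told apart from pixels with ToA == 0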
+
+    plt.imshow(toa, aspect='equal', interpolation='none', cmap=matplotlib.cm.Greys_r,
+               vmin=0, vmax=10, extent=[0, n_pixels, n_pixels, 0])
+    # Plot incident electrons
+    if incident is not None:
+        plot_incident_electron(ax, incident)
+
+    # Plot predictions if they exist
+    if predictions is not None:
+        plot_predictions(ax, predictions)
+
+    # Hide axis
+    ax.xaxis.set_visible(False)
+    ax.yaxis.set_visible(False)
+    # Set title
+    plt.title('Delta Time of Arrival')
+    # Add colorbar
+    cbar = plt.colorbar()
+    cbar.set_label('Clock ticks (A.U)', rotation=270)
+
+
+if __name__ == "__main__":
+    filename = sys.argv[1]
+
+    f = h5py.File(filename, 'r')
+
+    pixel, predictions, incident, edge = None, None, None, None
+
+    if 'clusters' in f:
+        pixel = f['clusters'][int(sys.argv[2]), :, :]
+    if 'incidents' in f:
+        incident = f['incidents'][int(sys.argv[2]), :]
+    if 'edges' in f:
+        # A single row [slope, intercept]; double brackets would return a (1, 2) array
+        edge = f['edges'][int(sys.argv[2])]
+    if 'predictions' in f:
+        predictions = dict()
+        for pred in f['predictions']:
+            predictions[pred] = f['predictions'][pred][int(sys.argv[2])]
+
+    fig = plt.figure()
+
+    plot(fig, pixel, incident, predictions, edge)
+
+    plt.show()
diff --git a/pixels/sim-sample-output-prediction.py b/pixels/sim-sample-output-prediction.py
new file mode 100644
index 0000000..d9fe87e
--- /dev/null
+++ b/pixels/sim-sample-output-prediction.py
@@ -0,0 +1,133 @@
+import sys
+
+import matplotlib
+import matplotlib.pyplot as plt
+import h5py
+import os
+
+from matplotlib import colors
+from matplotlib import cm
+import numpy as np
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
+from pixels import plot_pixels
+from lib.constants import *
+
+# File
+filename = sys.argv[1]
+f = h5py.File(filename, 'r')
+clusters = f['clusters'][()]
+incidents = f['incidents'][()]
+predictions = f['predictions']
+
+prediction_order = [
+    'Random',
+    'Centroid',
+    'Highest ToA',
+    'Highest ToT',
+    'CNN-ToT',
+    'CNN-ToT-ToA'
+]
+
+# Get indices
+indices = list(map(int, sys.argv[2:5]))
+
+# Figure
+fig = plt.figure(figsize=(8, 4))
+fig.subplots_adjust(left=0.1, hspace=0.1, top=0.83)
+
+toa_cmap = matplotlib.cm.get_cmap('Greys_r')
+toa_cmap.set_bad('lightblue', 1.)
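+# Set1 provides up to 8 well-separated qualitative colours; the six methods in
+# prediction_order use the first six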
+color = cm.Set1(np.linspace(0, 1, 8))
+
+
+def hide_tick_marks(a):
+    # Hide the tick marks and labels on both axes, keeping only the grid
+    for axis in (a.xaxis, a.yaxis):
+        for tic in axis.get_major_ticks():
+            tic.tick2line.set_visible(False)
+            tic.tick1line.set_visible(False)
+            tic.label1.set_visible(False)
+            tic.label2.set_visible(False)
+
+
+for idx, i in enumerate(indices):
+    ax = fig.add_subplot(2, 4, idx + 2)
+
+    ax.set_title("Example " + str(idx + 1))
+
+    # Plot all elements (the LogNorm supplies vmin/vmax)
+    im_tot = ax.imshow(clusters[i, 0, 0:5, 0:5], aspect='equal', interpolation='none', cmap='Greys_r',
+                       extent=[0, 5, 5, 0], norm=colors.LogNorm(vmin=1, vmax=150))
+    plot_pixels.plot_incident_electron(ax, incidents[i])
+
+    # Plot predictions
+    for c, label in enumerate(prediction_order):
+        pred = predictions[label][i]
+        ax.plot(pred[1] / pixel_size, pred[0] / pixel_size, 'o', color=color[c], label=label, markersize=9)
+
+    # Show grid
+    ax.get_xaxis().set_ticks([0, 1, 2, 3, 4, 5])
+    ax.get_yaxis().set_ticks([0, 1, 2, 3, 4, 5])
+    ax.grid(True, which='both', color='white')
+
+    # Hide axis labels
+    hide_tick_marks(ax)
+
+    # ToA ########
+    ax = fig.add_subplot(2, 4, idx + 6)
+
+    # Plot all elements for ToA graph
+    im_toa = ax.imshow(clusters[i, 1, 0:5, 0:5], aspect='equal', interpolation='none', cmap=toa_cmap,
+                       vmin=0, vmax=40, extent=[0, 5, 5, 0])
+    plot_pixels.plot_incident_electron(ax, incidents[i])
+
+    for c, label in enumerate(prediction_order):
+        pred = predictions[label][i]
+        ax.plot(pred[1] / pixel_size, pred[0] / pixel_size, 'o', color=color[c], label=label, markersize=10)
+
+    # Show grid
+    ax.get_xaxis().set_ticks([0, 1, 2, 3, 4, 5])
+    ax.get_yaxis().set_ticks([0, 1, 2, 3, 4, 5])
+    ax.grid(True, which='both', color='white')
+
+    # Hide axis labels
+    hide_tick_marks(ax)
+
+
+# Get the left-most plots
+left_top = fig.axes[0]
+left_bottom = fig.axes[1]
+
+left_top.set_ylabel('ToT (A.U)', rotation=90, fontsize=12, labelpad=0)
+left_bottom.set_ylabel(r"$\Delta$ToA (A.U)", rotation=90, fontsize=12, labelpad=0)
+
+# Colorbars
+# [x0, y0, width, height]
+cax_tot = fig.add_axes([0.92, 0.49, 0.02, 0.34])
+cax_toa = fig.add_axes([0.92, 0.10, 0.02, 0.34])
+
+# noinspection PyUnboundLocalVariable
+cbar_tot = fig.colorbar(im_tot, cax=cax_tot, orientation='vertical')
+# cbar_tot.set_label('ToT (A.U)', rotation=270)
+# noinspection PyUnboundLocalVariable
+cbar_toa = fig.colorbar(im_toa, cax=cax_toa, orientation='vertical')
+# cbar_toa.set_label(r"$\Delta$ToA (A.U)", rotation=270)
+
+# Legend
+ax_legend = fig.add_subplot(2, 4, 1)
+ax_legend.axis("off")
+handles, labels = ax.get_legend_handles_labels()
+ax_legend.legend(handles=handles, labels=labels)
+
+if len(sys.argv) > 5:
+    plt.savefig(sys.argv[5], dpi=300, bbox_inches='tight', pad_inches=0.1)
+
+plt.show()
diff --git a/pixels/tot_correct.py b/pixels/tot_correct.py
new file mode 100644
index 0000000..c9a9784
--- /dev/null
+++ b/pixels/tot_correct.py
@@ -0,0 +1,43 @@
+import sys
+import h5py
+import matplotlib.pyplot as plt
+import numpy as np
+
+plt.rcParams.update({
+    "font.size": 12,
+    "font.family": 'sans-serif',
+    "svg.fonttype": 'none'
+})
+
+filename = sys.argv[1]
+tot = int(sys.argv[2])
+
+f = h5py.File(filename, 'r')
+
+data = f['tot_correction']
+
+print(data.attrs['creation_date'])
+
+d = data[tot, :]
+
+full = np.zeros((512, 512))
+
+full[0:256, 0:256] = np.fliplr(d[:, :, 2])
+full[256:512, 0:256] = np.flipud(d[:, :, 3])
+full[256:512, 256:512] = np.flipud(d[:, :, 0])
+full[0:256, 256:512] = np.flipud(d[:, :, 1])
+
+fig = plt.figure()
+im = plt.imshow(full, vmin=-50, vmax=70)
+
+ax = plt.gca()
+ax.xaxis.set_major_locator(plt.FixedLocator([512 - 1]))
+ax.yaxis.set_major_locator(plt.FixedLocator([512 - 1]))
+
+fig.colorbar(im, label='ToT correction value')
+
+if len(sys.argv) > 3:
+    plt.savefig(sys.argv[3], bbox_inches='tight', pad_inches=0.1)
+
+
+plt.show()
\ No newline at end of file
diff --git a/predictions/remove.py b/predictions/remove.py
new file mode 100644
index 0000000..82c5f81
--- /dev/null
+++ b/predictions/remove.py
@@ -0,0 +1,7 @@
+import sys
+import h5py
+
+filename = sys.argv[1]
+
+with h5py.File(filename, 'a') as f:
+    if 'predictions' in f:
+        del f['predictions']
diff --git a/predictions/simple.py b/predictions/simple.py
new file mode 100644
index 0000000..0592373
--- /dev/null
+++ b/predictions/simple.py
@@ -0,0 +1,132 @@
+import random
+import h5py
+from scipy import ndimage
+import numpy as np
+import os
+import sys
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
+from lib.constants import *
+
+filename = sys.argv[1]
+f = h5py.File(filename, 'a')
+
+highest_tot = "Highest ToT"
+highest_toa = "Highest ToA"
+weighted_centroid = "Centroid"
+# centroid = "Centroid"
+rand = "Random"
+
+# Raise on runtime warnings, instead of just printing them
+np.seterr(all='raise')
+
+# Get just the ToT pixels
+plots = f['clusters'][:, 0, :]
+
+if 'predictions' in f and highest_tot in f['predictions']:
+    del f['predictions'][highest_tot]
+
+num_plots = plots.shape[0]
+results = f.create_dataset("/predictions/%s" % highest_tot, (num_plots, 2))
+
+# Very naive Charge Sharing Mode (ToT): take the brightest pixel
+for idx, pixels in enumerate(plots):
+    i = pixels.argmax()
+    y, x = np.unravel_index(i, (n_pixels, n_pixels))
+
+    results[idx] = [(y + 0.5) * pixel_size, (x + 0.5) * pixel_size]
+
+# Highest ToA
+
+plots_toa = f['clusters'][:, 1, :]
+
+if 'predictions' in f and highest_toa in f['predictions']:
+    del f['predictions'][highest_toa]
+
+results = f.create_dataset("/predictions/%s" % highest_toa, (num_plots, 2))
+
+# Calculate highest_toa
+for idx, pixels in enumerate(plots_toa):
+
+    if np.nanmax(pixels) == 0:
+        # Get the ToT data, and find a pixel in there
+        nzy, nzx = np.nonzero(plots[idx])
+
+        if len(nzy) == 0:
+            continue
+        elif len(nzy) == 1:
+            y, x = nzy[0], nzx[0]
+        else:
+            # randint's upper bound is exclusive, so len(nzy) includes the last pixel
+            i = np.random.randint(len(nzy))
+            y, x = nzy[i], nzx[i]
+    else:
+        maxes = np.argwhere(pixels == np.nanmax(pixels))
+
+        if len(maxes) > 1:
+            i = np.random.randint(len(maxes))
+            y, x = maxes[i][0], maxes[i][1]
+        else:
+            y, x = maxes[0][0], maxes[0][1]
+
+    results[idx] = [(y + 0.5) * pixel_size, (x + 0.5) * pixel_size]
+
+# Weighted Centroid
+
+if 'predictions' in f and weighted_centroid in f['predictions']:
+    del f['predictions'][weighted_centroid]
+
+results = f.create_dataset("/predictions/%s" % weighted_centroid, (num_plots, 2))
+
+for idx, pixels in enumerate(plots):
+
+    try:
+        y, x = ndimage.measurements.center_of_mass(pixels)
+        results[idx] = [(y + 0.5) * pixel_size, (x + 0.5) * pixel_size]
+    except FloatingPointError:
+        print("Could not calculate center of mass (empty cluster?); cluster idx: %s" % idx)

+# Centroid
+
+# if 'predictions' in f and centroid in f['predictions']:
+#     del f['predictions'][centroid]
+#
+# results = f.create_dataset("/predictions/%s" % centroid, (num_plots, 2))
+#
+# for idx, pixels in enumerate(plots):
+#
+#     # Normalize all values to 1
+#     pixels[pixels > 0] = 1
+#
+#     try:
+#         y, x = ndimage.measurements.center_of_mass(pixels)
+#         results[idx] = [(y + 0.5) * pixel_size, (x + 0.5) * pixel_size]
+#     except FloatingPointError:
+#         print("Could not calculate center of mass (empty cluster?); cluster idx: %s" % idx)
+
+
+# Random
+
+if 'predictions' in f and rand in f['predictions']:
+    del f['predictions'][rand]
+
+results = f.create_dataset("/predictions/%s" % rand, (num_plots, 2))
+
+for idx, pixels in enumerate(plots):
+
+    nzy, nzx = np.nonzero(pixels)
+
+    if len(nzy) == 0:
+        print("Could not find random pixel (empty cluster?); cluster idx: %s" % idx)
+        continue
+    elif len(nzy) == 1:
+        y, x = nzy[0], nzx[0]
+    else:
+        i = np.random.randint(len(nzy))
+        y, x = nzy[i], nzx[i]
+
+    results[idx] = [(y + 0.5 + random.uniform(-0.5, 0.5)) * pixel_size,
+                    (x + 0.5 + random.uniform(-0.5, 0.5)) * pixel_size]
+
+
+f.flush()
+f.close()
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..3541348
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,6 @@
+keras
+scikit-image
+matplotlib
+scipy
+numpy
+h5py
\ No newline at end of file
diff --git a/statistics/cluster_tot_stats_combined.py b/statistics/cluster_tot_stats_combined.py
new file mode 100644
index 0000000..856bd96
--- /dev/null
+++ b/statistics/cluster_tot_stats_combined.py
@@ -0,0 +1,91 @@
+import argparse
+import h5py
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib.ticker import MultipleLocator
+
+plt.rcParams.update({
+    "font.size": 12,
+    "font.family": 'sans-serif',
+    "svg.fonttype": 'none'
+})
+
+
+def parse_arguments():
+    parser = argparse.ArgumentParser(
+        description=__doc__,  # printed with -h/--help
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+
+    parser.add_argument("--c200", metavar='FILE', help="200 kV corrected")
+    parser.add_argument("--u200", metavar='FILE', help="200 kV uncorrected")
+    parser.add_argument("--c300", metavar='FILE', help="300 kV corrected")
+    parser.add_argument("--u300", metavar='FILE', help="300 kV uncorrected")
+    parser.add_argument("--out", metavar='FILE', help="Output file")
+
+    settings = parser.parse_args()
+
+    return settings
+
+
+def plot(f, ax, label, style, color):
+    bins = np.arange(1, 600, 1)
+
+    with h5py.File(f, 'r') as h5f:
+        cluster_stats = h5f['cluster_stats'][()]
+        tot_summed = cluster_stats[:, 1]
+
+        y, edges = np.histogram(tot_summed, bins=bins, normed=True)
+        centers = 0.5 * (edges[1:] + edges[:-1])
+
+        hist = ax.plot(centers, y, label=label, linestyle=style, linewidth=2, color=color)
+
+        # Find the FWHM
+        hmx = half_max_x(centers, y)
+        fwhm = hmx[1] - hmx[0]
+        print("FWHM ({}): {:.3f}".format(label, fwhm))
+
+        return hist
+
+
+def lin_interp(x, y, i, half):
+    return x[i] + (x[i + 1] - x[i]) * ((half - y[i]) / (y[i + 1] - y[i]))
+
+
+def half_max_x(x, y):
+    half = max(y) / 2.0
+    signs = np.sign(np.add(y, -half))
+    zero_crossings = (signs[0:-2] != signs[1:-1])
+    zero_crossings_i = np.where(zero_crossings)[0]
+    return [lin_interp(x, y, zero_crossings_i[0], half),
+            lin_interp(x, y, zero_crossings_i[1], half)]
+
+
+config = parse_arguments()
+
+fig = plt.figure(dpi=200, figsize=(8, 4))
+ax = fig.add_subplot(111)
+
+if config.c200:
+    plot(config.c200, ax, '200 kV corrected', 'solid', 'C0')
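+
+# Line style encodes the correction (solid = corrected, dashed = uncorrected);
+# colour encodes the beam energy (C0 = 200 kV, C1 = 300 kV)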
+if config.u200:
+    plot(config.u200, ax, '200 kV uncorrected', '--', 'C0')
+
+if config.c300:
+    plot(config.c300, ax, '300 kV corrected', 'solid', 'C1')
+
+if config.u300:
+    plot(config.u300, ax, '300 kV uncorrected', '--', 'C1')
+
+plt.ylabel('Normalised occurrence')
+plt.xlabel('Cluster ToT sum (A.U)')
+plt.legend(loc='upper left')
+
+ax.xaxis.set_major_locator(MultipleLocator(50))
+plt.grid()
+
+if config.out:
+    plt.savefig(config.out, bbox_inches='tight')
+
+plt.show()
diff --git a/statistics/clusterinfo_2dhisto.py b/statistics/clusterinfo_2dhisto.py
new file mode 100644
index 0000000..eae9029
--- /dev/null
+++ b/statistics/clusterinfo_2dhisto.py
@@ -0,0 +1,63 @@
+import sys
+import os
+import h5py
+import numpy as np
+import matplotlib.pyplot as plt
+
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir)))
+
+plt.rcParams.update({
+    "font.size": 12,
+    "font.family": 'sans-serif',
+    "svg.fonttype": 'none'
+})
+
+fig = plt.figure(dpi=200)
+ax = fig.add_subplot(111)
+
+f = h5py.File(sys.argv[1], 'r')
+
+pixels = f['clusters'][:, 0, :]
+
+size = list()
+tot = list()
+for pixel in pixels:
+    tot.append(np.sum(pixel))
+    size.append(np.count_nonzero(pixel))
+
+# max_tot = np.percentile(tot, 99.99)
+max_tot = int(sys.argv[2])
+# max_size = np.percentile(size, 99.999)
+max_size = int(sys.argv[3])
+
+cmap = plt.get_cmap('viridis')
+cmap.set_under('w', 1)
+bins = [np.arange(0, max_tot, 25), np.arange(0, max_size, 1)]
+plt.hist2d(tot, size, cmap=cmap, vmin=0.000001, range=((0, max_tot), (0, max_size)), bins=bins, normed=True)
+
+# x-axis ticks
+xax = ax.get_xaxis()
+xax.set_major_locator(plt.MultipleLocator(50))
+xax.set_minor_locator(plt.MultipleLocator(25))
+xax.set_tick_params(colors='black', which='major')
+plt.xlabel('ToT Sum (A.U)')
+
+# y-axis ticks
+yax = ax.get_yaxis()
+yax.set_major_locator(plt.MultipleLocator(1))
+yax.set_tick_params(colors='black', which='major')
+
+ax.set_ylim(1)
+plt.ylabel('Cluster Size (pixels)')
+
+# Set grid
+plt.grid(b=True, which='both')
+
+# Colorbar
+cbar = plt.colorbar()
+cbar.set_ticks([])
+cbar.set_label('Normalised occurrence')
+
+if len(sys.argv) > 4:
+    plt.savefig(sys.argv[4], bbox_inches='tight')
+
+plt.show()
diff --git a/statistics/distance_incident_prediction.py b/statistics/distance_incident_prediction.py
new file mode 100644
index 0000000..0781bb0
--- /dev/null
+++ b/statistics/distance_incident_prediction.py
@@ -0,0 +1,72 @@
+import os
+import sys
+import h5py
+import numpy as np
+import matplotlib.pyplot as plt
+from matplotlib import ticker
+from matplotlib.pyplot import cm
+
+plt.rcParams.update({
+    "font.size": 15,
+    "font.family": 'sans-serif',
+    "svg.fonttype": 'none'
+})
+
+prediction_order = [
+    'Random',
+    'Centroid',
+    'Highest ToA',
+    'Highest ToT',
+    'CNN-ToT',
+    'CNN-ToT-ToA'
+]
+
+filename = sys.argv[1]
+f = h5py.File(filename, 'r')
+
+incidents = f['incidents'][()]
+
+sensor_height = f.attrs.get("sensor_height", "N/A")
+beam_energy = f.attrs.get("beam_energy", "N/A")
+sensor_material = f.attrs.get("sensor_material", "N/A")
+
+# Merge prediction order with any further prediction sets found in the file
+# (copy first, so the module-level list is not mutated)
+prediction_order_complete = list(prediction_order)
+prediction_order_complete.extend(x for x in list(f['predictions']) if x not in prediction_order_complete)
+
+
+def calculate_distance(prediction):
+    # Compute sqrt((x1 - x2)^2 + (y1 - y2)^2) for all clusters at once
+    diff = incidents - prediction
+    square = np.square(diff)
+    dist = np.sqrt(square[:, 0] + square[:, 1])
+
+    # Convert to pixel units (55 um pitch, the same value as lib.constants.pixel_size)
+    return np.divide(dist, 55000)
+
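+
+# For a single cluster the expression above is just the Euclidean
+# incident-to-prediction distance expressed in pixels:
+#   sqrt((x_inc - x_pred)**2 + (y_inc - y_pred)**2) / 55000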
+distances = dict()
+
+for pred in f['predictions']:
+    distances[pred] = calculate_distance(f['predictions'][pred])
+
+# Setup plots
+fig = plt.figure(figsize=(10, 4))
+
+ax = fig.add_subplot(111)
+ax.set_ylabel('RMS (pixel)')
+ax.yaxis.grid(True)
+ax.yaxis.set_major_locator(ticker.MultipleLocator(1))
+
+# Build plots
+boxes = []
+for pred in prediction_order_complete:
+    boxes.append(distances[pred])
+
+    print("%s: %f" % (pred, np.median(distances[pred])))
+
+ax.boxplot(boxes, labels=prediction_order_complete, showfliers=False)
+
+if len(sys.argv) > 2:
+    plt.savefig(sys.argv[2], dpi=300, bbox_inches='tight', pad_inches=0.1)
+
+plt.show()
diff --git a/statistics/super-resolution-distribution.py b/statistics/super-resolution-distribution.py
new file mode 100644
index 0000000..fdc6115
--- /dev/null
+++ b/statistics/super-resolution-distribution.py
@@ -0,0 +1,84 @@
+import sys
+import h5py
+import matplotlib.pyplot as plt
+import numpy as np
+import scipy.sparse
+from mpl_toolkits.axes_grid1 import make_axes_locatable
+from mpl_toolkits.axes_grid1.inset_locator import inset_axes
+
+super_resolution = 2
+shape = 516 * super_resolution
+
+plt.rcParams.update({
+    "font.size": 12,
+    "font.family": 'sans-serif',
+    "svg.fonttype": 'none'
+})
+
+
+def colorbar(mappable):
+    ax = mappable.axes
+    fig = ax.figure
+    divider = make_axes_locatable(ax)
+    cax = divider.append_axes("right", size="5%", pad=0.05)
+    return fig.colorbar(mappable, cax=cax)
+
+
+def imshow(e, a):
+    # Bin the event list into a counts frame at super resolution
+    x = e['x'] * super_resolution
+    y = e['y'] * super_resolution
+    data = np.ones(len(x))
+    d = scipy.sparse.coo_matrix((data, (y, x)), shape=(shape, shape), dtype=np.uint32)
+    frame = d.todense()
+
+    min5 = np.percentile(frame, 5)
+    max95 = np.percentile(frame, 95)
+
+    im = a.imshow(frame, vmin=min5, vmax=max95, cmap='gray')
+
+    # Look and feel
+    a.xaxis.set_major_locator(plt.FixedLocator([shape - 1]))
+    a.yaxis.set_major_locator(plt.FixedLocator([shape - 1]))
+
+    # Color bar
+    # colorbar(im)
+
+    return im
+
+
+def hist2d(e, a):
+    # Histogram of the sub-pixel remainders of the event positions
+    bins = np.arange(0, 1.1, 0.5)
+    remain_x = np.mod(e['x'], np.ones(len(e)))
+    remain_y = np.mod(e['y'], np.ones(len(e)))
+    hist, _, _, im = a.hist2d(remain_x, remain_y, bins=[bins, bins], cmin=0, normed=True)
+    im.set_clim(vmin=0)
+
+    # Look and feel
+    a.set_aspect('equal', adjustable='box')
+    a.set_axis_off()
+
+    # Plot values
+    for i in range(len(bins) - 1):
+        for j in range(len(bins) - 1):
+            a.text(bins[j] + 0.25, bins[i] + 0.25, "%.2f" % hist[i, j], color="black", ha="center",
+                   va="center", fontweight="bold")
+
+
+# Figure
+fig = plt.figure(figsize=(10, 8))
+
+# Without correction
+filename = sys.argv[1]
+f = h5py.File(filename, 'r')
+
+events = f['events'][()]
+ax = fig.add_subplot(111)
+imshow(events, ax)
+
+# Plot a 2D histogram of the sub-pixel distribution on an inset
+ax_inset = inset_axes(ax, width="30%", height="30%", loc=4, borderpad=2)
+hist2d(events, ax_inset)
+
+if len(sys.argv) > 2:
+    plt.savefig(sys.argv[2], bbox_inches='tight', pad_inches=0.1, pil_kwargs={'compression': 'tiff_deflate'})
+
+plt.show()
diff --git a/statistics/toa-pattern.py b/statistics/toa-pattern.py
new file mode 100644
index 0000000..386a994
--- /dev/null
+++ b/statistics/toa-pattern.py
@@ -0,0 +1,86 @@
+import sys
+import matplotlib.pyplot as plt
+import numpy as np
+from PIL import Image
+from scipy import fftpack
+
+
+def imshow(frame, a, title):
+    # Display the crop with a robust contrast stretch (1st to 99th percentile)
+    low = np.percentile(frame, 1)
+    high = np.percentile(frame, 99)
+
+    a.imshow(frame, vmin=low, vmax=high, cmap='gray')
+
+    # Look and feel
+    a.xaxis.set_major_locator(plt.FixedLocator([len(frame[0]) - 1]))
+    a.yaxis.set_major_locator(plt.FixedLocator([len(frame[0]) - 1]))
+    a.set_title(title)
+
+    return frame
+
+
+def psshow(f, a, title):
+    # Take the fourier transform of the image.
+    f1 = fftpack.fft2(f)
+
+    # Now shift the quadrants around so that low spatial frequencies are in
+    # the center of the 2D fourier transformed image.
+    f2 = fftpack.fftshift(f1)
+
+    # Calculate a 2D power spectrum
+    psd2D = np.abs(f2) ** 2
+
+    # Display at log10
+    psd2D_log = np.log10(psd2D)
+
+    low = np.percentile(psd2D_log, 5)
+    high = np.max(psd2D_log)
+
+    a.imshow(psd2D_log, vmin=low, vmax=high, cmap='gray')
+
+    # Look and feel
+    a.xaxis.set_major_locator(plt.FixedLocator([len(psd2D[0]) - 1]))
+    a.yaxis.set_major_locator(plt.FixedLocator([len(psd2D[0]) - 1]))
+    a.set_title(title)
+
+
+x = 10
+y = 10
+width = 128
+height = 128
+
+# Figure
+fig = plt.figure()
+plt.subplots_adjust(wspace=0.0)
+
+# CNN-ToT
+filename = sys.argv[1]
+src_im = Image.open(filename)
+im = np.array(src_im)
+
+ax = fig.add_subplot(221)
+imshow(im[y:y + height, x:x + width], ax, "CNN-ToT")
+
+ax = fig.add_subplot(222)
+psshow(im[y:y + height, x:x + width], ax, "CNN-ToT FFT")
+
+# CNN-ToT-ToA
+filename = sys.argv[2]
+src_im = Image.open(filename)
+im = np.array(src_im)
+
+ax = fig.add_subplot(223)
+imshow(im[y:y + height, x:x + width], ax, "CNN-ToT-ToA")
+
+ax = fig.add_subplot(224)
+psshow(im[y:y + height, x:x + width], ax, "CNN-ToT-ToA FFT")
+
+
+# Save to file
+if len(sys.argv) > 3:
+    plt.savefig(sys.argv[3], dpi=300, bbox_inches='tight', pad_inches=0.1)
+
+plt.show()
diff --git a/trajectories/plot_3dtrajectory.py b/trajectories/plot_3dtrajectory.py
new file mode 100644
index 0000000..1a68555
--- /dev/null
+++ b/trajectories/plot_3dtrajectory.py
@@ -0,0 +1,70 @@
+import matplotlib.pyplot as plt
+import numpy as np
+from mpl_toolkits.mplot3d import Axes3D
+import h5py
+import sys
+from lib.constants import *
+
+
+def plot(fig, trajectory, sensor_height):
+    # Keep only the part of the trajectory inside the sensor (below the entry surface)
+    traj_inside = trajectory[trajectory[:, 2] < sensor_height]
+
+    ax = fig.add_subplot(111, projection='3d')  # type:Axes3D
+    ax.view_init(elev=20., azim=32)
+
+    # Plot track
+    ax.plot(traj_inside[:, 0], traj_inside[:, 1], traj_inside[:, 2])
+    # Plot incident electron
+    print("Traj incident: %f, %f" % (traj_inside[0, 1] / pixel_size, traj_inside[0, 0] / pixel_size))
+    ax.plot([traj_inside[0, 0]], [traj_inside[0, 1]], [sensor_height], 'ro')
+
+    # Set axes scale
+    ax.set_xlim3d(0, n_pixels * pixel_size)
+    ax.set_ylim3d(0, n_pixels * pixel_size)
+    ax.set_zlim3d(0, sensor_height)
+
+    # Set tick lines to pixel size
+    xedges = np.arange(0, n_pixels * pixel_size, pixel_size)
+    yedges = np.arange(0, n_pixels * pixel_size, pixel_size)
+    zedges = np.arange(0, sensor_height, 100000)
+    ax.set_yticks(yedges, minor=False)
+    ax.yaxis.grid(True, which='major')
+    ax.yaxis.set_ticklabels([])
+    ax.set_xticks(xedges, minor=False)
+    ax.xaxis.grid(True, which='major')
+    ax.xaxis.set_ticklabels([])
+    ax.set_zticks(zedges, minor=False)
+    ax.zaxis.grid(True, which='major')
+
+    # Change background color
+    ax.xaxis.set_pane_color((1.0, 1.0, 1.0, 1.0))
+    ax.yaxis.set_pane_color((1.0, 1.0, 1.0, 1.0))
+    ax.zaxis.set_pane_color((1.0, 1.0, 1.0, 1.0))
+
+    return fig, ax
+
+
+if __name__ == "__main__":
+    filename = sys.argv[1]
+
+    f = h5py.File(filename, 'r')
+
+    # Get trajectory
+    trajectory = f['trajectories'][sys.argv[2]][()]
+    height = f.attrs['sensor_height']
+
+    fig, ax = plot(plt.figure(), trajectory, height)
+
+    # from matplotlib import animation
+    # def animate(i):
+    #     ax.view_init(30, i)
+    #     plt.draw()
+    #     plt.pause(.001)
+    # # Animate
+    # anim = animation.FuncAnimation(fig, animate,
+    #                                frames=360, interval=20)
+    # # Save
+    # anim.save('basic_animation.mp4', fps=30, extra_args=['-vcodec', 'libx264'])
+
+    plt.show()
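+
+    # Example invocation (assuming it is run from the repository root, so that
+    # lib.constants can be imported; the second argument is the trajectory index):
+    #   python trajectories/plot_3dtrajectory.py dataset.h5 0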