Merge branch 'dev'
bsavitzky committed Jul 25, 2024
2 parents 532f6fc + b9e125b commit 080cebe
Showing 32 changed files with 4,359 additions and 1,007 deletions.
87 changes: 0 additions & 87 deletions .github/workflows/pypi_upload.yml

This file was deleted.

2 changes: 1 addition & 1 deletion README.md
@@ -1,7 +1,7 @@

> :warning: **py4DSTEM version 0.14 update** :warning: Warning: this is a major update and we expect some workflows to break. You can still install previous versions of py4DSTEM [as discussed here](#legacyinstall)
> :warning: **Phase retrieval refactor version 0.14.9** :warning: Warning: The phase-retrieval modules in py4DSTEM (DPC, parallax, and ptychography) underwent a major refactor in version 0.14.9 and as such older tutorial notebooks will not work as expected. Notably, class names have been pruned to remove the trailing "Reconstruction" (`DPCReconstruction` -> `DPC` etc.), and regularization functions have dropped the `_iter` suffix (and are instead specified as boolean flags). We are working on updating the tutorial notebooks to reflect these changes. In the meantime, there's some more information in the relevant pull request [here](https://github.com/py4dstem/py4DSTEM/pull/597#issuecomment-1890325568).
> :warning: **Phase retrieval refactor version 0.14.9** :warning: Warning: The phase-retrieval modules in py4DSTEM (DPC, parallax, and ptychography) underwent a major refactor in version 0.14.9 and as such older tutorial notebooks will not work as expected. Notably, class names have been pruned to remove the trailing "Reconstruction" (`DPCReconstruction` -> `DPC` etc.), and regularization functions have dropped the `_iter` suffix (and are instead specified as boolean flags). See the [updated tutorials](https://github.com/py4dstem/py4DSTEM_tutorials) for more information.
![py4DSTEM logo](/images/py4DSTEM_logo.png)

16 changes: 13 additions & 3 deletions py4DSTEM/__init__.py
@@ -1,6 +1,9 @@
from py4DSTEM.version import __version__
from emdfile import tqdmnd

from importlib.metadata import packages_distributions

is_package_lite = "py4DSTEM-lite" in packages_distributions()["py4DSTEM"]

### io

@@ -52,8 +55,11 @@
BraggVectorMap,
)

from py4DSTEM.process import classification

try:
from py4DSTEM.process import classification
except (ImportError, ModuleNotFoundError) as exc:
if not is_package_lite:
raise exc

# diffraction
from py4DSTEM.process.diffraction import Crystal, Orientation
@@ -70,7 +76,11 @@
# strain
from py4DSTEM.process.strain.strain import StrainMap

from py4DSTEM.process import wholepatternfit
try:
from py4DSTEM.process import wholepatternfit
except (ImportError, ModuleNotFoundError) as exc:
if not is_package_lite:
raise exc


### more submodules
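The guard added above is the core of the new lite-install support: `packages_distributions()` (available in `importlib.metadata` from Python 3.10) maps top-level import names to the distributions that provide them, so py4DSTEM can detect at import time whether it was installed as `py4DSTEM-lite` and silently skip submodules whose optional dependencies are missing. A minimal, self-contained sketch of the pattern (the `.get(...)` fallback is added for illustration and is not part of the diff above):

```python
# Minimal sketch of the optional-import guard; not the actual py4DSTEM source.
from importlib.metadata import packages_distributions

# packages_distributions() maps top-level import names to the distributions
# providing them, so an import named "py4DSTEM" supplied by the
# "py4DSTEM-lite" distribution can be detected without importing anything heavy.
is_package_lite = "py4DSTEM-lite" in packages_distributions().get("py4DSTEM", [])

try:
    from py4DSTEM.process import classification  # needs optional dependencies
except (ImportError, ModuleNotFoundError) as exc:
    # In a lite install the optional dependencies are simply absent, so the
    # failure is swallowed; a full install should never hit this branch.
    if not is_package_lite:
        raise exc
```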
8 changes: 7 additions & 1 deletion py4DSTEM/braggvectors/diskdetection.py
@@ -5,12 +5,18 @@
from scipy.ndimage import gaussian_filter

from emdfile import tqdmnd
from py4DSTEM import is_package_lite
from py4DSTEM.braggvectors.braggvectors import BraggVectors
from py4DSTEM.data import QPoints
from py4DSTEM.datacube import DataCube
from py4DSTEM.preprocess.utils import get_maxima_2D
from py4DSTEM.process.utils.cross_correlate import get_cross_correlation_FT
from py4DSTEM.braggvectors.diskdetection_aiml import find_Bragg_disks_aiml

try:
from py4DSTEM.braggvectors.diskdetection_aiml import find_Bragg_disks_aiml
except (ImportError, ModuleNotFoundError) as exc:
if not is_package_lite:
raise exc


def find_Bragg_disks(
5 changes: 3 additions & 2 deletions py4DSTEM/datacube/virtualimage.py
@@ -5,7 +5,6 @@
# for bragg virtual imaging methods, goto diskdetection.virtualimage.py

import numpy as np
import dask.array as da
from typing import Optional
import inspect

@@ -220,7 +219,9 @@ def get_virtual_image(
virtual_image[rx, ry] = np.sum(self.data[rx, ry] * mask)

# dask
if dask is True:
if dask:
import dask.array as da

# set up a generalized universal function for dask distribution
def _apply_mask_dask(self, mask):
virtual_image = np.sum(
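Moving `import dask.array as da` from module scope into the `if dask:` branch makes dask an optional dependency: it is only imported when a dask-backed virtual image is actually requested. A minimal sketch of the deferred-import pattern, with illustrative function and argument names rather than the actual py4DSTEM signature:

```python
# Illustrative sketch of the deferred-import pattern; names are hypothetical.
import numpy as np

def virtual_image_sum(data, mask, use_dask=False):
    """Sum a detector mask over the diffraction dimensions of a 4D array."""
    if use_dask:
        # dask is imported only when needed, so environments without dask
        # can still import and use the module's NumPy path.
        import dask.array as da

        darr = da.from_array(data, chunks=("auto", "auto", -1, -1))
        return (darr * mask).sum(axis=(-2, -1)).compute()
    # plain NumPy path
    return (data * mask).sum(axis=(-2, -1))
```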
10 changes: 8 additions & 2 deletions py4DSTEM/io/filereaders/__init__.py
@@ -1,6 +1,12 @@
from py4DSTEM import is_package_lite
from py4DSTEM.io.filereaders.empad import read_empad
from py4DSTEM.io.filereaders.read_dm import read_dm
from py4DSTEM.io.filereaders.read_K2 import read_gatan_K2_bin
from py4DSTEM.io.filereaders.empad import read_empad
from py4DSTEM.io.filereaders.read_mib import load_mib
from py4DSTEM.io.filereaders.read_arina import read_arina

try:
from py4DSTEM.io.filereaders.read_arina import read_arina
except (ImportError, ModuleNotFoundError) as exc:
if not is_package_lite:
raise exc
from py4DSTEM.io.filereaders.read_abTEM import read_abTEM
3 changes: 2 additions & 1 deletion py4DSTEM/io/importfile.py
@@ -7,7 +7,6 @@
from py4DSTEM.io.filereaders import (
load_mib,
read_abTEM,
read_arina,
read_dm,
read_empad,
read_gatan_K2_bin,
@@ -90,6 +89,8 @@ def import_file(
elif filetype == "mib":
data = load_mib(filepath, mem=mem, binfactor=binfactor, **kwargs)
elif filetype == "arina":
from py4DSTEM.io.filereaders import read_arina

data = read_arina(filepath, mem=mem, binfactor=binfactor, **kwargs)
elif filetype == "abTEM":
data = read_abTEM(filepath, mem=mem, binfactor=binfactor, **kwargs)
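`read_arina` is likewise dropped from the module-level import list and pulled in only inside the `filetype == "arina"` branch, so its dependencies are never touched unless an Arina file is actually loaded. A minimal sketch of call-site importing in a dispatcher; the dispatcher itself is illustrative, and only `read_arina` comes from this diff:

```python
# Illustrative dispatcher showing a call-site (lazy) import of one reader.
def load_by_filetype(filepath, filetype, **kwargs):
    if filetype == "arina":
        # Deferred import: the optional reader (and whatever it depends on)
        # is only loaded when an Arina file is requested.
        from py4DSTEM.io.filereaders import read_arina
        return read_arina(filepath, **kwargs)
    raise ValueError(f"unsupported filetype: {filetype}")
```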
16 changes: 14 additions & 2 deletions py4DSTEM/process/__init__.py
@@ -1,9 +1,21 @@
from py4DSTEM import is_package_lite
from py4DSTEM.process.polar import PolarDatacube
from py4DSTEM.process.strain.strain import StrainMap

from py4DSTEM.process import phase
from py4DSTEM.process import calibration
from py4DSTEM.process import utils
from py4DSTEM.process import classification

try:
from py4DSTEM.process import classification
except (ImportError, ModuleNotFoundError) as exc:
if not is_package_lite:
raise exc

from py4DSTEM.process import diffraction
from py4DSTEM.process import wholepatternfit

try:
from py4DSTEM.process import wholepatternfit
except (ImportError, ModuleNotFoundError) as exc:
if not is_package_lite:
raise exc
7 changes: 7 additions & 0 deletions py4DSTEM/process/calibration/origin.py
@@ -226,6 +226,7 @@ def get_origin(
dp_max=None,
mask=None,
fast_center=False,
remove_NaN=False,
):
"""
Find the origin for all diffraction patterns in a datacube, assuming (a) there is no
@@ -253,6 +254,8 @@
arrays are returned for qx0,qy0
fast_center: (bool)
Skip the center of mass refinement step.
remove_NaN: (bool)
If True, NaN values are replaced with the mean of the non-NaN values
Returns:
(2-tuple of (R_Nx,R_Ny)-shaped ndarrays): the origin, (x,y) at each scan position
@@ -317,6 +320,10 @@
else:
qx0.mask, qy0.mask = True, True

if remove_NaN:
qx0[np.isnan(qx0)] = np.mean(qx0[~np.isnan(qx0)])
qy0[np.isnan(qy0)] = np.mean(qy0[~np.isnan(qy0)])

# return
mask = np.ones(datacube.Rshape, dtype=bool)
return qx0, qy0, mask
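The new `remove_NaN` flag replaces any NaN entries in the fitted origin maps (`qx0`, `qy0`) with the mean of the non-NaN entries. A standalone NumPy illustration of the same replacement, using toy data rather than the actual masked arrays from `get_origin`:

```python
# Standalone illustration of the remove_NaN behavior added to get_origin:
# NaN entries are replaced by the mean of the non-NaN entries.
import numpy as np

qx0 = np.array([[1.0, 2.0], [np.nan, 4.0]])
qx0[np.isnan(qx0)] = np.mean(qx0[~np.isnan(qx0)])  # equivalent to np.nanmean(qx0)
print(qx0)
# [[1.         2.        ]
#  [2.33333333 4.        ]]
```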