Skip to content

Commit

Permalink
Browse image (#22)
Browse files Browse the repository at this point in the history
* add browse image scaling

* added normalization, resizing, and save to disk

* make output name optional

* black formatting

* correct output file type

* pre-commit changes

* global dataset choices

* [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci

* pydocstyle fix

* fix as_list and add more documentation

* docstrings

* fix error in browse
make dataset info and names importable

* fix import

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
  • Loading branch information
LiangJYu and pre-commit-ci[bot] authored Nov 29, 2023
1 parent 5c6ed78 commit 50b7893
Show file tree
Hide file tree
Showing 3 changed files with 288 additions and 59 deletions.
171 changes: 160 additions & 11 deletions src/disp_s1/browse_image.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,35 +2,184 @@

from __future__ import annotations

import argparse

import h5netcdf
import numpy as np
import scipy
from dolphin._types import Filename
from numpy.typing import ArrayLike
from PIL import Image

from .product_info import DISP_PRODUCT_NAMES


def _normalize_apply_gamma(arr: ArrayLike, gamma=1.0) -> np.ndarray:
"""Normalize to [0-1] and gamma correct an image array.
Parameters
----------
arr: np.ndarray
Numpy array representing an image to be normalized and gamma corrected.
gamma: float
Exponent value used to gamma correct image.
Returns
-------
arr: ArrayLike
Normalized and gamma corrected image.
"""
vmin = np.nanmin(arr)
vmax = np.nanmax(arr)

# scale to 0-1 for gray scale and then apply gamma correction
arr = (arr - vmin) / (vmax - vmin)

# scale to 0-1 for gray scale and then apply gamma correction
if gamma != 1.0:
arr = np.power(arr, gamma)

return arr


def _resize_to_max_pixel_dim(arr: ArrayLike, max_dim_allowed=2048) -> np.ndarray:
"""Scale shape of a given array.
Scale up or down length and width of an array to a maximum dimension
where the larger of length or width used to compute scaling ratio.
Parameters
----------
arr: ArrayLike
Numpy array representing an image to be resized.
max_dim_allowed: int
Maximum dimension allowed for either length or width.
Returns
-------
arr: ArrayLike
Numpy array representing a resized image.
"""
if max_dim_allowed < 1:
raise ValueError(f"{max_dim_allowed} is not a valid max image dimension")

# compute scaling ratio based on larger dimension
input_shape = arr.shape
scaling_ratio = max([max_dim_allowed / xy for xy in input_shape])

# set NaNs set to 0 to correctly interpolate with zoom
arr[np.isnan(arr)] = 0

# scale original shape by scaling ratio
arr = scipy.ndimage.zoom(arr, scaling_ratio)

return arr


def _save_to_disk_as_greyscale(arr: ArrayLike, fname: Filename) -> None:
    """Write a normalized image array to disk as a greyscale PNG.

    Parameters
    ----------
    arr: ArrayLike
        Numpy array representing an image, normalized to [0, 1], to be saved
        to a png file.
    fname: Filename
        File name of output browse image.
    """
    # map [0, 1] onto [1, 255]; pixel value 0 is reserved for transparency
    scaled = np.uint8(arr * (254)) + 1

    # write out as a single-channel greyscale ('L' mode) image
    img = Image.fromarray(scaled, mode="L")
    img.save(fname, transparency=0)

def make_browse_image_from_arr(
    output_filename: Filename,
    arr: ArrayLike,
    max_dim_allowed: int = 2048,
) -> None:
    """Create a PNG browse image for the output product from given array.

    Parameters
    ----------
    output_filename : Filename
        Name of output PNG
    arr : ArrayLike
        Array to be saved to image
    max_dim_allowed : int, default = 2048
        Size (in pixels) of the maximum allowed dimension of output image.
        Image gets rescaled with same aspect ratio.
    """
    # normalize non-NaN pixels to 0-1
    arr = _normalize_apply_gamma(arr)

    # compute browse shape and resize full size array to it
    arr = _resize_to_max_pixel_dim(arr, max_dim_allowed)

    _save_to_disk_as_greyscale(arr, output_filename)


def make_browse_image_from_nc(
    output_filename: Filename,
    input_filename: Filename,
    dataset_name: str,
    max_dim_allowed: int = 2048,
) -> None:
    """Create a PNG browse image for the output product from product in NetCDF file.

    Parameters
    ----------
    output_filename : Filename
        Name of output PNG
    input_filename : Filename
        Name of input NetCDF file.
    dataset_name: str
        Name of dataset to be made into a browse image.
    max_dim_allowed : int, default = 2048
        Size (in pixels) of the maximum allowed dimension of output image.
        Image gets rescaled with same aspect ratio.

    Raises
    ------
    ValueError
        If `dataset_name` is not a known DISP-S1 product name.
    """
    if dataset_name not in DISP_PRODUCT_NAMES:
        # BUG FIX: previously referenced undefined `args.dataset_name` here,
        # which raised NameError instead of the intended ValueError
        raise ValueError(f"{dataset_name} is not a valid dataset name")

    # get dataset as array from input NC file
    with h5netcdf.File(input_filename, "r") as nc_handle:
        arr = nc_handle[dataset_name][()]

    make_browse_image_from_arr(output_filename, arr, max_dim_allowed)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="Create browse images for displacement products from command line",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "-o", "--out-fname", required=False, help="Optional path to output png file"
    )
    # required: without an input file the default output-name derivation below
    # would crash on None
    parser.add_argument(
        "-i", "--in-fname", required=True, help="Path to input NetCDF file"
    )
    parser.add_argument(
        "-n",
        "--dataset-name",
        choices=DISP_PRODUCT_NAMES,
        required=True,
        help="Name of dataset to plot from NetCDF file",
    )
    parser.add_argument(
        "-m",
        "--max-img-dim",
        type=int,
        default=2048,
        help="Maximum dimension allowed for either length or width of browse image",
    )
    args = parser.parse_args()

    # if no output file name given, set output file name to input path with
    # dataset name inserted before .nc
    if args.out_fname is None:
        args.out_fname = args.in_fname.replace(".nc", f".{args.dataset_name}.png")

    make_browse_image_from_nc(
        args.out_fname, args.in_fname, args.dataset_name, args.max_img_dim
    )
73 changes: 25 additions & 48 deletions src/disp_s1/product.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,9 @@
from . import __version__ as disp_s1_version
from . import _parse_cslc_product
from ._common import DATETIME_FORMAT
from .browse_image import make_browse_image
from .browse_image import make_browse_image_from_arr
from .pge_runconfig import RunConfig
from .product_info import DISP_PRODUCTS_INFO

logger = get_log(__name__)

Expand Down Expand Up @@ -110,7 +111,6 @@ def create_output_product(

assert unw_arr.shape == conncomp_arr.shape == tcorr_arr.shape

make_browse_image(Path(output_name).with_suffix(".png"), unw_arr)
start_times = [
_parse_cslc_product.get_zero_doppler_time(f, type_="start") for f in cslc_files
]
Expand All @@ -137,52 +137,29 @@ def create_output_product(

# ######## Main datasets ###########
# Write the displacement array / conncomp arrays
_create_geo_dataset(
group=f,
name="unwrapped_phase",
data=unw_arr,
description="Unwrapped phase",
fillvalue=np.nan,
attrs=dict(units="radians"),
)
_create_geo_dataset(
group=f,
name="connected_component_labels",
data=conncomp_arr,
description="Connected component labels of the unwrapped phase",
fillvalue=0,
attrs=dict(units="unitless"),
)
_create_geo_dataset(
group=f,
name="temporal_coherence",
data=tcorr_arr,
description="Temporal coherence of phase inversion",
fillvalue=np.nan,
attrs=dict(units="unitless"),
)
_create_geo_dataset(
group=f,
name="interferometric_correlation",
data=ifg_corr_arr,
description=(
"Estimate of interferometric correlation derived from multilooked"
" interferogram."
),
fillvalue=np.nan,
attrs=dict(units="unitless"),
)
_create_geo_dataset(
group=f,
name="persistent_scatterer_mask",
data=io.load_gdal(ps_mask_filename),
description=(
"Mask of persistent scatterers downsampled to the multilooked output"
" grid."
),
fillvalue=255,
attrs=dict(units="unitless"),
)
disp_products_info = DISP_PRODUCTS_INFO
disp_data = [
unw_arr,
conncomp_arr,
tcorr_arr,
ifg_corr_arr,
io.load_gdal(ps_mask_filename),
]
disp_products = [
(nfo, data) for nfo, data in zip(disp_products_info, disp_data)
]
for nfo, data in disp_products:
_create_geo_dataset(
group=f,
name=nfo.name,
data=data,
description=nfo.description,
fillvalue=nfo.fillvalue,
attrs=nfo.attrs,
)
make_browse_image_from_arr(
Path(output_name).with_suffix(f".{nfo.name}.png"), data
)

_create_corrections_group(
output_name=output_name,
Expand Down
103 changes: 103 additions & 0 deletions src/disp_s1/product_info.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
from dataclasses import dataclass, fields

import numpy as np
from numpy.typing import DTypeLike


@dataclass(frozen=True)
class DispProductInfo:
    """Metadata bundle describing one displacement product dataset.

    Each classmethod is an alternate constructor returning the canonical
    info for one DISP-S1 output dataset.
    """

    # Name of the dataset.
    name: str
    # Description of the dataset.
    description: str
    # Fill value of the dataset.
    fillvalue: DTypeLike
    # Attributes of the dataset.
    attrs: dict

    @classmethod
    def unwrapped_phase(cls):
        """Return info for the unwrapped phase dataset."""
        return cls(
            name="unwrapped_phase",
            description="Unwrapped phase",
            fillvalue=np.nan,
            attrs={"units": "radians"},
        )

    @classmethod
    def connected_component_labels(cls):
        """Return info for the connected component labels dataset."""
        return cls(
            name="connected_component_labels",
            description="Connected component labels of the unwrapped phase",
            fillvalue=0,
            attrs={"units": "unitless"},
        )

    @classmethod
    def temporal_coherence(cls):
        """Return info for the temporal coherence dataset."""
        return cls(
            name="temporal_coherence",
            description="Temporal coherence of phase inversion",
            fillvalue=np.nan,
            attrs={"units": "unitless"},
        )

    @classmethod
    def interferometric_correlation(cls):
        """Return info for the interferometric correlation dataset."""
        return cls(
            name="interferometric_correlation",
            description=(
                "Estimate of interferometric correlation derived from"
                " multilooked interferogram."
            ),
            fillvalue=np.nan,
            attrs={"units": "unitless"},
        )

    @classmethod
    def persistent_scatterer_mask(cls):
        """Return info for the persistent scatterer mask dataset."""
        return cls(
            name="persistent_scatterer_mask",
            description=(
                "Mask of persistent scatterers downsampled to the multilooked"
                " output grid."
            ),
            fillvalue=255,
            attrs={"units": "unitless"},
        )


@dataclass(frozen=True)
class DispProductsInfo:
    """Container for instantiated displacement product dataset info containers."""

    # One field per DISP-S1 output dataset; defaults are the canonical
    # (frozen, therefore safely shared) info instances.
    unwrapped_phase: DispProductInfo = DispProductInfo.unwrapped_phase()
    connected_component_labels: DispProductInfo = (
        DispProductInfo.connected_component_labels()
    )
    temporal_coherence: DispProductInfo = DispProductInfo.temporal_coherence()
    interferometric_correlation: DispProductInfo = (
        DispProductInfo.interferometric_correlation()
    )
    persistent_scatterer_mask: DispProductInfo = (
        DispProductInfo.persistent_scatterer_mask()
    )

    def as_list(self):
        """Return all displacement dataset info containers as a list."""
        return [getattr(self, field.name) for field in fields(self)]

    def product_names(self):
        """Return all displacement dataset names as a list."""
        return [field.name for field in fields(self)]


# Instantiate once and derive both module-level constants from the same
# object (previously two separate instances were built for no reason).
_DISP_PRODUCTS_INFO = DispProductsInfo()
DISP_PRODUCTS_INFO = _DISP_PRODUCTS_INFO.as_list()
DISP_PRODUCT_NAMES = _DISP_PRODUCTS_INFO.product_names()

0 comments on commit 50b7893

Please sign in to comment.