Skip to content

Commit

Permalink
Add satellite names, number of granules, earliest/latest datetimes
Browse files Browse the repository at this point in the history
  • Loading branch information
scottstanie committed Dec 20, 2024
1 parent 266bec7 commit 4fa76e4
Showing 1 changed file with 60 additions and 6 deletions.
66 changes: 60 additions & 6 deletions src/disp_s1/product.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
get_dates,
get_radar_wavelength,
get_zero_doppler_time,
parse_filename,
)

from . import __version__ as disp_s1_version
Expand Down Expand Up @@ -170,9 +171,11 @@ def _get_start_end_cslcs(files):
logger.debug(f"Start, end files: {start}, {end}")
return start, end

reference_start, reference_end = _get_start_end_cslcs(reference_cslc_files)
reference_start_time = get_zero_doppler_time(reference_start, type_="start")
reference_end_time = get_zero_doppler_time(reference_end, type_="end")
reference_start_file, reference_end_file = _get_start_end_cslcs(
reference_cslc_files
)
reference_start_time = get_zero_doppler_time(reference_start_file, type_="start")
reference_end_time = get_zero_doppler_time(reference_end_file, type_="end")

secondary_start, secondary_end = _get_start_end_cslcs(secondary_cslc_files)
secondary_start_time = get_zero_doppler_time(secondary_start, type_="start")
Expand All @@ -188,7 +191,7 @@ def _get_start_end_cslcs(files):
try:
logger.info("Calculating perpendicular baselines subsampled by %s", subsample)
baseline_arr = compute_baselines(
reference_start,
reference_start_file,
secondary_start,
x=x,
y=y,
Expand All @@ -197,7 +200,8 @@ def _get_start_end_cslcs(files):
)
except Exception:
logger.error(
f"Failed to compute baselines for {reference_start}, {secondary_start}",
f"Failed to compute baselines for {reference_start_file},"
f" {secondary_start}",
exc_info=True,
)
baseline_arr = np.zeros((100, 100))
Expand Down Expand Up @@ -411,7 +415,7 @@ def _get_start_end_cslcs(files):
dolphin_config=dolphin_config,
)
copy_cslc_metadata_to_displacement(
reference_cslc_file=reference_start,
reference_cslc_file=reference_start_file,
secondary_cslc_file=secondary_start,
output_disp_file=output_name,
)
Expand Down Expand Up @@ -695,6 +699,56 @@ def _create_identification_group(
description="Number of input data granule used during processing.",
attrs={"units": "unitless"},
)
input_dts = sorted(
[get_dates(f)[0] for f in pge_runconfig.input_file_group.cslc_file_list]
)
parsed_files = [
parse_filename(f)
for f in pge_runconfig.input_file_group.cslc_file_list
if "compressed" not in str(f).lower()
]
input_sensors = {p.get("sensor") for p in parsed_files if p.get("sensor")}

# CEOS: Section 1.5
_create_dataset(
group=identification_group,
name="ceos_source_data_satellite_names",
dimensions=(),
data=",".join(input_sensors),
fillvalue=None,
description="Names of satellites included in input granules",
attrs={"units": "unitless"},
)
starting_date_str = input_dts[0].isoformat()
_create_dataset(
group=identification_group,
name="ceos_source_data_earliest_acquisition",
dimensions=(),
data=starting_date_str,
fillvalue=None,
description="Datetime of earliest input granule used during processing",
attrs={"units": "unitless"},
)
last_date_str = input_dts[-1].isoformat()
_create_dataset(
group=identification_group,
name="ceos_source_data_latest_acquisition",
dimensions=(),
data=last_date_str,
fillvalue=None,
description="Datetime of latest input granule used during processing",
attrs={"units": "unitless"},
)
_create_dataset(
group=identification_group,
name="ceos_number_of_input_granules",
dimensions=(),
data=len(pge_runconfig.input_file_group.cslc_file_list),
fillvalue=None,
description="Number of input data granule used during processing.",
attrs={"units": "unitless"},
)

# CEOS: Section 1.6.4 source acquisition parameters
_create_dataset(
group=identification_group,
Expand Down

0 comments on commit 4fa76e4

Please sign in to comment.