
Commit 32b849d
Merge branch 'test' into fix-frame-precision
tylercchase authored Jun 13, 2024
2 parents b7bc781 + 91441e3 commit 32b849d
Showing 16 changed files with 297 additions and 83 deletions.
1 change: 0 additions & 1 deletion .github/workflows/reusable-DeployStack-SearchAPI.yml
@@ -101,7 +101,6 @@ jobs:
run: |
sudo apt-get update
sudo apt-get install libgdal-dev
export SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True
python3 -m pip install --no-cache-dir --upgrade pip
python3 -m pip install --no-cache-dir wheel Cython
python3 -m pip install -r requirements.txt --use-deprecated=legacy-resolver
1 change: 0 additions & 1 deletion Dockerfile
@@ -20,7 +20,6 @@ WORKDIR "${LAMBDA_TASK_ROOT}/Discovery-SearchAPI"
COPY requirements.txt .
RUN mkdir "${LAMBDA_TASK_ROOT}/python-packages"
ENV PYTHONPATH "${PYTHONPATH}:${LAMBDA_TASK_ROOT}/python-packages"
ENV SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True
RUN python3 -m pip install --no-cache-dir -r requirements.txt --target "${LAMBDA_TASK_ROOT}/python-packages"

## Copy required files (Already inside Discovery-SearchAPI dir):
13 changes: 6 additions & 7 deletions SearchAPI/Baseline/Calc.py
@@ -1,7 +1,6 @@
from math import sqrt, cos, sin, radians
import numpy as np
import dateparser

import ciso8601
# WGS84 constants
a = 6378137
f = pow((1.0 - 1 / 298.257224), 2)
@@ -15,17 +14,17 @@ def calculate_perpendicular_baselines(reference, stack):
product['noStateVectors'] = True
continue

asc_node_time = dateparser.parse(product['ascendingNodeTime']).timestamp()
asc_node_time = ciso8601.parse_datetime(product['ascendingNodeTime']).timestamp()

start = dateparser.parse(product['startTime']).timestamp()
end = dateparser.parse(product['stopTime']).timestamp()
start = ciso8601.parse_datetime(product['startTime']).timestamp()
end = ciso8601.parse_datetime(product['stopTime']).timestamp()
center = start + ((end - start) / 2)
product['relative_start_time'] = start - asc_node_time
product['relative_center_time'] = center - asc_node_time
product['relative_end_time'] = end - asc_node_time

t_pre = dateparser.parse(product['sv_t_pos_pre']).timestamp()
t_post = dateparser.parse(product['sv_t_pos_post']).timestamp()
t_pre = ciso8601.parse_datetime(product['sv_t_pos_pre']).timestamp()
t_post = ciso8601.parse_datetime(product['sv_t_pos_post']).timestamp()
product['relative_sv_pre_time'] = t_pre - asc_node_time
product['relative_sv_post_time'] = t_post - asc_node_time

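Note (not part of the diff): the hunk above replaces dateparser.parse with ciso8601.parse_datetime, trading a flexible free-form date parser for a strict and much faster ISO 8601 parser. A minimal sketch of the new call, assuming the CMR timestamps are ISO 8601 strings; the sample value below is illustrative only.

import ciso8601

ts = '2024-06-13T12:34:56.000000Z'

# ciso8601 parses only ISO 8601 / RFC 3339 strings and returns a
# timezone-aware datetime when an offset (here 'Z') is present.
dt = ciso8601.parse_datetime(ts)
print(dt.timestamp())  # seconds since the epoch, as used by Calc.py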
6 changes: 3 additions & 3 deletions SearchAPI/Baseline/Stack.py
@@ -1,4 +1,4 @@
import dateparser
import ciso8601
from SearchAPI.CMR.Translate import translate_params, input_fixer
from SearchAPI.CMR.Query import CMRQuery
from .Calc import calculate_perpendicular_baselines
@@ -178,13 +178,13 @@ def get_default_product_type(reference):
def calculate_temporal_baselines(reference, stack):
for product in stack:
if product['granuleName'] == reference:
reference_start = dateparser.parse(product['startTime'])
reference_start = ciso8601.parse_datetime(product['startTime'])
break
for product in stack:
if product['granuleName'] == reference:
product['temporalBaseline'] = 0
else:
start = dateparser.parse(product['startTime'])
start = ciso8601.parse_datetime(product['startTime'])
product['temporalBaseline'] = (start.date() - reference_start.date()).days
return stack

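Note (not part of the diff): calculate_temporal_baselines stores each product's offset from the reference scene as whole days between acquisition dates. A small worked example under the same assumption of ISO 8601 startTime values; the dates are made up.

import ciso8601

reference_start = ciso8601.parse_datetime('2024-06-01T00:12:00Z')
secondary_start = ciso8601.parse_datetime('2024-06-13T00:11:30Z')

# temporalBaseline is the day difference between acquisition dates,
# so sub-day timing differences do not affect the result.
temporal_baseline = (secondary_start.date() - reference_start.date()).days
print(temporal_baseline)  # 12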
16 changes: 12 additions & 4 deletions SearchAPI/CMR/Output/geojson.py
@@ -58,13 +58,21 @@ def getItem(self, p):
except TypeError:
pass

if p.get('absoluteOrbit') is not None and len(p.get('absoluteOrbit')):
p['absoluteOrbit'] = p['absoluteOrbit'][0]

coordinates = []

if p.get('shape') is not None:
coordinates = [
[[float(c['lon']), float(c['lat'])] for c in p.get('shape')]
]

result = {
'type': 'Feature',
'geometry': {
'type': 'Polygon',
'coordinates': [
[[float(c['lon']), float(c['lat'])] for c in p['shape']]
]
'coordinates': coordinates
},
'properties': {
'beamModeType': p['beamModeType'],
@@ -82,7 +90,7 @@ def getItem(self, p):
'insarStackId': p['insarGrouping'],
'md5sum': p['md5sum'],
'offNadirAngle': p['offNadirAngle'],
'orbit': p['absoluteOrbit'][0],
'orbit': p['absoluteOrbit'],
'pathNumber': p['relativeOrbit'],
'platform': p['platform'],
'pointingAngle': p['pointingAngle'],
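Note (not part of the diff): the geojson changes guard against products that lack a footprint or an orbit list instead of indexing them unconditionally. A condensed sketch of the new behaviour; build_geometry is a hypothetical helper, and the field names mirror the diff.

def build_geometry(p):
    # Collapse absoluteOrbit from a one-element list to its first value.
    orbit = p.get('absoluteOrbit')
    if orbit is not None and len(orbit):
        orbit = orbit[0]

    # Products without a shape now produce an empty coordinate list
    # rather than raising on p['shape'].
    coordinates = []
    if p.get('shape') is not None:
        coordinates = [[[float(c['lon']), float(c['lat'])] for c in p['shape']]]

    return {'type': 'Polygon', 'coordinates': coordinates}, orbit

print(build_geometry({'absoluteOrbit': [12345], 'shape': None}))
# ({'type': 'Polygon', 'coordinates': []}, 12345)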
20 changes: 18 additions & 2 deletions SearchAPI/CMR/Output/jsonlite.py
@@ -42,7 +42,9 @@ def req_fields_jsonlite():
'subswath',
'pgeVersion',
'operaBurstID',
'additionalUrls'
'additionalUrls',
's3Urls',
'ariaVersion',
]
return fields

@@ -181,7 +183,21 @@ def getItem(self, p):
if p.get('operaBurstID') is not None or result['productID'].startswith('OPERA'):
result['opera'] = {
'operaBurstID': p.get('operaBurstID'),
'additionalUrls': p.get('additionalUrls')
'additionalUrls': p.get('additionalUrls'),
}
if p.get('validityStartDate'):
result['opera']['validityStartDate'] = p.get('validityStartDate')

if p.get('platform') == 'NISAR':
result['nisar'] = {
'additionalUrls': p.get('additionalUrls', []),
's3Urls': p.get('s3Urls', [])
}


if p.get('ariaVersion') is not None:
granule_name = p.get('granuleName')
if granule_name is not None and 'gunw' in granule_name.lower():
result['ariaVersion'] = p.get('ariaVersion')

return result
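Note (not part of the diff): jsonlite results now carry a 'nisar' block for NISAR products and only expose ariaVersion for ARIA GUNW granules. A minimal sketch of that logic; add_platform_extras is a hypothetical wrapper and the field names mirror the diff.

def add_platform_extras(p, result):
    if p.get('platform') == 'NISAR':
        result['nisar'] = {
            'additionalUrls': p.get('additionalUrls', []),
            's3Urls': p.get('s3Urls', []),
        }

    # ariaVersion is surfaced only when the granule looks like a GUNW product.
    if p.get('ariaVersion') is not None:
        granule_name = p.get('granuleName')
        if granule_name is not None and 'gunw' in granule_name.lower():
            result['ariaVersion'] = p.get('ariaVersion')

    return result

print(add_platform_extras(
    {'platform': 'NISAR', 's3Urls': ['s3://bucket/key']},
    {'productID': 'EXAMPLE'},
))
# {'productID': 'EXAMPLE', 'nisar': {'additionalUrls': [], 's3Urls': ['s3://bucket/key']}}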
6 changes: 6 additions & 0 deletions SearchAPI/CMR/Output/jsonlite2.py
@@ -61,4 +61,10 @@ def getItem(self, p):
if p.get('opera') is not None:
result['s1o'] = p['opera']

if p.get('nisar') is not None:
result['nsr'] = p['nisar']

if p.get('ariaVersion') is not None:
result['ariav'] = p.get('ariaVersion')

return result
2 changes: 1 addition & 1 deletion SearchAPI/CMR/Query.py
@@ -114,7 +114,7 @@ def chunk_list(source_list, n):
if chunk_type in params:
params[chunk_type] = chunk_list(list(set(params[chunk_type])), 500) # distinct and split

list_param_names = ['platform', 'collections'] # these parameters will dodge the subquery system
list_param_names = ['platform', 'collections', 'shortname'] # these parameters will dodge the subquery system

for k, v in params.items():
if k in list_param_names:
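Note (not part of the diff): 'shortname' joins 'platform' and 'collections' as parameters that bypass the subquery system, while the chunked parameters are de-duplicated and split into groups of 500 values. A sketch of that chunking, assuming chunk_list keeps the (source_list, n) signature shown in the hunk header.

def chunk_list(source_list, n):
    # Split source_list into consecutive chunks of at most n items.
    return [source_list[i:i + n] for i in range(0, len(source_list), n)]

granules = [f'G{i}' for i in range(1200)]
chunks = chunk_list(sorted(set(granules)), 500)  # distinct and split
print([len(c) for c in chunks])  # [500, 500, 200]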
17 changes: 13 additions & 4 deletions SearchAPI/CMR/SubQuery.py
@@ -7,7 +7,7 @@
from flask import request

from SearchAPI.asf_env import get_config
from SearchAPI.CMR.Translate import parse_cmr_response
from SearchAPI.CMR.Translate import parse_cmr_response, platform_datasets
from SearchAPI.CMR.Exceptions import CMRError

import boto3
@@ -25,6 +25,9 @@ def __init__(self, req_fields, params, extra_params):
self.headers = {}

token = request.args.get("cmr_token")
if token is None:
token = request.form.get('cmr_token')

if token != None:
self.headers['Authorization'] = f'Bearer {token}'

@@ -58,9 +61,15 @@ def combine_params(self, params, extra_params):

def should_use_asf_frame(self):
asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS']

asf_frame_datasets = ['SENTINEL-1', 'OPERA-S1', 'SLC-BURST', 'ALOS PALSAR', 'ALOS AVNIR-2']

asf_frame_collections = []
for dataset in asf_frame_datasets:
asf_frame_collections.extend(platform_datasets.get(dataset))

return any([
p[0] == 'platform[]' and p[1] in asf_frame_platforms
p[0] == 'platform[]' and p[1] in asf_frame_platforms
or p[0] == 'echo_collection_id[]' and p[1] in asf_frame_collections
for p in self.params
])

@@ -167,7 +176,7 @@ def get_page(self, session):
query_duration = perf_counter() - q_start
logging.debug(f'CMR query time: {query_duration}')

self.log_subquery_time({'time': query_duration, 'status': response.status_code})
# self.log_subquery_time({'time': query_duration, 'status': response.status_code})

if query_duration > 10:
self.log_slow_cmr_response(session, response, query_duration)
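Note (not part of the diff): should_use_asf_frame previously keyed only off platform[] values; it now also matches echo_collection_id[] values against the collections behind a fixed list of ASF frame datasets. A condensed sketch, assuming params is the list of (key, value) tuples SubQuery builds and platform_datasets maps dataset names to collection concept IDs.

def should_use_asf_frame(params, platform_datasets):
    asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS']
    asf_frame_datasets = ['SENTINEL-1', 'OPERA-S1', 'SLC-BURST',
                          'ALOS PALSAR', 'ALOS AVNIR-2']

    # Flatten the collection concept IDs for the frame-capable datasets.
    asf_frame_collections = []
    for dataset in asf_frame_datasets:
        asf_frame_collections.extend(platform_datasets.get(dataset, []))

    return any(
        (key == 'platform[]' and value in asf_frame_platforms)
        or (key == 'echo_collection_id[]' and value in asf_frame_collections)
        for key, value in params
    )

# Placeholder concept ID, for illustration only.
print(should_use_asf_frame([('platform[]', 'SENTINEL-1A')],
                           {'SENTINEL-1': ['C0000000000-ASF']}))  # True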
137 changes: 135 additions & 2 deletions SearchAPI/CMR/Translate/datasets.py
@@ -36,7 +36,6 @@
"C1327985644-ASF",
"C1327985571-ASF",
"C1327985674-ASF",
"C2450786986-ASF",
"C1205428742-ASF",
"C1216244348-ASF",
"C1212201032-ASF",
@@ -71,7 +70,6 @@
"C1216244596-ASF",
"C1216244588-ASF",
"C1216244599-ASF",
"C1245953394-ASF",
"C1234413245-ASFDEV",
"C1234413263-ASFDEV",
"C1234413229-ASFDEV",
@@ -116,6 +114,8 @@
"OPERA-S1": [
"C2777443834-ASF",
"C2777436413-ASF",
"C2795135174-ASF", # RTC-STATIC
"C2795135668-ASF", # CSLC-STATIC
"C1258354200-ASF", # maybe extra?
"C1259982010-ASF",
"C1257995185-ASF",
@@ -125,6 +125,12 @@
"C1257995186-ASF",
"C1259974840-ASF",
],
"OPERA-S1-CALVAL": [
"C1260721945-ASF", # CSLC
"C2803501758-ASF",
"C1260721853-ASF", # RTC
"C2803501097-ASF",
],
"SLC-BURST": ["C2709161906-ASF", "C1257024016-ASF"],
"ALOS PALSAR": [
"C1206487504-ASF",
@@ -199,6 +205,8 @@
"C1225776657-ASF",
"C1225776658-ASF",
"C1225776659-ASF",
"C2859376221-ASF",
"C1261881077-ASF",
],
"SMAP": [
"C1243122884-ASF",
@@ -362,4 +370,129 @@
"C1206752770-ASF",
"C1206144699-ASF",
],
# TODO: add check for item['umm']['SpatialExtent']['GranuleSpatialRepresentation'] == 'NO_SPATIAL'
"NISAR": [ # UAT ASFDEV
"C1261815181-ASFDEV",
"C1261832381-ASFDEV",
"C1256533420-ASFDEV",
"C1261813453-ASFDEV",
"C1261832466-ASFDEV",
"C1256524081-ASFDEV",
"C1261815274-ASFDEV",
"C1261832497-ASFDEV",
"C1256358262-ASFDEV",
"C1261815276-ASFDEV",
"C1261832632-ASFDEV",
"C1256358463-ASFDEV",
"C1261813489-ASFDEV",
"C1261832868-ASFDEV",
"C1256363301-ASFDEV",
"C1261819086-ASFDEV",
"C1261832940-ASFDEV",
"C1256381769-ASFDEV",
"C1261819098-ASFDEV",
"C1261832990-ASFDEV",
"C1256420738-ASFDEV",
"C1261819110-ASFDEV",
"C1261832993-ASFDEV",
"C1256411631-ASFDEV",
"C1261819167-ASFDEV",
"C1261833024-ASFDEV",
"C1256413628-ASFDEV",
"C1261819168-ASFDEV",
"C1261833025-ASFDEV",
"C1256432264-ASFDEV",
"C1261819211-ASFDEV",
"C1261833026-ASFDEV",
"C1256477304-ASFDEV",
"C1261819233-ASFDEV",
"C1261833027-ASFDEV",
"C1256479237-ASFDEV",
"C1261819245-ASFDEV",
"C1261833050-ASFDEV",
"C1256568692-ASFDEV",
"C1262134528-ASFDEV",
# UAT
"C1261815288-ASF",
"C1261832657-ASF",
"C1257349121-ASF",
"C1261815147-ASF",
"C1261832658-ASF",
"C1257349120-ASF",
"C1261815289-ASF",
"C1261832659-ASF",
"C1257349115-ASF",
"C1261815301-ASF",
"C1261832671-ASF",
"C1257349114-ASF",
"C1261815148-ASF",
"C1261833052-ASF",
"C1257349109-ASF",
"C1261819120-ASF",
"C1261833063-ASF",
"C1257349108-ASF",
"C1261819121-ASF",
"C1261833064-ASF",
"C1257349107-ASF",
"C1261819145-ASF",
"C1261833076-ASF",
"C1257349103-ASF",
"C1261819258-ASF",
"C1261833127-ASF",
"C1257349102-ASF",
"C1261819270-ASF",
"C1261846741-ASF",
"C1257349096-ASF",
"C1261819275-ASF",
"C1261846880-ASF",
"C1257349095-ASF",
"C1261819281-ASF",
"C1261846994-ASF",
"C1257349094-ASF",
"C1261819282-ASF",
"C1261847095-ASF",
"C1257349093-ASF",
"C1262135006-ASF",
# PROD
"C2850220296-ASF",
"C2853068083-ASF",
"C2727902012-ASF",
"C2850223384-ASF",
"C2853086824-ASF",
"C2727901263-ASF",
"C2850224301-ASF",
"C2853089814-ASF",
"C2727901639-ASF",
"C2850225137-ASF",
"C2853091612-ASF",
"C2727901523-ASF",
"C2850225585-ASF",
"C2853145197-ASF",
"C2727900439-ASF",
"C2850234202-ASF",
"C2853147928-ASF",
"C2723110181-ASF",
"C2850235455-ASF",
"C2853153429-ASF",
"C2727900827-ASF",
"C2850237619-ASF",
"C2853156054-ASF",
"C2727900080-ASF",
"C2850259510-ASF",
"C2854332392-ASF",
"C2727896667-ASF",
"C2850261892-ASF",
"C2854335566-ASF",
"C2727897718-ASF",
"C2850262927-ASF",
"C2854338529-ASF",
"C2727896018-ASF",
"C2850263910-ASF",
"C2854341702-ASF",
"C2727896460-ASF",
"C2850265000-ASF",
"C2854344945-ASF",
"C2727894546-ASF",
"C2874824964-ASF"
],
}
5 changes: 4 additions & 1 deletion SearchAPI/CMR/Translate/fields.py
@@ -61,6 +61,7 @@ def get_field_paths():
'track': attr_path('PATH_NUMBER'),
'pgeVersion': "./PGEVersionClass/PGEVersion",
'additionalUrls': "./OnlineAccessURLs",
's3Urls': "./OnlineAccessURLs",

# BURST FIELDS
'absoluteBurstID': attr_path('BURST_ID_ABSOLUTE'),
@@ -73,6 +74,8 @@
'subswath': attr_path('SUBSWATH_NAME'),

# OPERA RTC FIELDS
'operaBurstID': attr_path('OPERA_BURST_ID'),
'operaBurstID': attr_path('OPERA_BURST_ID'),

'ariaVersion': attr_path('VERSION'),
}
return paths
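Note (not part of the diff): the new 's3Urls' entry reuses the same OnlineAccessURLs element as 'additionalUrls', while 'ariaVersion' is read through attr_path, the helper these paths use for ECHO10 additional attributes. A hedged sketch of what such a helper typically expands to; the exact XPath used by the SearchAPI may differ.

def attr_path(name):
    # Hypothetical reconstruction of the additional-attribute XPath.
    return ("./AdditionalAttributes/AdditionalAttribute"
            f"[Name='{name}']/Values/Value")

print(attr_path('VERSION'))
# ./AdditionalAttributes/AdditionalAttribute[Name='VERSION']/Values/Value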
