diff --git a/.github/workflows/reusable-DeployStack-SearchAPI.yml b/.github/workflows/reusable-DeployStack-SearchAPI.yml
index b98ad650..9f2bd9a8 100644
--- a/.github/workflows/reusable-DeployStack-SearchAPI.yml
+++ b/.github/workflows/reusable-DeployStack-SearchAPI.yml
@@ -101,7 +101,6 @@ jobs:
       run: |
         sudo apt-get update
         sudo apt-get install libgdal-dev
-        export SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True
         python3 -m pip install --no-cache-dir --upgrade pip
         python3 -m pip install --no-cache-dir wheel Cython
         python3 -m pip install -r requirements.txt --use-deprecated=legacy-resolver
diff --git a/Dockerfile b/Dockerfile
index f822390d..9b6f5d86 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -20,7 +20,6 @@ WORKDIR "${LAMBDA_TASK_ROOT}/Discovery-SearchAPI"
 COPY requirements.txt .
 RUN mkdir "${LAMBDA_TASK_ROOT}/python-packages"
 ENV PYTHONPATH "${PYTHONPATH}:${LAMBDA_TASK_ROOT}/python-packages"
-ENV SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True
 RUN python3 -m pip install --no-cache-dir -r requirements.txt --target "${LAMBDA_TASK_ROOT}/python-packages"
 
 ## Copy required files (Already inside Discovery-SearchAPI dir):
diff --git a/SearchAPI/Baseline/Calc.py b/SearchAPI/Baseline/Calc.py
index edc10550..78d4cc7e 100644
--- a/SearchAPI/Baseline/Calc.py
+++ b/SearchAPI/Baseline/Calc.py
@@ -1,7 +1,6 @@
 from math import sqrt, cos, sin, radians
 import numpy as np
-import dateparser
-
+import ciso8601
 # WGS84 constants
 a = 6378137
 f = pow((1.0 - 1 / 298.257224), 2)
@@ -15,17 +14,17 @@ def calculate_perpendicular_baselines(reference, stack):
             product['noStateVectors'] = True
             continue
 
-        asc_node_time = dateparser.parse(product['ascendingNodeTime']).timestamp()
+        asc_node_time = ciso8601.parse_datetime(product['ascendingNodeTime']).timestamp()
 
-        start = dateparser.parse(product['startTime']).timestamp()
-        end = dateparser.parse(product['stopTime']).timestamp()
+        start = ciso8601.parse_datetime(product['startTime']).timestamp()
+        end = ciso8601.parse_datetime(product['stopTime']).timestamp()
         center = start + ((end - start) / 2)
         product['relative_start_time'] = start - asc_node_time
         product['relative_center_time'] = center - asc_node_time
         product['relative_end_time'] = end - asc_node_time
 
-        t_pre = dateparser.parse(product['sv_t_pos_pre']).timestamp()
-        t_post = dateparser.parse(product['sv_t_pos_post']).timestamp()
+        t_pre = ciso8601.parse_datetime(product['sv_t_pos_pre']).timestamp()
+        t_post = ciso8601.parse_datetime(product['sv_t_pos_post']).timestamp()
         product['relative_sv_pre_time'] = t_pre - asc_node_time
         product['relative_sv_post_time'] = t_post - asc_node_time
 
diff --git a/SearchAPI/Baseline/Stack.py b/SearchAPI/Baseline/Stack.py
index 8d0d7559..0f20ab89 100644
--- a/SearchAPI/Baseline/Stack.py
+++ b/SearchAPI/Baseline/Stack.py
@@ -1,4 +1,4 @@
-import dateparser
+import ciso8601
 from SearchAPI.CMR.Translate import translate_params, input_fixer
 from SearchAPI.CMR.Query import CMRQuery
 from .Calc import calculate_perpendicular_baselines
@@ -178,13 +178,13 @@ def get_default_product_type(reference):
 def calculate_temporal_baselines(reference, stack):
     for product in stack:
         if product['granuleName'] == reference:
-            reference_start = dateparser.parse(product['startTime'])
+            reference_start = ciso8601.parse_datetime(product['startTime'])
             break
 
     for product in stack:
         if product['granuleName'] == reference:
             product['temporalBaseline'] = 0
         else:
-            start = dateparser.parse(product['startTime'])
+            start = ciso8601.parse_datetime(product['startTime'])
             product['temporalBaseline'] = (start.date() - reference_start.date()).days
     return stack
diff --git a/SearchAPI/CMR/Output/geojson.py b/SearchAPI/CMR/Output/geojson.py
index 8178162f..9a7d71bf 100644
--- a/SearchAPI/CMR/Output/geojson.py
+++ b/SearchAPI/CMR/Output/geojson.py
@@ -58,13 +58,21 @@ def getItem(self, p):
         except TypeError:
             pass
 
+        if p.get('absoluteOrbit') is not None and len(p.get('absoluteOrbit')):
+            p['absoluteOrbit'] = p['absoluteOrbit'][0]
+
+        coordinates = []
+
+        if p.get('shape') is not None:
+            coordinates = [
+                [[float(c['lon']), float(c['lat'])] for c in p.get('shape')]
+            ]
+
         result = {
             'type': 'Feature',
             'geometry': {
                 'type': 'Polygon',
-                'coordinates': [
-                    [[float(c['lon']), float(c['lat'])] for c in p['shape']]
-                ]
+                'coordinates': coordinates
             },
             'properties': {
                 'beamModeType': p['beamModeType'],
@@ -82,7 +90,7 @@ def getItem(self, p):
             'insarStackId': p['insarGrouping'],
             'md5sum': p['md5sum'],
             'offNadirAngle': p['offNadirAngle'],
-            'orbit': p['absoluteOrbit'][0],
+            'orbit': p['absoluteOrbit'],
             'pathNumber': p['relativeOrbit'],
             'platform': p['platform'],
             'pointingAngle': p['pointingAngle'],
diff --git a/SearchAPI/CMR/Output/jsonlite.py b/SearchAPI/CMR/Output/jsonlite.py
index 6fabc9db..9183f8ac 100644
--- a/SearchAPI/CMR/Output/jsonlite.py
+++ b/SearchAPI/CMR/Output/jsonlite.py
@@ -42,7 +42,9 @@ def req_fields_jsonlite():
         'subswath',
         'pgeVersion',
         'operaBurstID',
-        'additionalUrls'
+        'additionalUrls',
+        's3Urls',
+        'ariaVersion',
     ]
 
     return fields
@@ -181,7 +183,21 @@ def getItem(self, p):
         if p.get('operaBurstID') is not None or result['productID'].startswith('OPERA'):
             result['opera'] = {
                 'operaBurstID': p.get('operaBurstID'),
-                'additionalUrls': p.get('additionalUrls')
+                'additionalUrls': p.get('additionalUrls'),
             }
+            if p.get('validityStartDate'):
+                result['opera']['validityStartDate'] = p.get('validityStartDate')
+
+        if p.get('platform') == 'NISAR':
+            result['nisar'] = {
+                'additionalUrls': p.get('additionalUrls', []),
+                's3Urls': p.get('s3Urls', [])
+            }
+
+
+        if p.get('ariaVersion') is not None:
+            granule_name = p.get('granuleName')
+            if granule_name is not None and 'gunw' in granule_name.lower():
+                result['ariaVersion'] = p.get('ariaVersion')
 
         return result
diff --git a/SearchAPI/CMR/Output/jsonlite2.py b/SearchAPI/CMR/Output/jsonlite2.py
index b403e566..397f49c0 100644
--- a/SearchAPI/CMR/Output/jsonlite2.py
+++ b/SearchAPI/CMR/Output/jsonlite2.py
@@ -61,4 +61,10 @@ def getItem(self, p):
         if p.get('opera') is not None:
             result['s1o'] = p['opera']
 
+        if p.get('nisar') is not None:
+            result['nsr'] = p['nisar']
+
+        if p.get('ariaVersion') is not None:
+            result['ariav'] = p.get('ariaVersion')
+
         return result
diff --git a/SearchAPI/CMR/Query.py b/SearchAPI/CMR/Query.py
index 63d1d6bd..77b3f146 100644
--- a/SearchAPI/CMR/Query.py
+++ b/SearchAPI/CMR/Query.py
@@ -114,7 +114,7 @@ def chunk_list(source_list, n):
         if chunk_type in params:
             params[chunk_type] = chunk_list(list(set(params[chunk_type])), 500) # distinct and split
 
-    list_param_names = ['platform', 'collections'] # these parameters will dodge the subquery system
+    list_param_names = ['platform', 'collections', 'shortname'] # these parameters will dodge the subquery system
 
     for k, v in params.items():
         if k in list_param_names:
diff --git a/SearchAPI/CMR/SubQuery.py b/SearchAPI/CMR/SubQuery.py
index 405e61ce..00087a00 100644
--- a/SearchAPI/CMR/SubQuery.py
+++ b/SearchAPI/CMR/SubQuery.py
@@ -7,7 +7,7 @@
 from flask import request
 
 from SearchAPI.asf_env import get_config
-from SearchAPI.CMR.Translate import parse_cmr_response
+from SearchAPI.CMR.Translate import parse_cmr_response, platform_datasets
 from SearchAPI.CMR.Exceptions import CMRError
 import boto3
 
@@ -25,6 +25,9 @@ def __init__(self, req_fields, params, extra_params):
         self.headers = {}
         token = request.args.get("cmr_token")
 
+        if token is None:
+            token = request.form.get('cmr_token')
+
         if token != None:
             self.headers['Authorization'] = f'Bearer {token}'
 
@@ -58,9 +61,15 @@ def combine_params(self, params, extra_params):
 
     def should_use_asf_frame(self):
         asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS']
-
+        asf_frame_datasets = ['SENTINEL-1', 'OPERA-S1', 'SLC-BURST', 'ALOS PALSAR', 'ALOS AVNIR-2']
+
+        asf_frame_collections = []
+        for dataset in asf_frame_datasets:
+            asf_frame_collections.extend(platform_datasets.get(dataset))
+
         return any([
-            p[0] == 'platform[]' and p[1] in asf_frame_platforms
+            p[0] == 'platform[]' and p[1] in asf_frame_platforms
+            or p[0] == 'echo_collection_id[]' and p[1] in asf_frame_collections
             for p in self.params
         ])
 
@@ -167,7 +176,7 @@ def get_page(self, session):
         query_duration = perf_counter() - q_start
         logging.debug(f'CMR query time: {query_duration}')
 
-        self.log_subquery_time({'time': query_duration, 'status': response.status_code})
+        # self.log_subquery_time({'time': query_duration, 'status': response.status_code})
 
         if query_duration > 10:
             self.log_slow_cmr_response(session, response, query_duration)
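Note on the `should_use_asf_frame` change above: the new `or` clause works because Python's `and` binds tighter than `or`. A minimal standalone sketch with the implied grouping made explicit — the `params` pairs and collection ID below are illustrative placeholders, not values taken from the patch:

    # Stand-in for self.params: (CMR key, value) tuples; the values are made up.
    params = [('echo_collection_id[]', 'C2777443834-ASF'), ('platform[]', 'UAVSAR')]

    asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS']
    asf_frame_collections = ['C2777443834-ASF']  # placeholder for the platform_datasets lookup

    # Same predicate as the patch, with explicit parentheses:
    use_asf_frame = any(
        (p[0] == 'platform[]' and p[1] in asf_frame_platforms)
        or (p[0] == 'echo_collection_id[]' and p[1] in asf_frame_collections)
        for p in params
    )
    assert use_asf_frame  # the echo_collection_id pair matches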
"C1259982010-ASF", "C1257995185-ASF", @@ -125,6 +125,12 @@ "C1257995186-ASF", "C1259974840-ASF", ], + "OPERA-S1-CALVAL": [ + "C1260721945-ASF", # CSLC + "C2803501758-ASF", + "C1260721853-ASF", # RTC + "C2803501097-ASF", + ], "SLC-BURST": ["C2709161906-ASF", "C1257024016-ASF"], "ALOS PALSAR": [ "C1206487504-ASF", @@ -199,6 +205,8 @@ "C1225776657-ASF", "C1225776658-ASF", "C1225776659-ASF", + "C2859376221-ASF", + "C1261881077-ASF", ], "SMAP": [ "C1243122884-ASF", @@ -362,4 +370,129 @@ "C1206752770-ASF", "C1206144699-ASF", ], + # TODO: add check for item['umm']['SpatialExtent']['GranuleSpatialRepresentation'] == 'NO_SPATIAL' + "NISAR": [ # UAT ASFDEV + "C1261815181-ASFDEV", + "C1261832381-ASFDEV", + "C1256533420-ASFDEV", + "C1261813453-ASFDEV", + "C1261832466-ASFDEV", + "C1256524081-ASFDEV", + "C1261815274-ASFDEV", + "C1261832497-ASFDEV", + "C1256358262-ASFDEV", + "C1261815276-ASFDEV", + "C1261832632-ASFDEV", + "C1256358463-ASFDEV", + "C1261813489-ASFDEV", + "C1261832868-ASFDEV", + "C1256363301-ASFDEV", + "C1261819086-ASFDEV", + "C1261832940-ASFDEV", + "C1256381769-ASFDEV", + "C1261819098-ASFDEV", + "C1261832990-ASFDEV", + "C1256420738-ASFDEV", + "C1261819110-ASFDEV", + "C1261832993-ASFDEV", + "C1256411631-ASFDEV", + "C1261819167-ASFDEV", + "C1261833024-ASFDEV", + "C1256413628-ASFDEV", + "C1261819168-ASFDEV", + "C1261833025-ASFDEV", + "C1256432264-ASFDEV", + "C1261819211-ASFDEV", + "C1261833026-ASFDEV", + "C1256477304-ASFDEV", + "C1261819233-ASFDEV", + "C1261833027-ASFDEV", + "C1256479237-ASFDEV", + "C1261819245-ASFDEV", + "C1261833050-ASFDEV", + "C1256568692-ASFDEV", + "C1262134528-ASFDEV", + # UAT + "C1261815288-ASF", + "C1261832657-ASF", + "C1257349121-ASF", + "C1261815147-ASF", + "C1261832658-ASF", + "C1257349120-ASF", + "C1261815289-ASF", + "C1261832659-ASF", + "C1257349115-ASF", + "C1261815301-ASF", + "C1261832671-ASF", + "C1257349114-ASF", + "C1261815148-ASF", + "C1261833052-ASF", + "C1257349109-ASF", + "C1261819120-ASF", + "C1261833063-ASF", + "C1257349108-ASF", + "C1261819121-ASF", + "C1261833064-ASF", + "C1257349107-ASF", + "C1261819145-ASF", + "C1261833076-ASF", + "C1257349103-ASF", + "C1261819258-ASF", + "C1261833127-ASF", + "C1257349102-ASF", + "C1261819270-ASF", + "C1261846741-ASF", + "C1257349096-ASF", + "C1261819275-ASF", + "C1261846880-ASF", + "C1257349095-ASF", + "C1261819281-ASF", + "C1261846994-ASF", + "C1257349094-ASF", + "C1261819282-ASF", + "C1261847095-ASF", + "C1257349093-ASF", + "C1262135006-ASF", + # PROD + "C2850220296-ASF", + "C2853068083-ASF", + "C2727902012-ASF", + "C2850223384-ASF", + "C2853086824-ASF", + "C2727901263-ASF", + "C2850224301-ASF", + "C2853089814-ASF", + "C2727901639-ASF", + "C2850225137-ASF", + "C2853091612-ASF", + "C2727901523-ASF", + "C2850225585-ASF", + "C2853145197-ASF", + "C2727900439-ASF", + "C2850234202-ASF", + "C2853147928-ASF", + "C2723110181-ASF", + "C2850235455-ASF", + "C2853153429-ASF", + "C2727900827-ASF", + "C2850237619-ASF", + "C2853156054-ASF", + "C2727900080-ASF", + "C2850259510-ASF", + "C2854332392-ASF", + "C2727896667-ASF", + "C2850261892-ASF", + "C2854335566-ASF", + "C2727897718-ASF", + "C2850262927-ASF", + "C2854338529-ASF", + "C2727896018-ASF", + "C2850263910-ASF", + "C2854341702-ASF", + "C2727896460-ASF", + "C2850265000-ASF", + "C2854344945-ASF", + "C2727894546-ASF", + "C2874824964-ASF" + ], } diff --git a/SearchAPI/CMR/Translate/fields.py b/SearchAPI/CMR/Translate/fields.py index b9f5bebe..273c2eea 100644 --- a/SearchAPI/CMR/Translate/fields.py +++ b/SearchAPI/CMR/Translate/fields.py @@ -61,6 +61,7 @@ def get_field_paths(): 
diff --git a/SearchAPI/CMR/Translate/fields.py b/SearchAPI/CMR/Translate/fields.py
index b9f5bebe..273c2eea 100644
--- a/SearchAPI/CMR/Translate/fields.py
+++ b/SearchAPI/CMR/Translate/fields.py
@@ -61,6 +61,7 @@ def get_field_paths():
         'track': attr_path('PATH_NUMBER'),
         'pgeVersion': "./PGEVersionClass/PGEVersion",
         'additionalUrls': "./OnlineAccessURLs",
+        's3Urls': "./OnlineAccessURLs",
 
         # BURST FIELDS
         'absoluteBurstID': attr_path('BURST_ID_ABSOLUTE'),
@@ -73,6 +74,8 @@ def get_field_paths():
         'subswath': attr_path('SUBSWATH_NAME'),
 
         # OPERA RTC FIELDS
-        'operaBurstID': attr_path('OPERA_BURST_ID'),
+        'operaBurstID': attr_path('OPERA_BURST_ID'),
+
+        'ariaVersion': attr_path('VERSION'),
     }
     return paths
diff --git a/SearchAPI/CMR/Translate/input_fixer.py b/SearchAPI/CMR/Translate/input_fixer.py
index 0a751532..602c6b1e 100644
--- a/SearchAPI/CMR/Translate/input_fixer.py
+++ b/SearchAPI/CMR/Translate/input_fixer.py
@@ -108,12 +108,14 @@ def input_fixer(params, is_prod: bool = False, provider: str = "ASF"):
             if any_processing_level:
                 fixed_params['collections'] = collection_list
 
-        elif k == 'datasets':
+        elif k == 'dataset':
             fixed_params['collections'] = []
             for dataset in params[k]:
-                logging.warn(dataset)
-                logging.warn(platform_datasets.get(dataset))
-                fixed_params['collections'].extend(platform_datasets.get(dataset))
+                if platform_datasets.get(dataset):
+                    fixed_params['collections'].extend(platform_datasets.get(dataset))
+                else:
+                    raise ValueError(f'Could not find dataset named "{dataset}" provided for dataset keyword.')
+
+            logging.warn(fixed_params)
 
         elif k == 'beammode':
             beammap = {
diff --git a/SearchAPI/CMR/Translate/input_map.py b/SearchAPI/CMR/Translate/input_map.py
index 4b4a50e9..fc879799 100644
--- a/SearchAPI/CMR/Translate/input_map.py
+++ b/SearchAPI/CMR/Translate/input_map.py
@@ -56,8 +56,9 @@ def input_map():
         'relativeburstid': ['attribute[]', 'int,BURST_ID_RELATIVE,{0}', parse_int_list],
         'absoluteburstid': ['attribute[]', 'int,BURST_ID_ABSOLUTE,{0}', parse_int_list],
         'fullburstid': ['attribute[]', 'string,BURST_ID_FULL,{0}', parse_string_list],
-        'operaburstid': ['attribute[]', 'string,OPERA_BURST_ID,{0}', parse_string_list],
-        'datasets': [None, '{0}', parse_string_list]
+        'operaburstid': ['attribute[]', 'string,OPERA_BURST_ID,{0}', parse_string_list],
+        'dataset': [None, '{0}', parse_string_list],
+        'shortname': ['shortName', '{0}', parse_string_list]
     }
 
     return parameter_map
diff --git a/SearchAPI/CMR/Translate/parse_cmr_response.py b/SearchAPI/CMR/Translate/parse_cmr_response.py
index 1a4ec8db..194fbb59 100644
--- a/SearchAPI/CMR/Translate/parse_cmr_response.py
+++ b/SearchAPI/CMR/Translate/parse_cmr_response.py
@@ -2,7 +2,7 @@
 from defusedxml.lxml import fromstring
 import datetime
 from .fields import get_field_paths, attr_path
-
+import re
 
 def parse_cmr_response(r, req_fields):
     """
@@ -205,13 +205,46 @@ def float_or_none(a):
         result['downloadUrl'] = urls[0]
         result['fileName'] = result['granuleName'] + '.' + urls[0].split('.')[-1]
+
+
+    def get_all_urls():
+        accessPath = './OnlineAccessURLs/OnlineAccessURL/URL'
+        resourcesPath = './OnlineResources/OnlineResource/URL'
+
+        access_urls = get_all_vals(accessPath)
+        if access_urls is None:
+            access_urls = []
+
+        resource_urls = get_all_vals(resourcesPath)
+        if resource_urls is None:
+            resource_urls = []
+
+        return list(set([*access_urls, *resource_urls]))
+
+    def get_http_urls():
+        return [url for url in get_all_urls() if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]
+
+    def get_s3_urls():
+        return [url for url in get_all_urls() if not url.endswith('.md5') and (url.startswith('s3://') or 's3credentials' in url)]
+
     if result.get('product_file_id', '').startswith('OPERA'):
         result['beamMode'] = get_val(attr_path('BEAM_MODE'))
-        accessUrls = [url for url in get_all_vals('./OnlineAccessURLs/OnlineAccessURL/URL') if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]
-        OnlineResources = [url for url in get_all_vals('./OnlineResources/OnlineResource/URL') if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]
-        result['additionalUrls'] = list(set([*accessUrls, *OnlineResources]))
+        result['additionalUrls'] = get_http_urls()
         result['configurationName'] = "Interferometric Wide. 250 km swath, 5 m x 20 m spatial resolution and burst synchronization for interferometry. IW is considered to be the standard mode over land masses."
-        result['browse'] = [url for url in get_all_vals('./AssociatedBrowseImageUrls/ProviderBrowseUrl/URL') if not url.startswith('s3://')]
+
+        if (providerbrowseUrls := get_all_vals('./AssociatedBrowseImageUrls/ProviderBrowseUrl/URL')):
+            result['browse'] = [url for url in providerbrowseUrls if not url.startswith('s3://')]
+
+        if 'STATIC' in result['processingLevel']:
+            result['validityStartDate'] = get_val('./Temporal/SingleDateTime')
+    elif result.get('product_file_id', '').startswith('S1-GUNW') and result.get('ariaVersion') is None:
+        version_unformatted = result.get('granuleName').split('v')[-1]
+        result['ariaVersion'] = re.sub(r'[^0-9\.]', '', version_unformatted.replace("_", '.'))
+
+    if result.get('platform', '') == 'NISAR':
+        result['additionalUrls'] = get_http_urls()
+        result['s3Urls'] = get_s3_urls()
+
     return result
@@ -232,16 +265,19 @@ def wkt_from_gpolygon(gpoly):
         # Close the shape if needed
         shapes[-1].append(shapes[-1][0])
 
-    longest = shapes[0]
-    for shape in shapes:
-        if len(shape) > len(longest):
-            longest = shape
+    if len(shapes):
+        longest = shapes[0]
+        for shape in shapes:
+            if len(shape) > len(longest):
+                longest = shape
 
-    wkt_shape = 'POLYGON(({0}))'.format(
-        ','.join(['{0} {1}'.format(x['lon'], x['lat']) for x in longest])
-    )
+        wkt_shape = 'POLYGON(({0}))'.format(
+            ','.join(['{0} {1}'.format(x['lon'], x['lat']) for x in longest])
+        )
 
-    return longest, wkt_shape
+        return longest, wkt_shape
+
+    return '', ''
 
 
 def shape_not_closed(shapes):
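Note on the `ariaVersion` fallback above: when CMR supplies no VERSION attribute for an S1-GUNW product, the version is recovered from the granule name. A worked sketch of the same transformation — the granule name below is invented to show the shape of the suffix, not taken from real data:

    import re

    # Hypothetical GUNW-style granule name; only the trailing 'v2_0_6' matters here.
    granule_name = 'S1-GUNW-D-R-087-tops-20190301_20190223-161540-PP-7a85-v2_0_6'

    version_unformatted = granule_name.split('v')[-1]  # '2_0_6'
    aria_version = re.sub(r'[^0-9\.]', '', version_unformatted.replace('_', '.'))
    print(aria_version)  # '2.0.6'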
diff --git a/requirements.txt b/requirements.txt
index ad7ba162..8c2868ef 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,20 +3,20 @@ argcomplete==1.12.3
 asn1crypto==1.4.0
 atomicwrites==1.4.0
 attrs==21.2.0
-backports.zoneinfo==0.2.1;python_version<"3.9" # https://stackoverflow.com/questions/71712258/error-could-not-build-wheels-for-backports-zoneinfo-which-is-required-to-insta
+blinker==1.7.0
 boto3==1.19.0
 botocore==1.22.0
 Brotli==1.0.9
-certifi==2021.10.8
+certifi==2023.7.22
 cffi==1.15.0
 cfn-flip==1.3.0
 chardet==4.0.0
 charset-normalizer==2.0.7
-click==7.1.2
+ciso8601==2.3.1
+click==8.1.7
 click-plugins==1.1.1
 cligj==0.7.2
 coverage==6.0.2
-# cryptography==3.4.7 # Version 35.0.0 breaks zappa=0.52.0 deployments. (yes, their versions went from 3.4.7, 3.4.8, then 35.0.0)
 dateparser==1.1.0
 DateTime==4.3
 defusedxml==0.7.1
@@ -24,58 +24,58 @@ Deprecated==1.2.13
 docutils==0.17.1
 Dumper==1.2.0
 durationpy==0.5
+exceptiongroup==1.2.1
 execnet==1.9.0
-Fiona==1.8.20
-Flask==2.0.2
+fiona==1.9.6
+Flask==2.3.2
 Flask-Compress==1.10.1
 Flask-Cors==3.0.10
 flask-lambda-python36==0.1.0
 flask-talisman==0.8.1
-future==0.18.2
 geojson==2.5.0
 geomet==0.3.0
+geopandas==0.10.0
 gitdb==4.0.7
 gitdb2==4.0.2
-GitPython==3.1.24
-gunicorn==20.1.0
+GitPython==3.1.41
+gunicorn==22.0.0
 hjson==3.0.2
 hypothesis==6.37.0
 idna==3.3
 importlib-metadata==4.8.1
-iniconfig==1.1.1
-itsdangerous==2.0.1
-Jinja2==3.0.2
+# iniconfig==1.2.1
+itsdangerous==2.2.0
+Jinja2==3.1.3
 jmespath==0.10.0
-joblib==1.1.0
+joblib==1.2.0
 kappa==0.6.0
 kml2geojson==4.0.2
 lambda-packages==0.20.0
 libpagure==0.22
-lxml==4.7.1
-MarkupSafe==2.0.1
+lxml==5.2.1
+MarkupSafe==2.1.5
 more-itertools==8.10.0
 munch==2.5.0
-packaging==21.0
+numpy==1.22.4
+packaging==24.0
 pandas==1.3.4
 pathlib2==2.3.6
 pep517==0.12.0
 pexpect==4.8.0
 pip-tools==6.4.0
 placebo==0.10.0
-pluggy==1.0.0
+pluggy==1.5.0
 ptyprocess==0.7.0
-py==1.10.0
 pycparser==2.20
 PyGithub==1.55
-PyJWT==2.3.0
+PyJWT==2.4.0
 pykml==0.2.0
-# PyNaCl==1.4.0 # breaks zappa 0.52.0 (didn't check earlier versions, not sure if we need this)
-# pyOpenSSL==21.0.0 # (Requires cryptography, which makes zappa throw)
+# PyNaCl==1.5.0
 pyparsing==2.4.7
 pyproj==3.6.0
 pyshp==2.1.3
-pytest==6.2.5
-pytest-automation==1.1.2
+pytest==8.1.1
+pytest-automation==3.0.0
 pytest-cov==3.0.0
 pytest-forked==1.3.0
 pytest-xdist==2.4.0
@@ -84,28 +84,31 @@ python-gitlab==2.10.1
 python-slugify==5.0.2
 pytz==2021.3
 pytz-deprecation-shim==0.1.0.post0
-PyYAML==6.0
+PyYAML==6.0.1
 regex==2021.10.8
 requests==2.26.0
 requests-toolbelt==0.9.1
 responses==0.18.0
 s3transfer==0.5.0
 scandir==1.10.0
-scikit-learn==1.1.3 # WARNING: 0.24.1 breaks ShorelineMask26 test
+scikit-learn==1.1.3
+scipy==1.13.0
 serverless-wsgi==3.0.0
 Shapely==1.7.1
 six==1.16.0
+# sklearn==0.0.post5
 smmap==4.0.0
+sortedcontainers==2.4.0
 text-unidecode==1.3
+threadpoolctl==3.4.0
 toml==0.10.2
-tomli==1.2.1
+tomli==2.0.1
 typing-extensions==3.10.0.2
 tzdata==2021.4
-tzlocal==2.0.0 # tzlocal.get_localzone() changed it's return type after this (No 'localize' attr)
+tzlocal==2.0.0
 urllib3==1.26.7
-Werkzeug==2.0.2
-WKTUtils==1.1.6
-wrapt==1.13.2
+Werkzeug==2.3.3
+WKTUtils==2.0.0
+wrapt==1.16.0
 zipp==3.6.0
-zope.interface==4.7.2
-numpy==1.21.3
+zope.interface==4.7.2
\ No newline at end of file
diff --git a/yml_tests/test_URLs.yml b/yml_tests/test_URLs.yml
index fccfad74..a716d802 100644
--- a/yml_tests/test_URLs.yml
+++ b/yml_tests/test_URLs.yml
@@ -1069,8 +1069,8 @@ tests:
 
 - platform SB:
     platform: SB
-    start: 1+year+ago
-    end: now
+    start: "2016-01-01T00:00:00Z"
+    end: "2017-01-02T00:00:00Z"
     maxResults: 200
     output: csv
@@ -2472,8 +2472,8 @@ tests:
     expected file: csv
     expected code: 200
 
-- start yesterday count:
-    start: yesterday
+- start 3 days ago count:
+    start: 3 days ago
     output: count
 
     expected file: count
@@ -2486,8 +2486,8 @@ tests:
     expected file: count
     expected code: 200
 
-- start 1dayago count:
-    start: 1+day+ago
+- start 3daysago count:
+    start: 3+days+ago
     output: count
 
     expected file: count
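Note on the dateparser to ciso8601 swap in SearchAPI/Baseline/Calc.py and SearchAPI/Baseline/Stack.py: ciso8601 is a much faster, strict ISO 8601 parser, which fits here because the timestamps being parsed (ascendingNodeTime, startTime, stopTime, and the state-vector times) come back from CMR in ISO 8601 form. A minimal sketch of the behavioral difference; the literal timestamp is an illustrative value:

    import ciso8601

    # Strict ISO 8601 input parses fine.
    dt = ciso8601.parse_datetime('2019-03-01T16:15:40Z')
    print(dt.timestamp())

    # Unlike dateparser.parse, free-form input is rejected outright.
    try:
        ciso8601.parse_datetime('3 days ago')
    except ValueError as err:
        print(f'not ISO 8601: {err}')

Note also that dateparser==1.1.0 stays pinned in requirements.txt, so free-form date handling elsewhere in the API (such as the '3+days+ago' start values still exercised by yml_tests/test_URLs.yml) appears unchanged.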