diff --git a/.github/workflows/reusable-DeployStack-SearchAPI.yml b/.github/workflows/reusable-DeployStack-SearchAPI.yml
index b98ad650..9f2bd9a8 100644
--- a/.github/workflows/reusable-DeployStack-SearchAPI.yml
+++ b/.github/workflows/reusable-DeployStack-SearchAPI.yml
@@ -101,7 +101,6 @@ jobs:
       run: |
         sudo apt-get update
        sudo apt-get install libgdal-dev
-        export SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True
        python3 -m pip install --no-cache-dir --upgrade pip
        python3 -m pip install --no-cache-dir wheel Cython
        python3 -m pip install -r requirements.txt --use-deprecated=legacy-resolver
diff --git a/Dockerfile b/Dockerfile
index f822390d..9b6f5d86 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -20,7 +20,6 @@ WORKDIR "${LAMBDA_TASK_ROOT}/Discovery-SearchAPI"
 COPY requirements.txt .
 RUN mkdir "${LAMBDA_TASK_ROOT}/python-packages"
 ENV PYTHONPATH "${PYTHONPATH}:${LAMBDA_TASK_ROOT}/python-packages"
-ENV SKLEARN_ALLOW_DEPRECATED_SKLEARN_PACKAGE_INSTALL=True
 RUN python3 -m pip install --no-cache-dir -r requirements.txt --target "${LAMBDA_TASK_ROOT}/python-packages"

 ## Copy required files (Already inside Discovery-SearchAPI dir):
diff --git a/SearchAPI/Baseline/Calc.py b/SearchAPI/Baseline/Calc.py
index edc10550..78d4cc7e 100644
--- a/SearchAPI/Baseline/Calc.py
+++ b/SearchAPI/Baseline/Calc.py
@@ -1,7 +1,6 @@
 from math import sqrt, cos, sin, radians
 import numpy as np
-import dateparser
-
+import ciso8601
 # WGS84 constants
 a = 6378137
 f = pow((1.0 - 1 / 298.257224), 2)
@@ -15,17 +14,17 @@ def calculate_perpendicular_baselines(reference, stack):
            product['noStateVectors'] = True
            continue

-        asc_node_time = dateparser.parse(product['ascendingNodeTime']).timestamp()
+        asc_node_time = ciso8601.parse_datetime(product['ascendingNodeTime']).timestamp()

-        start = dateparser.parse(product['startTime']).timestamp()
-        end = dateparser.parse(product['stopTime']).timestamp()
+        start = ciso8601.parse_datetime(product['startTime']).timestamp()
+        end = ciso8601.parse_datetime(product['stopTime']).timestamp()
        center = start + ((end - start) / 2)
        product['relative_start_time'] = start - asc_node_time
        product['relative_center_time'] = center - asc_node_time
        product['relative_end_time'] = end - asc_node_time

-        t_pre = dateparser.parse(product['sv_t_pos_pre']).timestamp()
-        t_post = dateparser.parse(product['sv_t_pos_post']).timestamp()
+        t_pre = ciso8601.parse_datetime(product['sv_t_pos_pre']).timestamp()
+        t_post = ciso8601.parse_datetime(product['sv_t_pos_post']).timestamp()
        product['relative_sv_pre_time'] = t_pre - asc_node_time
        product['relative_sv_post_time'] = t_post - asc_node_time
diff --git a/SearchAPI/Baseline/Stack.py b/SearchAPI/Baseline/Stack.py
index 8d0d7559..0f20ab89 100644
--- a/SearchAPI/Baseline/Stack.py
+++ b/SearchAPI/Baseline/Stack.py
@@ -1,4 +1,4 @@
-import dateparser
+import ciso8601
 from SearchAPI.CMR.Translate import translate_params, input_fixer
 from SearchAPI.CMR.Query import CMRQuery
 from .Calc import calculate_perpendicular_baselines
@@ -178,13 +178,13 @@ def get_default_product_type(reference):
 def calculate_temporal_baselines(reference, stack):
    for product in stack:
        if product['granuleName'] == reference:
-            reference_start = dateparser.parse(product['startTime'])
+            reference_start = ciso8601.parse_datetime(product['startTime'])
            break

    for product in stack:
        if product['granuleName'] == reference:
            product['temporalBaseline'] = 0
        else:
-            start = dateparser.parse(product['startTime'])
+            start = ciso8601.parse_datetime(product['startTime'])
            product['temporalBaseline'] = (start.date() - reference_start.date()).days

    return stack
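Reviewer note on the dateparser → ciso8601 swap above: the two libraries have different contracts. `dateparser.parse` accepts fuzzy, human-style input and returns `None` on failure, while `ciso8601.parse_datetime` accepts only ISO 8601 strings and raises `ValueError` on anything else — which is fine here because CMR always returns ISO 8601 timestamps. A minimal sketch of the behavioral difference (the timestamp value is an illustrative CMR-style string, not from this PR):

```python
import ciso8601

# ISO 8601 strings, as CMR returns them, parse fine (and far faster than dateparser):
dt = ciso8601.parse_datetime('2018-01-12T14:18:23Z')
print(dt.timestamp())  # tz-aware datetime -> POSIX timestamp

# Anything non-ISO now raises instead of quietly returning None:
try:
    ciso8601.parse_datetime('yesterday morning')
except ValueError as err:
    print(f'rejected: {err}')
```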
diff --git a/SearchAPI/CMR/Output/geojson.py b/SearchAPI/CMR/Output/geojson.py
index 8178162f..9a7d71bf 100644
--- a/SearchAPI/CMR/Output/geojson.py
+++ b/SearchAPI/CMR/Output/geojson.py
@@ -58,13 +58,21 @@ def getItem(self, p):
        except TypeError:
            pass

+    if p.get('absoluteOrbit') is not None and len(p.get('absoluteOrbit')):
+        p['absoluteOrbit'] = p['absoluteOrbit'][0]
+
+    coordinates = []
+
+    if p.get('shape') is not None:
+        coordinates = [
+            [[float(c['lon']), float(c['lat'])] for c in p.get('shape')]
+        ]
+
    result = {
        'type': 'Feature',
        'geometry': {
            'type': 'Polygon',
-            'coordinates': [
-                [[float(c['lon']), float(c['lat'])] for c in p['shape']]
-            ]
+            'coordinates': coordinates
        },
        'properties': {
            'beamModeType': p['beamModeType'],
@@ -82,7 +90,7 @@ def getItem(self, p):
            'insarStackId': p['insarGrouping'],
            'md5sum': p['md5sum'],
            'offNadirAngle': p['offNadirAngle'],
-            'orbit': p['absoluteOrbit'][0],
+            'orbit': p['absoluteOrbit'],
            'pathNumber': p['relativeOrbit'],
            'platform': p['platform'],
            'pointingAngle': p['pointingAngle'],
diff --git a/SearchAPI/CMR/Output/jsonlite.py b/SearchAPI/CMR/Output/jsonlite.py
index 6fabc9db..660440a7 100644
--- a/SearchAPI/CMR/Output/jsonlite.py
+++ b/SearchAPI/CMR/Output/jsonlite.py
@@ -42,7 +42,8 @@ def req_fields_jsonlite():
        'subswath',
        'pgeVersion',
        'operaBurstID',
-        'additionalUrls'
+        'additionalUrls',
+        's3Urls'
    ]
    return fields
@@ -181,7 +182,15 @@ def getItem(self, p):
    if p.get('operaBurstID') is not None or result['productID'].startswith('OPERA'):
        result['opera'] = {
            'operaBurstID': p.get('operaBurstID'),
-            'additionalUrls': p.get('additionalUrls')
+            'additionalUrls': p.get('additionalUrls'),
+        }
+        if p.get('validityStartDate'):
+            result['opera']['validityStartDate'] = p.get('validityStartDate')
+
+    if p.get('platform') == 'NISAR':
+        result['nisar'] = {
+            'additionalUrls': p.get('additionalUrls', []),
+            's3Urls': p.get('s3Urls', [])
        }

    return result
diff --git a/SearchAPI/CMR/Output/jsonlite2.py b/SearchAPI/CMR/Output/jsonlite2.py
index b403e566..5cdff707 100644
--- a/SearchAPI/CMR/Output/jsonlite2.py
+++ b/SearchAPI/CMR/Output/jsonlite2.py
@@ -61,4 +61,7 @@ def getItem(self, p):
    if p.get('opera') is not None:
        result['s1o'] = p['opera']

+    if p.get('nisar') is not None:
+        result['nsr'] = p['nisar']
+
    return result
diff --git a/SearchAPI/CMR/Query.py b/SearchAPI/CMR/Query.py
index 63d1d6bd..77b3f146 100644
--- a/SearchAPI/CMR/Query.py
+++ b/SearchAPI/CMR/Query.py
@@ -114,7 +114,7 @@ def chunk_list(source_list, n):
        if chunk_type in params:
            params[chunk_type] = chunk_list(list(set(params[chunk_type])), 500)  # distinct and split

-        list_param_names = ['platform', 'collections']  # these parameters will dodge the subquery system
+        list_param_names = ['platform', 'collections', 'shortname']  # these parameters will dodge the subquery system

        for k, v in params.items():
            if k in list_param_names:
diff --git a/SearchAPI/CMR/SubQuery.py b/SearchAPI/CMR/SubQuery.py
index 405e61ce..00087a00 100644
--- a/SearchAPI/CMR/SubQuery.py
+++ b/SearchAPI/CMR/SubQuery.py
@@ -7,7 +7,7 @@
 from flask import request

 from SearchAPI.asf_env import get_config
-from SearchAPI.CMR.Translate import parse_cmr_response
+from SearchAPI.CMR.Translate import parse_cmr_response, platform_datasets
 from SearchAPI.CMR.Exceptions import CMRError

 import boto3
@@ -25,6 +25,9 @@ def __init__(self, req_fields, params, extra_params):
        self.headers = {}
        token = request.args.get("cmr_token")

+        if token is None:
+            token = request.form.get('cmr_token')
+
        if token != None:
            self.headers['Authorization'] = f'Bearer {token}'

@@ -58,9 +61,15 @@ def combine_params(self, params, extra_params):
    def should_use_asf_frame(self):
        asf_frame_platforms = ['SENTINEL-1A', 'SENTINEL-1B', 'ALOS']
-
+        asf_frame_datasets = ['SENTINEL-1', 'OPERA-S1', 'SLC-BURST', 'ALOS PALSAR', 'ALOS AVNIR-2']
+
+        asf_frame_collections = []
+        for dataset in asf_frame_datasets:
+            asf_frame_collections.extend(platform_datasets.get(dataset))
+
        return any([
-            p[0] == 'platform[]' and p[1] in asf_frame_platforms
+            p[0] == 'platform[]' and p[1] in asf_frame_platforms
+            or p[0] == 'echo_collection_id[]' and p[1] in asf_frame_collections
            for p in self.params
        ])
@@ -167,7 +176,7 @@ def get_page(self, session):
        query_duration = perf_counter() - q_start
        logging.debug(f'CMR query time: {query_duration}')

-        self.log_subquery_time({'time': query_duration, 'status': response.status_code})
+        # self.log_subquery_time({'time': query_duration, 'status': response.status_code})

        if query_duration > 10:
            self.log_slow_cmr_response(session, response, query_duration)
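Reviewer note on the `cmr_token` change above: the token is now read from the query string first, then from a form-encoded POST body, so POSTed searches can authenticate too. A self-contained sketch of the same lookup order (the Flask app and route names here are hypothetical, not part of the SearchAPI):

```python
from flask import Flask, request

app = Flask(__name__)

@app.route('/echo-token', methods=['GET', 'POST'])
def echo_token():
    # Prefer ?cmr_token=... in the URL; fall back to form fields in a POST body.
    token = request.args.get('cmr_token')
    if token is None:
        token = request.form.get('cmr_token')

    headers = {}
    if token is not None:
        headers['Authorization'] = f'Bearer {token}'
    return {'authorized': 'Authorization' in headers}
```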
"C1261819120-ASF", + "C1261833063-ASF", + "C1257349108-ASF", + "C1261819121-ASF", + "C1261833064-ASF", + "C1257349107-ASF", + "C1261819145-ASF", + "C1261833076-ASF", + "C1257349103-ASF", + "C1261819258-ASF", + "C1261833127-ASF", + "C1257349102-ASF", + "C1261819270-ASF", + "C1261846741-ASF", + "C1257349096-ASF", + "C1261819275-ASF", + "C1261846880-ASF", + "C1257349095-ASF", + "C1261819281-ASF", + "C1261846994-ASF", + "C1257349094-ASF", + "C1261819282-ASF", + "C1261847095-ASF", + "C1257349093-ASF", + "C1262135006-ASF", + # PROD + "C2850220296-ASF", + "C2853068083-ASF", + "C2727902012-ASF", + "C2850223384-ASF", + "C2853086824-ASF", + "C2727901263-ASF", + "C2850224301-ASF", + "C2853089814-ASF", + "C2727901639-ASF", + "C2850225137-ASF", + "C2853091612-ASF", + "C2727901523-ASF", + "C2850225585-ASF", + "C2853145197-ASF", + "C2727900439-ASF", + "C2850234202-ASF", + "C2853147928-ASF", + "C2723110181-ASF", + "C2850235455-ASF", + "C2853153429-ASF", + "C2727900827-ASF", + "C2850237619-ASF", + "C2853156054-ASF", + "C2727900080-ASF", + "C2850259510-ASF", + "C2854332392-ASF", + "C2727896667-ASF", + "C2850261892-ASF", + "C2854335566-ASF", + "C2727897718-ASF", + "C2850262927-ASF", + "C2854338529-ASF", + "C2727896018-ASF", + "C2850263910-ASF", + "C2854341702-ASF", + "C2727896460-ASF", + "C2850265000-ASF", + "C2854344945-ASF", + "C2727894546-ASF", + "C2874824964-ASF" + ], } diff --git a/SearchAPI/CMR/Translate/fields.py b/SearchAPI/CMR/Translate/fields.py index b9f5bebe..f639ff28 100644 --- a/SearchAPI/CMR/Translate/fields.py +++ b/SearchAPI/CMR/Translate/fields.py @@ -61,6 +61,7 @@ def get_field_paths(): 'track': attr_path('PATH_NUMBER'), 'pgeVersion': "./PGEVersionClass/PGEVersion", 'additionalUrls': "./OnlineAccessURLs", + 's3Urls': "./OnlineAccessURLs", # BURST FIELDS 'absoluteBurstID': attr_path('BURST_ID_ABSOLUTE'), diff --git a/SearchAPI/CMR/Translate/input_fixer.py b/SearchAPI/CMR/Translate/input_fixer.py index 0a751532..602c6b1e 100644 --- a/SearchAPI/CMR/Translate/input_fixer.py +++ b/SearchAPI/CMR/Translate/input_fixer.py @@ -108,12 +108,14 @@ def input_fixer(params, is_prod: bool = False, provider: str = "ASF"): if any_processing_level: fixed_params['collections'] = collection_list - elif k == 'datasets': + elif k == 'dataset': fixed_params['collections'] = [] for dataset in params[k]: - logging.warn(dataset) - logging.warn(platform_datasets.get(dataset)) - fixed_params['collections'].extend(platform_datasets.get(dataset)) + if platform_datasets.get(dataset): + fixed_params['collections'].extend(platform_datasets.get(dataset)) + else: + raise ValueError(f'Could not find dataset named "{dataset}" provided for dataset keyword.') + logging.warn(fixed_params) elif k == 'beammode': beammap = { diff --git a/SearchAPI/CMR/Translate/input_map.py b/SearchAPI/CMR/Translate/input_map.py index 4b4a50e9..fc879799 100644 --- a/SearchAPI/CMR/Translate/input_map.py +++ b/SearchAPI/CMR/Translate/input_map.py @@ -56,8 +56,9 @@ def input_map(): 'relativeburstid': ['attribute[]', 'int,BURST_ID_RELATIVE,{0}', parse_int_list], 'absoluteburstid': ['attribute[]', 'int,BURST_ID_ABSOLUTE,{0}', parse_int_list], 'fullburstid': ['attribute[]', 'string,BURST_ID_FULL,{0}', parse_string_list], - 'operaburstid': ['attribute[]', 'string,OPERA_BURST_ID,{0}', parse_string_list], - 'datasets': [None, '{0}', parse_string_list] + 'operaburstid': ['attribute[]', 'string,OPERA_BURST_ID,{0}', parse_string_list], + 'dataset': [None, '{0}', parse_string_list], + 'shortname': ['shortName', '{0}', parse_string_list] } return parameter_map 
diff --git a/SearchAPI/CMR/Translate/parse_cmr_response.py b/SearchAPI/CMR/Translate/parse_cmr_response.py
index 1a4ec8db..83c93e31 100644
--- a/SearchAPI/CMR/Translate/parse_cmr_response.py
+++ b/SearchAPI/CMR/Translate/parse_cmr_response.py
@@ -205,13 +205,41 @@ def float_or_none(a):
        result['downloadUrl'] = urls[0]
        result['fileName'] = result['granuleName'] + '.' + urls[0].split('.')[-1]
+
+
+    def get_all_urls():
+        accessPath = './OnlineAccessURLs/OnlineAccessURL/URL'
+        resourcesPath = './OnlineResources/OnlineResource/URL'
+
+        access_urls = get_all_vals(accessPath)
+        if access_urls is None:
+            access_urls = []
+
+        resource_urls = get_all_vals(resourcesPath)
+        if resource_urls is None:
+            resource_urls = []
+
+        return list(set([*access_urls, *resource_urls]))
+
+    def get_http_urls():
+        return [url for url in get_all_urls() if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]
+
+    def get_s3_urls():
+        return [url for url in get_all_urls() if not url.endswith('.md5') and (url.startswith('s3://') or 's3credentials' in url)]
+
    if result.get('product_file_id', '').startswith('OPERA'):
        result['beamMode'] = get_val(attr_path('BEAM_MODE'))
-        accessUrls = [url for url in get_all_vals('./OnlineAccessURLs/OnlineAccessURL/URL') if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]
-        OnlineResources = [url for url in get_all_vals('./OnlineResources/OnlineResource/URL') if not url.endswith('.md5') and not url.startswith('s3://') and not 's3credentials' in url]
-        result['additionalUrls'] = list(set([*accessUrls, *OnlineResources]))
+        result['additionalUrls'] = get_http_urls()
        result['configurationName'] = "Interferometric Wide. 250 km swath, 5 m x 20 m spatial resolution and burst synchronization for interferometry. IW is considered to be the standard mode over land masses."
-        result['browse'] = [url for url in get_all_vals('./AssociatedBrowseImageUrls/ProviderBrowseUrl/URL') if not url.startswith('s3://')]
+
+        if (providerbrowseUrls := get_all_vals('./AssociatedBrowseImageUrls/ProviderBrowseUrl/URL')):
+            result['browse'] = [url for url in providerbrowseUrls if not url.startswith('s3://')]
+
+        if 'STATIC' in result['processingLevel']:
+            result['validityStartDate'] = get_val('./Temporal/SingleDateTime')
+
+    if result.get('platform', '') == 'NISAR':
+        result['additionalUrls'] = get_http_urls()
+        result['s3Urls'] = get_s3_urls()

    return result
@@ -232,16 +260,19 @@ def wkt_from_gpolygon(gpoly):
    # Close the shape if needed
    shapes[-1].append(shapes[-1][0])

-    longest = shapes[0]
-    for shape in shapes:
-        if len(shape) > len(longest):
-            longest = shape
+    if len(shapes):
+        longest = shapes[0]
+        for shape in shapes:
+            if len(shape) > len(longest):
+                longest = shape

-    wkt_shape = 'POLYGON(({0}))'.format(
-        ','.join(['{0} {1}'.format(x['lon'], x['lat']) for x in longest])
-    )
+        wkt_shape = 'POLYGON(({0}))'.format(
+            ','.join(['{0} {1}'.format(x['lon'], x['lat']) for x in longest])
+        )

-    return longest, wkt_shape
+        return longest, wkt_shape
+
+    return '', ''


 def shape_not_closed(shapes):
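Reviewer note on the new URL helpers above: `get_http_urls()` and `get_s3_urls()` partition the same deduplicated URL set — `.md5` checksum links are dropped from both, and a URL counts as "S3" if it uses the `s3://` scheme or points at an `s3credentials` endpoint. A self-contained sketch of the same filters over a hypothetical URL list:

```python
urls = [
    'https://datapool.asf.alaska.edu/GRD_HD/SB/example.zip',      # hypothetical
    'https://cumulus.asf.alaska.edu/s3credentials',               # hypothetical
    's3://asf-bucket/example.zip',                                # hypothetical
    'https://datapool.asf.alaska.edu/GRD_HD/SB/example.zip.md5',  # hypothetical
]

def http_urls(all_urls):
    return [u for u in all_urls
            if not u.endswith('.md5') and not u.startswith('s3://') and 's3credentials' not in u]

def s3_urls(all_urls):
    return [u for u in all_urls
            if not u.endswith('.md5') and (u.startswith('s3://') or 's3credentials' in u)]

# Together the two buckets cover every non-checksum URL exactly once:
assert set(http_urls(urls)) | set(s3_urls(urls)) == {u for u in urls if not u.endswith('.md5')}
```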
diff --git a/requirements.txt b/requirements.txt
index ad7ba162..8c2868ef 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,20 +3,20 @@ argcomplete==1.12.3
 asn1crypto==1.4.0
 atomicwrites==1.4.0
 attrs==21.2.0
-backports.zoneinfo==0.2.1;python_version<"3.9" # https://stackoverflow.com/questions/71712258/error-could-not-build-wheels-for-backports-zoneinfo-which-is-required-to-insta
+blinker==1.7.0
 boto3==1.19.0
 botocore==1.22.0
 Brotli==1.0.9
-certifi==2021.10.8
+certifi==2023.7.22
 cffi==1.15.0
 cfn-flip==1.3.0
 chardet==4.0.0
 charset-normalizer==2.0.7
-click==7.1.2
+ciso8601==2.3.1
+click==8.1.7
 click-plugins==1.1.1
 cligj==0.7.2
 coverage==6.0.2
-# cryptography==3.4.7 # Version 35.0.0 breaks zappa=0.52.0 deployments. (yes, their versions went from 3.4.7, 3.4.8, then 35.0.0)
 dateparser==1.1.0
 DateTime==4.3
 defusedxml==0.7.1
@@ -24,58 +24,58 @@ Deprecated==1.2.13
 docutils==0.17.1
 Dumper==1.2.0
 durationpy==0.5
+exceptiongroup==1.2.1
 execnet==1.9.0
-Fiona==1.8.20
-Flask==2.0.2
+fiona==1.9.6
+Flask==2.3.2
 Flask-Compress==1.10.1
 Flask-Cors==3.0.10
 flask-lambda-python36==0.1.0
 flask-talisman==0.8.1
-future==0.18.2
 geojson==2.5.0
 geomet==0.3.0
+geopandas==0.10.0
 gitdb==4.0.7
 gitdb2==4.0.2
-GitPython==3.1.24
-gunicorn==20.1.0
+GitPython==3.1.41
+gunicorn==22.0.0
 hjson==3.0.2
 hypothesis==6.37.0
 idna==3.3
 importlib-metadata==4.8.1
-iniconfig==1.1.1
-itsdangerous==2.0.1
-Jinja2==3.0.2
+# iniconfig==1.2.1
+itsdangerous==2.2.0
+Jinja2==3.1.3
 jmespath==0.10.0
-joblib==1.1.0
+joblib==1.2.0
 kappa==0.6.0
 kml2geojson==4.0.2
 lambda-packages==0.20.0
 libpagure==0.22
-lxml==4.7.1
-MarkupSafe==2.0.1
+lxml==5.2.1
+MarkupSafe==2.1.5
 more-itertools==8.10.0
 munch==2.5.0
-packaging==21.0
+numpy==1.22.4
+packaging==24.0
 pandas==1.3.4
 pathlib2==2.3.6
 pep517==0.12.0
 pexpect==4.8.0
 pip-tools==6.4.0
 placebo==0.10.0
-pluggy==1.0.0
+pluggy==1.5.0
 ptyprocess==0.7.0
-py==1.10.0
 pycparser==2.20
 PyGithub==1.55
-PyJWT==2.3.0
+PyJWT==2.4.0
 pykml==0.2.0
-# PyNaCl==1.4.0 # breaks zappa 0.52.0 (didn't check earlier versions, not sure if we need this)
-# pyOpenSSL==21.0.0 # (Requires cryptography, which makes zappa throw)
+# PyNaCl==1.5.0
 pyparsing==2.4.7
 pyproj==3.6.0
 pyshp==2.1.3
-pytest==6.2.5
-pytest-automation==1.1.2
+pytest==8.1.1
+pytest-automation==3.0.0
 pytest-cov==3.0.0
 pytest-forked==1.3.0
 pytest-xdist==2.4.0
@@ -84,28 +84,31 @@ python-gitlab==2.10.1
 python-slugify==5.0.2
 pytz==2021.3
 pytz-deprecation-shim==0.1.0.post0
-PyYAML==6.0
+PyYAML==6.0.1
 regex==2021.10.8
 requests==2.26.0
 requests-toolbelt==0.9.1
 responses==0.18.0
 s3transfer==0.5.0
 scandir==1.10.0
-scikit-learn==1.1.3 # WARNING: 0.24.1 breaks ShorelineMask26 test
+scikit-learn==1.1.3
+scipy==1.13.0
 serverless-wsgi==3.0.0
 Shapely==1.7.1
 six==1.16.0
+# sklearn==0.0.post5
 smmap==4.0.0
+sortedcontainers==2.4.0
 text-unidecode==1.3
+threadpoolctl==3.4.0
 toml==0.10.2
-tomli==1.2.1
+tomli==2.0.1
 typing-extensions==3.10.0.2
 tzdata==2021.4
-tzlocal==2.0.0 # tzlocal.get_localzone() changed it's return type after this (No 'localize' attr)
+tzlocal==2.0.0
 urllib3==1.26.7
-Werkzeug==2.0.2
-WKTUtils==1.1.6
-wrapt==1.13.2
+Werkzeug==2.3.3
+WKTUtils==2.0.0
+wrapt==1.16.0
 zipp==3.6.0
-zope.interface==4.7.2
-numpy==1.21.3
+zope.interface==4.7.2
\ No newline at end of file
diff --git a/yml_tests/helpers.py b/yml_tests/helpers.py
index ad555185..d7537228 100644
--- a/yml_tests/helpers.py
+++ b/yml_tests/helpers.py
@@ -7,7 +7,7 @@ def make_request(full_url, files=None, data=None):
    if data is None:
        data = {}
    try:
-        r = requests.post(full_url, files=files, data=data)
+        r = requests.post(full_url, files=files, json=data)
    except (requests.ConnectionError, requests.Timeout, requests.TooManyRedirects) as e:
        assert False, "Cannot connect to API: {0}. Error: '{1}'.".format(full_url, str(e))
    return r
diff --git a/yml_tests/test_Baseline.yml b/yml_tests/test_Baseline.yml
index 33296c59..3bdabdaa 100644
--- a/yml_tests/test_Baseline.yml
+++ b/yml_tests/test_Baseline.yml
@@ -45,24 +45,6 @@ tests:
    expected file: csv
    expected code: 200

-- baseline processing level:
-    reference: ALPSRP279071410
-    output: csv
-    processingLevel: L1.5
-    # use_maturity: True
-
-    expected file: csv
-    expected code: 200
-
-- baseline processing level list:
-    reference: ALPSRP279071410
-    output: csv
-    processingLevel: L1.5,L1.0
-    # use_maturity: True
-
-    expected file: csv
-    expected code: 200
-
 - baseline count:
    reference: E1_23942_STD_F155
    output: count
@@ -76,7 +58,7 @@ tests:
    output: download
    # use_maturity: True

-    expected file: download
+    expected file: x-python
    expected code: 200

 - baseline jsonlite:
@@ -116,15 +98,7 @@ tests:
    output: json
    # use_maturity: True

-    expected file: json
-    expected code: 200
-
-- baseline asf_search:
-    reference: S1B_IW_SLC__1SDV_20180112T141823_20180112T141850_009139_0105AA_E6F5
-    output: asf_search
-    # use_maturity: True
-
-    expected file: geojson
+    expected file: jsonlite  # dropping json output
    expected code: 200

 - no output specified:
@@ -162,22 +136,6 @@ tests:
    expected file: error json
    expected code: 400

-- requested processingLevel does not exist:
-    reference: E1_20936_STD_F155
-    processingLevel: L0.5
-    # use_maturity: True
-
-    expected file: error json
-    expected code: 400
-
-- missing stack ID:
-    reference: ALPSRS279162650
-    processinglevel: L1.0
-    # use_maturity: True
-
-    expected file: error json
-    expected code: 400
-
 - Sentinel RAW without baseline:
    reference: S1B_EW_RAW__0SDH_20200408T180120_20200408T180228_021056_027F1A_8312
    # use_maturity: True
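Reviewer note on the `yml_tests/helpers.py` change above: switching `requests.post(..., data=data)` to `json=data` changes the request body from `application/x-www-form-urlencoded` to `application/json`, so the server now sees the parameters via `request.get_json()` rather than `request.form`. A quick sketch of the difference, using httpbin-style echo semantics as an assumed test target (not part of this repo):

```python
import requests

payload = {'granule_list': 'ts1899', 'output': 'json'}

# Form-encoded: fields arrive as form data on the server side.
r_form = requests.post('https://httpbin.org/post', data=payload)
print(r_form.json()['form'])   # {'granule_list': 'ts1899', 'output': 'json'}

# JSON-encoded: fields arrive as a parsed JSON body instead.
r_json = requests.post('https://httpbin.org/post', json=payload)
print(r_json.json()['json'])   # {'granule_list': 'ts1899', 'output': 'json'}
```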
diff --git a/yml_tests/test_DateParser.yaml b/yml_tests/test_DateParser.yaml
index 176dc4f5..7bba8783 100644
--- a/yml_tests/test_DateParser.yaml
+++ b/yml_tests/test_DateParser.yaml
@@ -3,7 +3,7 @@ tests:
 - blank date:
    date: ""
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - today:
@@ -15,13 +15,13 @@ tests:
 - last day:
    date: last+day
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - last 10 days:
    date: last+10+days
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - last week:
@@ -45,19 +45,19 @@ tests:
 - next day:
    date: next+day
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - next 10 days:
    date: next+10+days
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - next hour:
    date: next+hour
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - next week:
@@ -81,37 +81,37 @@ tests:
 - yesterday morning:
    date: yesterday+morning
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - yesterday evening:
    date: yesterday+evening
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - 7daysago no spaces:
    date: 7daysago
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - ayearago no spaces:
    date: ayearago
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - beginning of last week:
    date: beginning+of+last+week
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - beginning of the year:
    date: beginning+of+the+year
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - le 01-02-2020:
@@ -129,7 +129,7 @@ tests:
 - now in Italian:
    date: adesso
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - tomorrow in Italian:
@@ -225,7 +225,7 @@ tests:
 - last February:
    date: last+February
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - February:
@@ -347,13 +347,13 @@ tests:
 - one oclock yesterday:
    date: one+oclock+yesterday
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - 1oclock yesterday:
    date: 1oclock+yesterday
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - midnight yesterday:
@@ -422,19 +422,19 @@ tests:
 - Thur:
    date: Thur
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - Thurs:
    date: Thurs
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - last Sunday:
    date: last+Sunday
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - Tuesday:
@@ -498,23 +498,23 @@ tests:
 - Special Characters 1:
    date: $@
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - Special Characters 2:
    date: _+
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - Special Characters 3:
    date: $!)
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date

 - Special Characters 4:
    date: ~~~
    expected file: error json
-    expected code: 200
+    expected code: 400
    expected error: could not parse date
\ No newline at end of file
diff --git a/yml_tests/test_URLs.yml b/yml_tests/test_URLs.yml
index fccfad74..a534b46e 100644
--- a/yml_tests/test_URLs.yml
+++ b/yml_tests/test_URLs.yml
@@ -44,6 +44,7 @@ tests:
 - absoluteOrbit zero range list:
    absoluteOrbit: 19364-19364,19189
+    beamMode: EW,IW
    maxresults: 10
    output: csv

@@ -76,13 +77,6 @@ tests:
    expected file: csv
    expected code: 200

-- bbox:
-    bbox: -150.2,65.0,-150.1,65.5
-    maxresults: 10
-    output: csv
-
-    expected file: csv
-    expected code: 200
-
 - circle:
    circle: -150.2,65.0,100
@@ -263,7 +257,7 @@ tests:
    maxresults: 100
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - collectionName Big Island 100:
@@ -271,7 +265,7 @@ tests:
    collectionName: Big Island, HI
    maxresults: 100
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - collectionName Cascade 100:
@@ -279,7 +273,7 @@ tests:
    collectionName: Cascade Volcanoes, CA/OR/WA
    maxresults: 100
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - end csv:
@@ -380,7 +374,7 @@ tests:
    maxresults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - flightLine Bonanza Creek:
@@ -388,7 +382,7 @@ tests:
    flightline: 05901
    maxresults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - frame list range:
@@ -449,25 +443,18 @@ tests:
    granule_list: S1B_S6_GRDH_1SDV_20190911T214309_20190911T214338_017995_021E10_5CCB
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - granule_list single geojson:
    granule_list: S1B_S6_GRDH_1SDV_20190911T214309_20190911T214338_017995_021E10_5CCB
    output: geojson

-- granule_list single asf_search:
-    granule_list: S1B_S6_GRDH_1SDV_20190911T214309_20190911T214338_017995_021E10_5CCB
-    output: asf_search
-
-    expected file: geojson
-    expected code: 200
-
 - granule_list single download:
    granule_list: S1B_S6_GRDH_1SDV_20190911T214309_20190911T214338_017995_021E10_5CCB
    output: download

-    expected file: download
+    expected file: x-python
    expected code: 200

 #In CMR reporting script, we report groupid as null on most datasets. Some datasets updated to expect blank file & moved to partial pass yaml
@@ -475,42 +462,42 @@ tests:
 - groupid S1 GRD:
    groupid: S1B_S1DV_0492_0497_017567_041
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - groupid S1 Insar:
    groupid: S1-GUNW-D-R-087-tops-20190816_20190804-161614-19149N_17138N-PP-fee7-v2_0_2
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - groupid SMAP:
    groupid: SP_24535_A_006
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - groupid UAVSAR:
    groupid: UA_ChiVol_00700_15030_010_150330_L090_CX_01
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - groupid ALOS PALSAR:
    groupid: ALPSRP279071100
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - groupid AIRSAR:
    groupid: ts1899
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - instrument alos palsar:
@@ -518,7 +505,7 @@ tests:
    maxResults: 20
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - instrument alos avnir:
@@ -526,7 +513,7 @@ tests:
    maxResults: 20
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - instrument alos avnir jsonlite:
@@ -705,7 +692,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - lookDirection L:
@@ -713,7 +700,7 @@ tests:
    lookDirection: L
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - lookDirection RIGHT:
@@ -721,7 +708,7 @@ tests:
    lookDirection: RIGHT
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - lookDirection R:
@@ -729,7 +716,7 @@ tests:
    lookDirection: R
    maxResults: 10
    output: JSON

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - maxBaselinePerp:
@@ -842,7 +829,7 @@ tests:
    maxResults: 1
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - maxResults 2 json:
@@ -850,7 +837,7 @@ tests:
    maxResults: 2
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - maxResults 1 geojson:
@@ -885,22 +872,6 @@ tests:
    expected file: jsonlite
    expected code: 200

-- maxResults 1 asf_search:
-    platform: SENTINEL-1
-    maxResults: 1
-    output: asf_search
-
-    expected file: geojson
-    expected code: 200
-
-- maxResults 2 asf_search:
-    platform: SENTINEL-1
-    maxResults: 2
-    output: asf_search
-
-    expected file: geojson
-    expected code: 200
-
 - offNadirAngle single:
    offNadirAngle: 21.5
    maxResults: 10
@@ -913,7 +884,7 @@ tests:
    offNadirAngle: 21.5,23.1,27.1
    maxResults: 10
    output: csv
-    
+
    expected file: csv
    expected code: 200
@@ -961,7 +932,7 @@ tests:
    maxResults: 200
    output: download

-    expected file: download
+    expected file: x-python
    expected code: 200

 - output download Avnir:
@@ -970,7 +941,7 @@ tests:
    maxResults: 2
    output: download

-    expected file: download
+    expected file: x-python
    expected code: 200

 - output geojson Sentinel:
@@ -990,29 +961,12 @@ tests:
    expected file: geojson
    expected code: 200

-- output asf_search Sentinel:
-    platform: SB
-    maxResults: 200
-    output: asf_search
-
-    expected file: geojson
-    expected code: 200
-
-- output asf_search Avnir:
-    platform: Alos
-    instrument: Avnir-2
-    maxResults: 2
-    output: asf_search
-
-    expected file: geojson
-    expected code: 200
-
 - output json:
    platform: SB
    maxResults: 100
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - output jsonlite:
@@ -1069,8 +1023,8 @@ tests:
 - platform SB:
    platform: SB
-    start: 1+year+ago
-    end: now
+    start: "2016-01-01T00:00:00Z"
+    end: "2017-01-02T00:00:00Z"
    maxResults: 200
    output: csv

@@ -1079,7 +1033,8 @@ tests:
 - platform J1:
    platform: J1
-    polygon: -141.7461,64.8261,-140.3172,63.5722,-137.8891,64.7214,-141.7461,64.8261
+    # polygon: -141.7461,64.8261,-140.3172,63.5722,-137.8891,64.7214,-141.7461,64.8261
+    intersectsWith: POLYGON((-141.7461 64.8261, -140.3172 63.5722, -137.8891 64.7214, -141.7461 64.8261))
    maxResults: 100
    output: csv

@@ -1089,7 +1044,8 @@ tests:
 - platform A3:
    platform: A3
    processingLevel: L1.0
-    polygon: -148.52,64.63,-150.41,64.64,-149.58,63.86,-148.52,64.63
+    # polygon: -148.52,64.63,-150.41,64.64,-149.58,63.86,-148.52,64.63
+    intersectsWith: POLYGON((-148.52 64.63, -150.41 64.64, -149.58 63.86, -148.52 64.63))
    maxResults: 100
    output: csv

@@ -1127,7 +1083,7 @@ tests:
    maxResults: 10
    output: jSoN

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - platform R1 2:
@@ -1810,7 +1766,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization VV:
@@ -1819,7 +1775,7 @@ tests:
    polarization: VV
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization HH HV:
@@ -1827,7 +1783,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization Dual VV:
@@ -1835,7 +1791,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization QUADRATURE:
@@ -1843,7 +1799,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization VV VH:
@@ -1859,7 +1815,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization Dual VH:
@@ -1867,7 +1823,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization hH:
@@ -1875,7 +1831,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization Vv:
@@ -1883,7 +1839,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization Hh hV:
@@ -1891,7 +1847,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization Dual vv:
@@ -1899,7 +1855,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization quadrature:
@@ -1907,7 +1863,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization vv VH:
@@ -1915,7 +1871,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization Dual hv:
@@ -1923,7 +1879,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization dual VH:
@@ -1931,7 +1887,7 @@ tests:
    maxResults: 10
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - polarization dual vh:
@@ -1942,24 +1898,19 @@ tests:
    expected file: jsonlite
    expected code: 200

-- polygon:
-    polygon: -148.52,64.63,-150.41,64.64,-149.58,63.86,-148.52,64.63
+- polygon lowercase:
+    # polygon: -148.52,64.63,-150.41,64.64,-149.58,63.86,-148.52,64.63
+    intersectsWith: polygon(( -148.52 64.63, -150.41 64.64, -149.58 63.86, -148.52 64.63 ))
    maxResults: 100
    output: csv

    expected file: csv
    expected code: 200

-- polygon uppercase:
-    POLYGON: -148.52,64.63,-150.41,64.64,-149.58,63.86,-148.52,64.63
-    maxResults: 100
-    output: csv
-
-    expected file: csv
-    expected code: 200
-
 - polygon multi:
-    polygon: 12.13,41.74,13.4,41.74,13.4,42.75,12.13,42.75,12.13,41.74
+    # polygon: 12.13,41.74,13.4,41.74,13.4,42.75,12.13,42.75,12.13,41.74
+    intersectsWith: POLYGON((12.13 41.74, 13.4 41.74, 13.4 42.75, 12.13 42.75, 12.13 41.74))
    platform: Sentinel-1A,Sentinel-1B
    processingLevel: SLC
    start: 2015-05-01T00:00:00UTC
@@ -1969,19 +1920,21 @@ tests:
    expected file: csv
    expected code: 200

-- polygon closure 1:
-    polygon: 12.13,41.74,13.4,41.74,13.4,42.75,12.13,42.75
-    platform: Sentinel-1A,Sentinel-1B
-    processingLevel: SLC
-    start: 2015-05-01T00:00:00UTC
-    output: csv
-    maxResults: 100
+# - polygon closure 1:
+#     # polygon: 12.13,41.74,13.4,41.74,13.4,42.75,12.13,42.75
+#     intersectsWith: POLYGON((12.13 41.74, 13.4 41.74, 13.4 42.75, 12.13 42.75))
+#     platform: Sentinel-1A,Sentinel-1B
+#     processingLevel: SLC
+#     start: 2015-05-01T00:00:00UTC
+#     output: csv
+#     maxResults: 100

-    expected file: csv
-    expected code: 200
+#     expected file: csv
+#     expected code: 200

 - polygon closure 2:
-    polygon: 12.13,41.74,13.4,41.74,13.4,42.75,12.13,42.75,12.13,41.74
+    # polygon: 12.13,41.74,13.4,41.74,13.4,42.75,12.13,42.75,12.13,41.74
+    intersectsWith: POLYGON((12.13 41.74, 13.4 41.74, 13.4 42.75, 12.13 42.75, 12.13 41.74))
    platform: Sentinel-1A,Sentinel-1B
    processingLevel: SLC
    start: 2015-05-01T00:00:00UTC
@@ -1992,7 +1945,8 @@ tests:
    expected code: 200

 - polygon closure 3:
-    polygon: 12.13,41.74,12.13,42.75,13.4,42.75,13.4,41.74,12.13,41.74
+    # polygon: 12.13,41.74,12.13,42.75,13.4,42.75,13.4,41.74,12.13,41.74
+    intersectsWith: POLYGON(( 12.13 41.74, 12.13 42.75, 13.4 42.75, 13.4 41.74, 12.13 41.74 ))
    platform: Sentinel-1A,Sentinel-1B
    processingLevel: SLC
    start: 2015-05-01T00:00:00UTC
@@ -2002,13 +1956,14 @@ tests:
    expected file: csv
    expected code: 200

-- polygon closure 4:
-    polygon: -155.08,65.82,-153.5,61.91,-149.50,63.07,-149.94,64.55
-    maxResults: 100
-    output: Csv
+# - polygon closure 4:
+#     # polygon: -155.08,65.82,-153.5,61.91,-149.50,63.07,-149.94,64.55
+#     intersectsWith: POLYGON(( -155.08 65.82, -153.5 61.91, -149.50 63.07, -149.94 64.55 ))
+#     maxResults: 100
+#     output: Csv

-    expected file: csv
-    expected code: 200
+#     expected file: csv
+#     expected code: 200

 - processingLevel L1.1:
    processingLevel: L1.1
@@ -2158,7 +2113,7 @@ tests:
    product_list: S1B_IW_GRDH_1SDV_20190410T153301_20190410T153328_015746_01D8D2_0E9B-GRD_HD,S1B_S6_GRDH_1SDV_20190911T214309_20190911T214338_017995_021E10_5CCB-GRD_HD,S1B_IW_SLC__1SDV_20180517T005744_20180517T005757_010954_0140DF_9891-SLC,S1B_WV_OCN__2SSV_20180513T055028_20180513T062610_010898_013F0C_059A-METADATA_OCN
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - product_list mix:
@@ -2167,7 +2122,7 @@ tests:
      - ts1902-PSTOKES,ts1902-PTIF,ts1888-LTIF
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - product_list multi:
@@ -2179,7 +2134,7 @@ tests:
      - ts1899
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - product_list multi list:
@@ -2188,7 +2143,7 @@ tests:
      - ts726-CSTOKES,ts726-DEM,ts726-LSTOKES,ts726-LTIF,ts726-CTIF,ts1721-LTIF,ts1733-PTIF
    output: json

-    expected file: json
+    expected file: jsonlite
    expected code: 200

 - relativeOrbit single:
@@ -2472,8 +2427,8 @@ tests:
    expected file: csv
    expected code: 200

-- start yesterday count:
-    start: yesterday
+- start 3 days ago count:
+    start: 3 days ago
    output: count

    expected file: count
@@ -2486,8 +2441,8 @@ tests:
    expected file: count
    expected code: 200

-- start 1dayago count:
-    start: 1+day+ago
+- start 3daysago count:
+    start: 3+days+ago
    output: count

    expected file: count
diff --git a/yml_tests/test_URLs_expected_400.yml b/yml_tests/test_URLs_expected_400.yml
index b8e983e8..0dd84def 100644
--- a/yml_tests/test_URLs_expected_400.yml
+++ b/yml_tests/test_URLs_expected_400.yml
@@ -1,16 +1,16 @@
 tests:

-- invalid query:
-    output: csv
-    maxresults: 10
+# - invalid query: # this just gets the first results from the default CMR provider (ASF)
+#     output: csv
+#     maxresults: 10

-    expected file: error json
-    expected code: 400
+#     expected file: error json
+#     expected code: 400

-- missing query invalid:
-    expected file: error json
-    expected code: 400
+# - missing query invalid:
+#     expected file: error json
+#     expected code: 400

 - invalid keyword:
    keyword: invalid
@@ -672,7 +672,8 @@ tests:
    expected code: 400

 - polygon 3 points invalid:
-    polygon: -155.08,65.82,-153.5
+    # polygon: -155.08,65.82,-153.5
+    intersectsWith: POLYGON((-155.08 65.82, -153.5))
    maxResults: 1000
    output: csv

@@ -680,7 +681,8 @@ tests:
    expected code: 400

 - polygon 2 points invalid:
-    polygon: -155.08,65.82
+    # polygon: -155.08,65.82
+    intersectsWith: POLYGON((-155.08 65.82))
    maxResults: 1000
    output: csv

@@ -688,31 +690,17 @@ tests:
    expected code: 400

 - polygon 2 points count invalid:
-    polygon: -155.08,65.82
+    # polygon: -155.08,65.82
+    intersectsWith: POLYGON((-155.08 65.82))
    maxResults: 1000
    output: count

    expected file: error json
    expected code: 400

-- polygon specchar invalid:
-    polygon: ~!
-    maxResults: 1000
-    output: csv
-
-    expected file: error json
-    expected code: 400
-
-- polygon specchar count invalid:
-    polygon: -#
-    maxResults: 1000
-    output: count
-
-    expected file: error json
-    expected code: 400
-
-- polygon test invalid:
-    polygon: test
+- intersectsWith test invalid:
+    intersectsWith: test
    maxResults: 1000
    output: csv

    expected file: error json
    expected code: 400
@@ -929,7 +917,8 @@ tests:
    expected code: 400

 - realworld 9 invalid:
-    polygon: 4794886.03996192,2658783.7409794466,4911667.405803877,2658783.7409794466,4911667.405803877,2775921.3473827764,4794886.03996192,2775921.3473827764,4794886.03996192,2658783.7409794466
+    # polygon: 4794886.03996192,2658783.7409794466,4911667.405803877,2658783.7409794466,4911667.405803877,2775921.3473827764,4794886.03996192,2775921.3473827764,4794886.03996192,2658783.7409794466
+    intersectsWith: POLYGON((4794886.03996192 2658783.7409794466, 4911667.405803877 2658783.7409794466, 4911667.405803877 2775921.3473827764, 4794886.03996192 2775921.3473827764, 4794886.03996192 2658783.7409794466))
    maxResults: 1000
    output: csv
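Reviewer note on the polygon → intersectsWith migrations above: each comma-separated `polygon` value becomes a WKT POLYGON of space-separated lon/lat pairs. A tiny converter sketch for updating any remaining tests (the helper name is hypothetical, not from this repo):

```python
def polygon_param_to_wkt(value: str) -> str:
    """Convert '12.13,41.74,13.4,41.74,...' into 'POLYGON((12.13 41.74,13.4 41.74,...))'."""
    nums = value.split(',')
    if len(nums) % 2 != 0:
        raise ValueError('expected an even number of coordinates (lon,lat pairs)')
    pairs = [f'{lon} {lat}' for lon, lat in zip(nums[::2], nums[1::2])]
    if pairs[0] != pairs[-1]:
        pairs.append(pairs[0])  # close the ring, as the old polygon keyword did implicitly
    return f"POLYGON(({','.join(pairs)}))"

print(polygon_param_to_wkt('12.13,41.74,13.4,41.74,13.4,42.75,12.13,42.75'))
```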
diff --git a/yml_tests/test_URLs_partial_pass.yml b/yml_tests/test_URLs_partial_pass.yml
index fd5884a1..a1def942 100644
--- a/yml_tests/test_URLs_partial_pass.yml
+++ b/yml_tests/test_URLs_partial_pass.yml
@@ -11,8 +11,8 @@ tests:
    beamMode: TEST
    output: csv,count

-    expected file: blank metalink
-    expected code: 200
+    expected file: error json
+    expected code: 400

 - beamMode specchar blank:
    beamMode: $~
@@ -32,8 +32,8 @@ tests:
    beamSwath: TEST
    output: csv,count

-    expected file: blank metalink
-    expected code: 200
+    expected file: error json
+    expected code: 400

 - beamSwath specchar blank:
    beamSwath: ~!
@@ -172,14 +172,14 @@ tests:
    groupid: 12345
    output: json

-    expected file: blank json
+    expected file: blank jsonlite
    expected code: 200

 - groupid hash blank:
    groupid: sdfkhgsdfkhgsdf
    output: json

-    expected file: blank json
+    expected file: blank jsonlite
    expected code: 200

 - groupid TEST blank:
@@ -200,35 +200,35 @@ tests:
    groupid: R1_57704_ST7_F173
    output: json

-    expected file: blank json
+    expected file: blank jsonlite
    expected code: 200

 - groupid ERS blank not in CMR:
    groupid: E2_84699_STD_F309
    output: json

-    expected file: blank json
+    expected file: blank jsonlite
    expected code: 200

 - groupid JERS blank not in CMR:
    groupid: J1_36439_STD_F268
    output: json

-    expected file: blank json
+    expected file: blank jsonlite
    expected code: 200

 - groupid Seasat blank not in CMR:
    groupid: SS_01502_STD
    output: json

-    expected file: blank json
+    expected file: blank jsonlite
    expected code: 200

 - instrument test:
    instrument: test
    output: json

-    expected file: blank json
+    expected file: blank jsonlite
    expected code: 200

 - instrument notvalid:
@@ -245,7 +245,7 @@ tests:
    output: json
    maxResults: 100

-    expected file: blank json
+    expected file: blank jsonlite
    expected code: 200

 - output test:
@@ -254,8 +254,8 @@ tests:
    output: TEST
    maxResults: 100

-    expected file: metalink
-    expected code: 200
+    expected file: error json
+    expected code: 400

 - output csc:
    platform: S1
@@ -263,8 +263,8 @@ tests:
    maxResults: 1
    output: CSC

-    expected file: metalink
-    expected code: 200
+    expected file: error json
+    expected code: 400

 - platform count RADARSAT-1 blank:
    platform: RADARSAT-1
diff --git a/yml_tests/test_WKTUtils.yml b/yml_tests/test_WKTUtils.yml
index 28f96682..7cf4f641 100644
--- a/yml_tests/test_WKTUtils.yml
+++ b/yml_tests/test_WKTUtils.yml
@@ -2,10 +2,10 @@ tests:
 # REPAIR:
 - REPAIR Merge individual shapes together and reverse polygon order:
    test wkt: GEOMETRYCOLLECTION(POLYGON((46 -19,30 26,-3 41,22 39,49 16,46 -19)), POLYGON((27 24,12 4,18 31,27 24)))
-    repaired wkt: POLYGON ((46.0000000000000000 -19.0000000000000000, 49.0000000000000000 16.0000000000000000, 22.0000000000000000 39.0000000000000000, -3.0000000000000000 41.0000000000000000, 15.2549019607843128 18.6470588235294130, 12.0000000000000000 4.0000000000000000, 19.2845744680851077 13.7127659574468090, 46.0000000000000000 -19.0000000000000000))
+    repaired wkt: POLYGON ((19.28457446808511 13.71276595744681, 46 -19, 49 16, 22 39, -3 41, 15.25490196078431 18.64705882352941, 12 4, 19.28457446808511 13.71276595744681))
    repair:
-      - 'Unconnected shapes: Convex-halled each INDIVIDUAL shape to merge them together.'
-      - 'Reversed polygon winding order'
+      - '2 non-overlapping shapes merged by their convex-hulls'
+      # - 'Reversed polygon winding order'

 # FILE geojson:
 - FILE geojson smallest possible json:
@@ -15,7 +15,7 @@ tests:
 # FILE kml:
 - File kml contains polygon point and line:
    file wkt: kmls_valid/Polygon_point_line.kml
-    parsed wkt: GEOMETRYCOLLECTION (POINT (-79.9165174000000000 47.2629855000000000 0.0000000000000000),POLYGON ((-81.6084120000000000 36.5963103000000000 0.0000000000000000, -82.1577284000000000 36.1362865000000000 0.0000000000000000, -82.7729628000000000 36.0119699000000000 0.0000000000000000, -83.1025526000000000 35.7449162000000000 0.0000000000000000, -83.9594862000000000 35.5485053000000000 0.0000000000000000, -84.0473768000000000 35.2798953000000000 0.0000000000000000, -84.3110487000000000 35.3157617000000000 0.0000000000000000, -84.2890760000000000 34.9563835000000000 0.0000000000000000, -83.0366346000000000 34.9743901000000000 0.0000000000000000, -82.4433729000000000 35.1542378000000000 0.0000000000000000, -81.0590956000000000 35.1183000000000000 0.0000000000000000, -80.7734510000000000 34.8121887000000000 0.0000000000000000, -79.6967909000000000 34.8302269000000000 0.0000000000000000, -78.4663221000000000 33.7957745000000000 0.0000000000000000, -76.4887831000000000 34.6858109000000000 0.0000000000000000, -75.7417128000000000 35.5663808000000000 0.0000000000000000, -75.8515760000000000 36.5610205000000000 0.0000000000000000, -81.6084120000000000 36.5963103000000000 0.0000000000000000)),LINESTRING (-83.4541151000000000 40.1269874000000000 0.0000000000000000, -83.1025526000000000 39.4686070000000000 0.0000000000000000, -81.6084120000000000 39.6887673000000000 0.0000000000000000, -82.1357557000000000 40.3282965000000000 0.0000000000000000))
+    parsed wkt: GEOMETRYCOLLECTION (POINT (-79.9165174000000000 47.2629855000000000 0),POLYGON ((-81.6084120000000000 36.5963103000000000 0, -82.1577284000000000 36.1362865000000000 0, -82.7729628000000000 36.0119699000000000 0, -83.1025526000000000 35.7449162000000000 0, -83.9594862000000000 35.5485053000000000 0, -84.0473768000000000 35.2798953000000000 0, -84.3110487000000000 35.3157617000000000 0, -84.2890760000000000 34.9563835000000000 0, -83.0366346000000000 34.9743901000000000 0, -82.4433729000000000 35.1542378000000000 0, -81.0590956000000000 35.1183000000000000 0, -80.7734510000000000 34.8121887000000000 0, -79.6967909000000000 34.8302269000000000 0, -78.4663221000000000 33.7957745000000000 0, -76.4887831000000000 34.6858109000000000 0, -75.7417128000000000 35.5663808000000000 0, -75.8515760000000000 36.5610205000000000 0, -81.6084120000000000 36.5963103000000000 0)),LINESTRING (-83.4541151000000000 40.1269874000000000 0, -83.1025526000000000 39.4686070000000000 0, -81.6084120000000000 39.6887673000000000 0, -82.1357557000000000 40.3282965000000000 0))

 # FILE shp:
 - FILE shp PIGSearch basic parse:
diff --git a/yml_tests/test_baseline_manager.py b/yml_tests/test_baseline_manager.py
index 8ba051ce..c58be13d 100644
--- a/yml_tests/test_baseline_manager.py
+++ b/yml_tests/test_baseline_manager.py
@@ -1,3 +1,4 @@
+import logging
 import requests, urllib  # For talking w/ API
 import json, csv         # File stuff
 import re                # Opening/Reading the file stuff
@@ -129,6 +130,7 @@ def jsonToDict(json_data):
        h = requests.head(self.query)
        content_header = h.headers.get('content-type')
        file_content = requests.get(self.query).content.decode("utf-8")
+        # text/csv; charset=utf-8

        try:
            content_type = content_header.split('/')[1]
@@ -136,7 +138,7 @@ def jsonToDict(json_data):
            assert False, self.error_msg.format("Header is not formatted as expected. Header: {0}. File Content: \n{1}\n".format(content_header, file_content))
        # Take out the "csv; charset=utf-8", without crashing on things without charset
        content_type = content_type.split(';')[0] if ';' in content_type else content_type
-
+        logging.warning(content_header)
        ## COUNT / HTML:
        if content_type == "html":
            # They return a number in the html. Convert to a real int:
@@ -159,7 +161,8 @@ def jsonToDict(json_data):
            else:
                content_type = "download"
        ## GEOJSON
-        elif content_type == "geojson":
+        elif content_type == "geo+json":
+            content_type = "geojson"
            if file_content == '{\n  "features": [],\n  "type": "FeatureCollection"\n}':
                content_type = "empty geojson"
diff --git a/yml_tests/test_dateParser_manager.py b/yml_tests/test_dateParser_manager.py
index 2bea7ca1..047c1653 100644
--- a/yml_tests/test_dateParser_manager.py
+++ b/yml_tests/test_dateParser_manager.py
@@ -57,7 +57,7 @@ def runAssertTests(self, status_code, content_type, content):
        if content_type == "html" or status_code >= 500:
            assert False, self.error_msg.format("API returned error page. \nHTML (First 500 char):\n{0}\n".format(content[:500]))
        if "expected error" in self.test_info:
-            if "errors" in content:
+            if "error" in content:
                assert self.test_info["expected error"].lower() in str(content).lower(), self.error_msg.format("API returned a different error than expected.")
            else:
                assert False, self.error_msg.format("API parsed value when validation error expected.")
diff --git a/yml_tests/test_known_bugs.yml b/yml_tests/test_known_bugs.yml
index 346377cf..a824c049 100644
--- a/yml_tests/test_known_bugs.yml
+++ b/yml_tests/test_known_bugs.yml
@@ -4,6 +4,7 @@ tests:

 #DS-2766 / DS-3194 open bug for absoluteOrbit - GRFN scenes search correct range, but file returns different value for absoluteOrbit
 #Note: This will fail on prod only, due to GRFN data differences in api-test
+#Note: This is because OrbitCalculatedSpatialDomains has 2 OrbitNumber fields for GRFN products. This can be ignored.
 - absoluteOrbit range:
    absoluteOrbit: 5000-6000
    maxresults: 10
@@ -14,6 +15,7 @@ tests:

 #WEB2-1956 open bug for asfframe - for R1, frame in file will not match frame in query
 #tests that will fail: asfframe R1 single, asfframe R1 list, asfframe R1 range
+# Need to grab FRAME_NUMBER instead of ESA_FRAME for radarsat products to pass these 3 asfframe tests
 - asfframe R1 single:
    asfframe: 307
    platform: R1
@@ -42,6 +44,8 @@ tests:
    expected code: 200

 #DS-1945 open bug for this
+# this is still an issue (FARADAY_ROTATION is labeled as a string type which makes searching via float ranges impossible)
+# https://cmr.earthdata.nasa.gov/search/collections.umm_json?attribute[]=FARADAY_ROTATION
 - maxFaradayRotation:
    maxFaradayRotation: 3
    maxResults: 10
(ie "Platform"): else: expect_type = type(poss_list) - if expect_type(found_param) == poss_list: + if isinstance(found_param, str) and found_param.startswith('[') and found_param.endswith(']'): + # expect_type = type(poss_list) + found_param = ast.literal_eval(found_param) + for param in found_param: + if param == poss_list: + # if expect_type(param) >= poss_list[0] and expect_type(param) <= poss_list[1]: + found_in_list = True + break + elif isinstance(found_param, list): + for param in found_param: + if param == poss_list: + found_in_list = True + break + elif expect_type(found_param) == poss_list: found_in_list = True break # If inner for-loop found it, break out of this one too: