5 changes: 4 additions & 1 deletion CHANGELOG.md
@@ -34,14 +34,17 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

### Changed
- Include wkt in error when raising in `validate_wkt()`
- Specify which binary file types are allowed to be passed to lambda

### Added
- Add dedicated dev branch for test-staging deployment
- Intended Dev->Release workflow
- dev -> test -> prod-staging -> prod
- Added more files to integration testing endpoint
- Add remaining file upload support for .zip and .shp files. All previous file formats now supported

### Changed
- pin `asf-search` to v8.3.1, All basic Vertex dataset searches working
- pin `asf-search` to v8.3.3, All basic Vertex dataset searches working

## [1.0.0](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v0.1.0...v1.0.0)

4 changes: 3 additions & 1 deletion cdk/cdk/cdk_stack.py
@@ -59,7 +59,8 @@ def __init__(self, scope: Construct, construct_id: str, staging: bool = False, *
timeout=Duration.seconds(30),
memory_size=5308,
code=lambda_.DockerImageCode.from_image_asset(
directory='..'
directory='..',
# build_args={'MATURITY': }
),
**lambda_vpc_kwargs,
)
@@ -70,6 +71,7 @@ def __init__(self, scope: Construct, construct_id: str, staging: bool = False, *
id=api_id,
handler=search_api_lambda,
proxy=True,
binary_media_types=['multipart/form-data', 'application/octet-stream'],
default_cors_preflight_options=apigateway.CorsOptions(
allow_origins=apigateway.Cors.ALL_ORIGINS, allow_methods=apigateway.Cors.ALL_METHODS
),
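A minimal sketch of how these two cdk changes fit together. The diff deliberately leaves the MATURITY build arg blank, so the `staging`-derived mapping below is purely an assumption, as is the lambda construct id; `self`, `staging`, and `api_id` are the names from the surrounding stack code:

    from aws_cdk import Duration, aws_apigateway as apigateway, aws_lambda as lambda_

    maturity = 'test' if staging else 'prod'  # assumed mapping; the PR leaves this open
    search_api_lambda = lambda_.DockerImageFunction(
        self, 'SearchAPI-Lambda',  # hypothetical construct id
        timeout=Duration.seconds(30),
        memory_size=5308,
        code=lambda_.DockerImageCode.from_image_asset(
            directory='..',
            build_args={'MATURITY': maturity},  # surfaces as a Docker ARG at image build
        ),
    )
    # binary_media_types tells API Gateway to pass matching request bodies through
    # base64-encoded instead of decoding them as text, which the new binary
    # .shp/.zip uploads require to survive the trip to the lambda.
    apigateway.LambdaRestApi(
        self, api_id,
        handler=search_api_lambda,
        proxy=True,
        binary_media_types=['multipart/form-data', 'application/octet-stream'],
    )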
2 changes: 1 addition & 1 deletion requirements.txt
@@ -22,7 +22,7 @@ ujson==5.7.0
uvicorn==0.21.1
watchfiles==0.19.0

asf_search==8.3.1
asf_search==8.3.3
python-json-logger==2.0.7

pyshp==2.1.3
14 changes: 0 additions & 14 deletions src/SearchAPI/application/application.py
@@ -54,9 +54,7 @@ async def query_params(searchOptions: SearchOptsModel = Depends(process_search_r
raise HTTPException(detail=repr(exc), status_code=400) from exc

if output.lower() == 'count':
start = time.perf_counter()
count=asf.search_count(opts=opts)
api_logger.info(f'/services/search/param count query time {time.perf_counter()-start}')
return Response(
content=str(count),
status_code=200,
@@ -65,10 +63,8 @@ async def query_params(searchOptions: SearchOptsModel = Depends(process_search_r
)

if output.lower() == 'python':
start = time.perf_counter()
file_name, search_script = get_asf_search_script(opts)

api_logger.info(f'/services/search/param count query time {time.perf_counter()-start}')
return Response(
content=search_script,
status_code=200,
@@ -79,9 +75,7 @@ async def query_params(searchOptions: SearchOptsModel = Depends(process_search_r
}
)
try:
start = time.perf_counter()
results = asf.search(opts=opts)
api_logger.info(f'/services/search/param query time {time.perf_counter()-start}')
response_info = as_output(results, output)
return Response(**response_info)

@@ -102,10 +96,8 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces
# Load the reference scene:

if output.lower() == 'python':
start = time.perf_counter()
file_name, search_script = get_asf_search_script(opts, reference=reference, search_endpoint='baseline')

api_logger.info(f'/services/search/param count query time {time.perf_counter()-start}')
return Response(
content=search_script,
status_code=200,
@@ -116,9 +108,7 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces
}
)
try:
start = time.perf_counter()
reference_product = asf.granule_search(granule_list=[reference], opts=opts)[0]
api_logger.info(f'/services/search/baseline reference query time {time.perf_counter()-start}')
except (KeyError, IndexError, ValueError) as exc:
raise HTTPException(detail=f"Reference scene not found: {reference}", status_code=400) from exc

@@ -148,9 +138,7 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces
# Figure out the response params:
if output.lower() == 'count':
stack_opts = reference_product.get_stack_opts()
start = time.perf_counter()
count = asf.search_count(opts=stack_opts)
api_logger.info(f'/services/search/baseline count stack query time {time.perf_counter()-start}')

return Response(
content=str(count),
@@ -161,9 +149,7 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces

# Finally stream everything back:
try:
start = time.perf_counter()
stack = reference_product.stack(opts=opts)
api_logger.info(f'/services/search/baseline stack query time {time.perf_counter()-start}')
response_info = as_output(stack, output)
return Response(**response_info)

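The deleted blocks above all repeated the same inline `time.perf_counter()` pattern; that timing now lives in one place, the `LoggingRoute` in the next file, which wraps every handler and logs `QueryTime`. A sketch of the wiring that makes this work — the router setup and import path are assumptions, not shown in this diff:

    from fastapi import APIRouter, FastAPI
    from .log_router import LoggingRoute  # module shown below; import path assumed

    # Every route registered on this router is wrapped by LoggingRoute's
    # custom_route_handler, which times the request and logs QueryTime.
    router = APIRouter(route_class=LoggingRoute)
    app = FastAPI()
    app.include_router(router)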
15 changes: 11 additions & 4 deletions src/SearchAPI/application/log_router.py
@@ -6,7 +6,7 @@
from fastapi.routing import APIRoute

from .logger import api_logger

import json

class LoggingRoute(APIRoute):
"""
@@ -39,22 +39,29 @@ def get_route_handler(self) -> Callable:

async def custom_route_handler(request: Request) -> Response:
# Grab the AWS UUID and set it for every log:
context = request.scope.get("aws.context")
if context is not None:
self.aws_request_id = context.aws_request_id

if context := request.headers.get('x-amzn-request-context'):
context_object = json.loads(context)
self.aws_request_id = context_object.get('requestId')

logging.setLogRecordFactory(self.record_factory)
# Time the request itself:
before = time.time()
try:
response: Response = await original_route_handler(request)
finally:
queryBody = {}
if (content_type := request.headers.get('content-type')) is not None:
if content_type == 'application/json':
queryBody = await request.json()
# What to ALWAYS log:
duration = time.time() - before
api_logger.info(
"Query finished running.",
extra={
"QueryTime": duration,
"QueryParams": dict(request.query_params),
"QueryBody": queryBody,
"Endpoint": request.scope['path'],
}
)
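A self-contained sketch of the header-parsing logic above. The payload shape mirrors what the `x-amzn-request-context` header carries (presumably forwarded by the AWS Lambda Web Adapter or a similar shim), though only `requestId` is actually read — the example value here is illustrative:

    import json

    def extract_request_id(headers: dict) -> str | None:
        # Same walrus pattern as custom_route_handler above.
        if context := headers.get('x-amzn-request-context'):
            return json.loads(context).get('requestId')
        return None

    print(extract_request_id({'x-amzn-request-context': json.dumps({'requestId': 'abc-123'})}))  # abc-123
    print(extract_request_id({}))  # None

One caveat on the body logging: the strict `content_type == 'application/json'` comparison skips bodies sent as `application/json; charset=utf-8`; a `startswith('application/json')` test would accept both.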
24 changes: 20 additions & 4 deletions tests/integration/test_stack.py
@@ -9,7 +9,10 @@
session = asf.ASFSession()

cwd = os.getcwd()
test_file_path= os.path.join(cwd, 'tests/integration/', 'elvey.geojson')
geojson_test_file_path= os.path.join(cwd, 'tests/integration/', 'elvey.geojson')
kml_test_file_path = os.path.join(cwd, 'tests/yml_tests/Resources/kmls_valid/', '3D_coords.kml')
shp_test_file_path = os.path.join(cwd, 'tests/yml_tests/Resources/shps_valid/', 'NED1_F.shp')
zip_test_file_path = os.path.join(cwd, 'tests/yml_tests/Resources/zips_valid/', 'NED1_F.zip')

basic_search_params = {
'maxResults': 250,
@@ -102,9 +105,22 @@ def test_wkt_endpoint_post_json():
assert response.status_code == 200, f'Non-200 status code from baseline POST endpoint (data): \nstatus code: {response.status_code}\nresponse: {response.text}'

### WKT FILE UPLOAD TEST
def test_wkt_file_upload_endpoint():
files = {'files': open(test_file_path,'rb')}
def test_wkt_file_upload_endpoint_geojson():
_wkt_file_upload_endpoint(geojson_test_file_path)

def test_wkt_file_upload_endpoint_kml():
_wkt_file_upload_endpoint(kml_test_file_path)

def test_wkt_file_upload_endpoint_shp():
_wkt_file_upload_endpoint(shp_test_file_path)

def test_wkt_file_upload_endpoint_zip():
_wkt_file_upload_endpoint(zip_test_file_path)


def _wkt_file_upload_endpoint(file: str):
files = {'files': open(file,'rb')}
response = session.post(files_wkt_endpoint, files=files)
response.raise_for_status()

assert response.status_code == 200, f'Non-200 status code from baseline POST endpoint (data): \nstatus code: {response.status_code}\nresponse: {response.text}'
assert response.status_code == 200, f'Non-200 status code from files_to_wkt endpoint for file {file.split("/")[-1]}: \nstatus code: {response.status_code}\nresponse: {response.text}'
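An equivalent pytest shape, sketched under the assumption that the module-level names from the diff (`session`, `files_wkt_endpoint`, and the four `*_test_file_path` variables) stay as defined above: `parametrize` collapses the four wrappers into one test, and the `with` block closes the file handle that the bare `open()` leaves dangling.

    import os
    import pytest

    @pytest.mark.parametrize('path', [
        geojson_test_file_path,
        kml_test_file_path,
        shp_test_file_path,
        zip_test_file_path,
    ])
    def test_wkt_file_upload_endpoint(path):
        # Context manager closes the handle even if the request raises.
        with open(path, 'rb') as f:
            response = session.post(files_wkt_endpoint, files={'files': f})
        response.raise_for_status()
        assert response.status_code == 200, (
            f'Non-200 status code from files_to_wkt endpoint for file '
            f'{os.path.basename(path)}:\nstatus code: {response.status_code}\nresponse: {response.text}'
        )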