Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,18 @@ and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
-

-->
------
## [1.0.5](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.4...v1.0.5)
### Added
- Create wrapper class around asf-search `ASFSession`, `SearchAPISession`. Modifies client ID.

### Changed
- Aria stack supports different output types
- Aria stacking uses aria frame id instead of frame number for stacking
- asf_search uses `SearchAPISession` by default for search queries
- bump asf-search to v9.0.4
- increase search query limit to 2000, raise error if expected output is over that number

------
## [1.0.4](https://github.com/asfadmin/Discovery-SearchAPI-v3/compare/v1.0.3...v1.0.4)
### Added
Expand Down
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,9 @@ ujson==5.7.0
uvicorn==0.21.1
watchfiles==0.19.0

asf_search==9.0.2
asf_search==9.0.4
python-json-logger==2.0.7
asf_enumeration

pyshp==2.1.3
geopandas
Expand Down
26 changes: 26 additions & 0 deletions src/SearchAPI/application/SearchAPISession.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
from typing import List
from asf_search import ASFSession


class SearchAPISession(ASFSession):
    """Thin wrapper around asf-search's ``ASFSession``.

    Behaves exactly like the base session, except that the outgoing
    ``Client-Id`` header is prefixed with ``SearchAPI_`` so that requests
    originating from SearchAPI are distinguishable from plain asf-search
    traffic.
    """

    def __init__(
        self,
        edl_host: str = None,
        edl_client_id: str = None,
        asf_auth_host: str = None,
        cmr_host: str = None,
        cmr_collections: str = None,
        auth_domains: List[str] = None,
        auth_cookie_names: List[str] = None,
    ):
        # All session setup is delegated to the asf-search base class;
        # arguments are forwarded positionally, exactly as received.
        base_args = (
            edl_host,
            edl_client_id,
            asf_auth_host,
            cmr_host,
            cmr_collections,
            auth_domains,
            auth_cookie_names,
        )
        super().__init__(*base_args)

        # Re-tag the inherited Client-Id with a SearchAPI-specific prefix.
        inherited_id = self.headers.get("Client-Id")
        self.headers.update({'Client-Id': f'SearchAPI_{inherited_id}'})
1 change: 1 addition & 0 deletions src/SearchAPI/application/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,5 @@
from .logger import *
from .log_router import *
from .search import *
from .SearchAPISession import *
from .application import *
24 changes: 15 additions & 9 deletions src/SearchAPI/application/application.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,12 @@
from .output import as_output, get_asf_search_script, make_filename
from .files_to_wkt import FilesToWKT
from . import constants
from .search import stack_aria_gunw
from .SearchAPISession import SearchAPISession
from .search import get_aria_groups_for_frame, stack_aria_gunw
import time
from asf_search.ASFSearchOptions.config import config as asf_config

asf_config['session'] = SearchAPISession()

asf.REPORT_ERRORS = False
router = APIRouter(route_class=LoggingRoute)
Expand Down Expand Up @@ -97,14 +100,17 @@ async def query_baseline(searchOptions: BaselineSearchOptsModel = Depends(proces

if searchOptions.opts.dataset is not None:
if searchOptions.opts.dataset[0] == asf.DATASET.ARIA_S1_GUNW:
return JSONResponse(
content=stack_aria_gunw(reference),
status_code=200,
headers= {
**constants.DEFAULT_HEADERS,
'Content-Disposition': f"attachment; filename={make_filename('json')}",
}
)
if output.lower() == 'count':
return Response(
content=str(len(get_aria_groups_for_frame(reference)[1])),
status_code=200,
media_type='text/html; charset=utf-8',
headers=constants.DEFAULT_HEADERS
)

stack = stack_aria_gunw(reference)
response_info = as_output(stack, output=output)
return Response(**response_info)
# Load the reference scene:

if output.lower() == 'python':
Expand Down
30 changes: 21 additions & 9 deletions src/SearchAPI/application/asf_opts.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@
from asf_search.ASFSearchOptions import validator_map

from .asf_env import load_config_maturity

from .SearchAPISession import SearchAPISession
from .logger import api_logger

non_search_param = ['output', 'maxresults', 'pagesize', 'maturity']
Expand Down Expand Up @@ -147,7 +147,7 @@ async def get_body(request: Request):
return {}


async def process_search_request(request: Request) -> SearchOptsModel:
async def process_search_request(request: Request, is_baseline: bool = False) -> SearchOptsModel:
"""
Extracts the request's query+body params, returns ASFSearchOptions, request method, output format, and a dictionary
of the merged request args wrapped in a pydantic model (SearchOptsModel)
Expand All @@ -166,7 +166,7 @@ async def process_search_request(request: Request) -> SearchOptsModel:
merged_args = {**query_params, **body}

if (token := merged_args.get('cmr_token')):
session = asf.ASFSession()
session = SearchAPISession()
session.headers.update({'Authorization': 'Bearer {0}'.format(token)})
query_opts.session = session

Expand All @@ -177,13 +177,25 @@ async def process_search_request(request: Request) -> SearchOptsModel:

try:
# we are no longer allowing unbounded searches
if query_opts.granule_list is None and query_opts.product_list is None:
if (
query_opts.granule_list is None
and query_opts.product_list is None
and output not in ['python', 'count']
and not is_baseline
):
if query_opts.maxResults is None:
query_opts.maxResults = asf.search_count(opts=query_opts)
maxResults = asf.search_count(opts=query_opts)
if maxResults > 2000:
raise ValueError(
(
'SearchAPI no longer supports unbounded searches with expected results over 2000, '
'please use the asf-search python module for long-lived searches or set `maxResults` to 2000 or less. '
'To have SearchAPI automatically generate a python script for the equivalent search to your SearchAPI query '
'set `output=python`'
)
)
elif query_opts.maxResults <= 0:
raise ValueError(f'Search keyword "maxResults" must be greater than 0')

query_opts.maxResults = min(1500, query_opts.maxResults)
raise ValueError('Search keyword "maxResults" must be greater than 0')

searchOpts = SearchOptsModel(opts=query_opts, output=output, merged_args=merged_args, request_method=request.method)
except (ValueError, ValidationError) as exc:
Expand All @@ -194,7 +206,7 @@ async def process_search_request(request: Request) -> SearchOptsModel:

async def process_baseline_request(request: Request) -> BaselineSearchOptsModel:
"""Processes request to baseline endpoint"""
searchOpts = await process_search_request(request=request)
searchOpts = await process_search_request(request=request, is_baseline=True)
reference = searchOpts.merged_args.get('reference')
try:
baselineSearchOpts = BaselineSearchOptsModel(**searchOpts.model_dump(), reference=reference)
Expand Down
63 changes: 12 additions & 51 deletions src/SearchAPI/application/search.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,60 +2,21 @@

import dateparser
import asf_search as asf
from asf_search import ASFSearchResults, ASFProduct
from shapely.wkt import dumps as dump_to_wkt
from shapely import Polygon

def stack_aria_gunw(frame: str):
reference = asf.search(frame=int(frame), dataset=asf.DATASET.ARIA_S1_GUNW, maxResults=1)[0]

opts = asf.ASFSearchOptions(
relativeOrbit=reference.properties['pathNumber'],
processingLevel=asf.PRODUCT_TYPE.SLC,
dataset=asf.DATASET.SENTINEL1,
beamMode='IW',
polarization=['VV','VV+VH'],
flightDirection=reference.properties['flightDirection'],
intersectsWith=dump_to_wkt(Polygon(reference.geometry['coordinates'][0]))
)

slc_stack = asf.search(opts=opts)

groups = defaultdict(list)
for product in slc_stack:
group_id = product.properties['platform'] + '_' + str(product.properties['orbit'])
groups[group_id].append(product)
# dateparser.parse(str(value))
aria_groups = [
{
'date': min(dateparser.parse(product.properties['startTime']) for product in group),
'products': [product for product in group],
}
for group in groups.values()
]

# track group index on each product, naively choose first granule available
for idx, group in enumerate(aria_groups):
group_granule_idx = None
for idy, product in enumerate(group['products']):
product.properties['groupIDX'] = idx
if group_granule_idx is None:
if product.has_baseline():
group_granule_idx = idy

group['group_granule_idx'] = group_granule_idx
from asf_enumeration import aria_s1_gunw

def stack_aria_gunw(frame_id: str):
    """Build a baseline stack for an ARIA S1 GUNW frame.

    Looks up the frame's reference product and acquisition groups, takes the
    first product of each group as that group's representative, and computes
    baseline values for the resulting stack relative to the reference.
    Warnings from the baseline computation are discarded.
    """
    reference, groups = get_aria_groups_for_frame(frame_id)

    # One representative product per acquisition group forms the stack.
    representatives = ASFSearchResults([grp.products[0] for grp in groups])
    baseline_stack, _warnings = asf.baseline.get_baseline_from_stack(reference, representatives)

    return baseline_stack

stack = asf.ASFSearchResults([group['products'][group['group_granule_idx']] for group in aria_groups if group['group_granule_idx'] is not None])
target_stack, warnings = asf.baseline.get_baseline_from_stack(reference, stack)
for product in target_stack:
group_idx = product.properties.pop('groupIDX')
aria_groups[group_idx]['perpendicularBaseline'] = product.properties['perpendicularBaseline']
aria_groups[group_idx]['temporalBaseline'] = product.properties['temporalBaseline']

for group in aria_groups:
for idx, product in enumerate(group['products']):
group['products'][idx] = product.properties['sceneName']
group['date'] = group['date'].strftime('%Y-%m-%dT%H:%M:%SZ')

return aria_groups
def get_aria_groups_for_frame(frame: str) -> tuple[ASFProduct, list[aria_s1_gunw.Sentinel1Acquisition]]:
    """Resolve an ARIA frame id to its reference product and acquisition groups.

    The frame id string is parsed as an integer, resolved via asf_enumeration,
    and the first product of the first acquisition group is returned as the
    reference alongside the full list of groups.
    """
    frame_record = aria_s1_gunw.get_frame(frame_id=int(frame))
    acquisitions = aria_s1_gunw.get_acquisitions(frame_record)
    reference = acquisitions[0].products[0]
    return reference, acquisitions