3 changes: 2 additions & 1 deletion app/main.py
@@ -6,7 +6,7 @@
from app.services.tiles.base import load_grids
from app.config.logger import setup_logging
from app.config.settings import settings
from app.routers import jobs_status, unit_jobs, health, tiles, upscale_tasks
from app.routers import jobs_status, unit_jobs, health, tiles, upscale_tasks, sync_jobs

setup_logging()

@@ -33,5 +33,6 @@
app.include_router(tiles.router)
app.include_router(jobs_status.router)
app.include_router(unit_jobs.router)
app.include_router(sync_jobs.router)
app.include_router(upscale_tasks.router)
app.include_router(health.router)
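For context, a minimal sketch of the wiring this change produces, assuming only the module paths visible in the diff (the standalone app below is illustrative; the real app in app/main.py also configures logging and settings):

```python
from fastapi import FastAPI

from app.routers import sync_jobs

app = FastAPI()
app.include_router(sync_jobs.router)  # exposes POST /sync_jobs
```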
23 changes: 23 additions & 0 deletions app/platforms/base.py
@@ -1,5 +1,7 @@
from abc import ABC, abstractmethod

from fastapi import Response

from app.schemas.enum import OutputFormatEnum, ProcessingStatusEnum
from app.schemas.unit_job import ServiceDetails

@@ -33,6 +35,27 @@ async def execute_job(
"""
pass

@abstractmethod
async def execute_synchronous_job(
self,
user_token: str,
title: str,
details: ServiceDetails,
parameters: dict,
format: OutputFormatEnum,
) -> Response:
"""
Execute a processing job synchronously on the platform with the given service ID
and parameters.

:param title: The title of the job to be executed.
:param details: The service details containing the service ID and application.
:param parameters: The parameters required for the job execution.
:param format: Format of the output result.
:return: Return the result of the job.
"""
pass

@abstractmethod
async def get_job_status(
self, user_token: str, job_id: str, details: ServiceDetails
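A minimal sketch of what a concrete platform now has to provide, assuming only the abstract contract above (the EchoPlatform class and its canned JSON payload are hypothetical; the other abstract methods such as execute_job and get_job_status are omitted for brevity):

```python
import json

from fastapi import Response

from app.platforms.base import BaseProcessingPlatform
from app.schemas.enum import OutputFormatEnum
from app.schemas.unit_job import ServiceDetails


class EchoPlatform(BaseProcessingPlatform):  # hypothetical example
    async def execute_synchronous_job(
        self,
        user_token: str,
        title: str,
        details: ServiceDetails,
        parameters: dict,
        format: OutputFormatEnum,
    ) -> Response:
        # A real platform would call its backend here and relay the
        # result; this stub just echoes the job title back as JSON.
        return Response(
            content=json.dumps({"title": title}),
            status_code=200,
            media_type="application/json",
        )
```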
11 changes: 11 additions & 0 deletions app/platforms/implementations/ogc_api_process.py
@@ -1,3 +1,4 @@
from fastapi import Response
from app.platforms.base import BaseProcessingPlatform
from app.platforms.dispatcher import register_platform
from app.schemas.enum import OutputFormatEnum, ProcessTypeEnum, ProcessingStatusEnum
@@ -22,6 +23,16 @@ async def execute_job(
) -> str:
raise NotImplementedError("OGC API Process job execution not implemented yet.")

async def execute_synchronous_job(
self,
user_token: str,
title: str,
details: ServiceDetails,
parameters: dict,
format: OutputFormatEnum,
) -> Response:
raise NotImplementedError("OGC API Process synchronous job execution not implemented yet.")

async def get_job_status(
self, user_token: str, job_id: str, details: ServiceDetails
) -> ProcessingStatusEnum:
42 changes: 33 additions & 9 deletions app/platforms/implementations/openeo.py
@@ -1,5 +1,6 @@
import datetime

from fastapi import Response
import jwt
import openeo
import requests
@@ -179,14 +180,9 @@ def _get_process_id(self, url: str) -> str:

return process_id

async def execute_job(
self,
user_token: str,
title: str,
details: ServiceDetails,
parameters: dict,
format: OutputFormatEnum,
) -> str:
async def _build_datacube(
self, user_token: str, title: str, details: ServiceDetails, parameters: dict
) -> openeo.DataCube:
process_id = self._get_process_id(details.application)

logger.debug(
@@ -195,14 +191,42 @@ async def execute_job(
)

connection = await self._setup_connection(user_token, details.endpoint)
service = connection.datacube_from_process(
return connection.datacube_from_process(
process_id=process_id, namespace=details.application, **parameters
)

async def execute_job(
self,
user_token: str,
title: str,
details: ServiceDetails,
parameters: dict,
format: OutputFormatEnum,
) -> str:
service = await self._build_datacube(user_token, title, details, parameters)
job = service.create_job(title=title, out_format=format)
logger.info(f"Executing OpenEO batch job with title={title}")
job.start()

return job.job_id

async def execute_synchronous_job(
self,
user_token: str,
title: str,
details: ServiceDetails,
parameters: dict,
format: OutputFormatEnum,
) -> Response:
service = await self._build_datacube(user_token, title, details, parameters)
logger.info("Executing synchronous OpenEO job")
response = service.execute(auto_decode=False)
return Response(
content=response.content,
status_code=response.status_code,
media_type=response.headers.get("Content-Type"),
)

def _map_openeo_status(self, status: str) -> ProcessingStatusEnum:
"""
Map the status returned by openEO to a status known within the API.
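With the shared _build_datacube helper, the two execution paths differ only in how the datacube is run. A side-by-side sketch, assuming a platform instance and placeholder token/details/params values:

```python
cube = await platform._build_datacube(token, "My job", details, params)

# Batch path (execute_job): create a job, start it, poll later via job_id.
job = cube.create_job(title="My job", out_format="gtiff")
job.start()
job_id = job.job_id

# Synchronous path (execute_synchronous_job): block until the backend
# answers, then relay the raw bytes, status code and content type.
raw = cube.execute(auto_decode=False)  # raw requests.Response
result_bytes = raw.content
```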
107 changes: 107 additions & 0 deletions app/routers/sync_jobs.py
@@ -0,0 +1,107 @@
from typing import Annotated
from fastapi import Body, APIRouter, Depends, HTTPException, Response, status
from loguru import logger

from app.schemas.enum import OutputFormatEnum, ProcessTypeEnum
from app.schemas.unit_job import (
BaseJobRequest,
ServiceDetails,
)
from app.auth import oauth2_scheme
from app.services.processing import (
create_synchronous_job,
)


# from app.auth import get_current_user

router = APIRouter()


@router.post(
"/sync_jobs",
status_code=status.HTTP_201_CREATED,
tags=["Unit Jobs"],
summary="Create a new synchronous processing job",
)
async def create_sync_job(
payload: Annotated[
BaseJobRequest,
Body(
openapi_examples={
"openEO Example": {
"summary": "Valid openEO job request",
"description": "The following example demonstrates how to create a processing "
"job using an openEO-based service. This example triggers the "
"[`variability map`](https://github.com/ESA-APEx/apex_algorithms/blob/main/algo"
"rithm_catalog/vito/variabilitymap/records/variabilitymap.json) "
"process using the CDSE openEO Federation. In this case the `endpoint`"
"represents the URL of the openEO backend and the `application` refers to the "
"User Defined Process (UDP) that is being executed on the backend.",
"value": BaseJobRequest(
label=ProcessTypeEnum.OPENEO,
title="Example openEO Job",
service=ServiceDetails(
endpoint="https://openeofed.dataspace.copernicus.eu",
application="https://raw.githubusercontent.com/ESA-APEx/apex_algorithms"
"/32ea3c9a6fa24fe063cb59164cd318cceb7209b0/openeo_udp/variabilitymap/"
"variabilitymap.json",
),
format=OutputFormatEnum.GEOTIFF,
parameters={
"spatial_extent": {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"properties": {},
"geometry": {
"coordinates": [
[
[
5.170043941798298,
51.25050990858725,
],
[
5.171035037521989,
51.24865722468999,
],
[
5.178521828188366,
51.24674578027137,
],
[
5.179084341977159,
51.24984764553983,
],
[
5.170043941798298,
51.25050990858725,
],
]
],
"type": "Polygon",
},
}
],
},
"temporal_extent": ["2025-05-01", "2025-05-01"],
},
).model_dump(),
}
},
),
],
token: str = Depends(oauth2_scheme),
) -> Response:
"""Initiate a synchronous processing job with the provided data and return the result."""
try:
return await create_synchronous_job(token, payload)
except HTTPException as e:
raise e
except Exception as e:
logger.exception(f"Error creating synchronous job: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"An error occurred while creating the synchronous job: {e}",
)
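A hedged client-side sketch of calling the new endpoint (the base URL and token are assumptions; the payload mirrors the openapi_examples above, and "openeo" is assumed to be the string value of ProcessTypeEnum.OPENEO):

```python
import requests

API = "http://localhost:8000"  # assumed local deployment
TOKEN = "..."  # a valid bearer token for oauth2_scheme

payload = {
    "label": "openeo",  # assumed value of ProcessTypeEnum.OPENEO
    "title": "Example openEO Job",
    "service": {
        "endpoint": "https://openeofed.dataspace.copernicus.eu",
        "application": "https://raw.githubusercontent.com/ESA-APEx/apex_algorithms"
        "/32ea3c9a6fa24fe063cb59164cd318cceb7209b0/openeo_udp/variabilitymap/"
        "variabilitymap.json",
    },
    "format": "gtiff",
    "parameters": {
        "spatial_extent": {
            "type": "FeatureCollection",
            "features": [
                {
                    "type": "Feature",
                    "properties": {},
                    "geometry": {
                        "type": "Polygon",
                        "coordinates": [[
                            [5.170043941798298, 51.25050990858725],
                            [5.171035037521989, 51.24865722468999],
                            [5.178521828188366, 51.24674578027137],
                            [5.179084341977159, 51.24984764553983],
                            [5.170043941798298, 51.25050990858725],
                        ]],
                    },
                }
            ],
        },
        "temporal_extent": ["2025-05-01", "2025-05-01"],
    },
}

resp = requests.post(
    f"{API}/sync_jobs",
    json=payload,
    headers={"Authorization": f"Bearer {TOKEN}"},
)
resp.raise_for_status()
with open("result.tif", "wb") as f:  # GeoTIFF bytes relayed from the backend
    f.write(resp.content)
```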
1 change: 1 addition & 0 deletions app/schemas/enum.py
@@ -20,3 +20,4 @@ class OutputFormatEnum(str, Enum):
GEOJSON = "geojson"
GEOTIFF = "gtiff"
NETCDF = "netcdf"
JSON = "json"
18 changes: 18 additions & 0 deletions app/services/processing.py
@@ -1,6 +1,7 @@
import json
from typing import List, Optional

from fastapi import Response
from loguru import logger
from app.auth import get_current_user_id
from app.database.models.processing_job import (
@@ -192,3 +193,20 @@ async def get_processing_job_by_user_id(
created=record.created,
updated=record.updated,
)


async def create_synchronous_job(
user_token: str,
request: BaseJobRequest,
) -> Response:
logger.info(f"Creating synchronous job with summary: {request}")

platform = get_processing_platform(request.label)

return await platform.execute_synchronous_job(
user_token=user_token,
title=request.title,
details=request.service,
parameters=request.parameters,
format=request.format,
)
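A minimal sketch of how this helper resolves at the service layer, assuming get_processing_platform returns the platform registered for the request's label (the token value is a placeholder):

```python
import asyncio

from app.schemas.enum import OutputFormatEnum, ProcessTypeEnum
from app.schemas.unit_job import BaseJobRequest, ServiceDetails
from app.services.processing import create_synchronous_job


async def main() -> None:
    request = BaseJobRequest(
        label=ProcessTypeEnum.OPENEO,
        title="Example openEO Job",
        service=ServiceDetails(
            endpoint="https://openeofed.dataspace.copernicus.eu",
            application="https://raw.githubusercontent.com/ESA-APEx/apex_algorithms"
            "/32ea3c9a6fa24fe063cb59164cd318cceb7209b0/openeo_udp/variabilitymap/"
            "variabilitymap.json",
        ),
        format=OutputFormatEnum.GEOTIFF,
        parameters={"temporal_extent": ["2025-05-01", "2025-05-01"]},
    )
    # Resolves the platform from request.label and forwards the call;
    # the platform hands back a ready-to-serve fastapi.Response.
    response = await create_synchronous_job(user_token="...", request=request)
    print(response.status_code, response.media_type)


asyncio.run(main())
```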