Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions backend/workflow_manager/execution/urls.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,17 @@
)
# GET-only view bindings for the execution endpoints.
execution_detail = ExecutionViewSet.as_view({"get": "retrieve"})
execution_log_list = ExecutionLogViewSet.as_view({"get": "list"})
# New: CSV/JSON export of a single execution's logs.
execution_log_export = ExecutionLogViewSet.as_view({"get": "export"})

# format_suffix_patterns lets clients append .json/.api etc. to each route.
urlpatterns = format_suffix_patterns(
    [
        path("", execution_list, name="execution-list"),
        path("<uuid:pk>/", execution_detail, name="execution-detail"),
        path("<uuid:pk>/logs/", execution_log_list, name="execution-log"),
        path(
            "<uuid:pk>/logs/export/",
            execution_log_export,
            name="execution-log-export",
        ),
    ]
)
125 changes: 124 additions & 1 deletion backend/workflow_manager/workflow_v2/execution_log_view.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,16 @@
import csv
import io
import json
import logging

from django.db.models import Q
from django.db.models.query import QuerySet
from django.http import HttpResponse
from django.utils import timezone
from permissions.permission import IsOwner
from rest_framework import viewsets
from rest_framework import status, viewsets
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.versioning import URLPathVersioning
from utils.pagination import CustomPagination

Expand All @@ -14,6 +20,11 @@

# Module-level logger for this view module.
logger = logging.getLogger(__name__)

# Cap on synchronous export size. Above this, callers should narrow the
# filter (file_execution_id, log_level) — async deployment-wide export
# is intentionally a separate, future feature.
MAX_SYNC_EXPORT_ROWS = 50_000


class WorkflowExecutionLogViewSet(viewsets.ModelViewSet):
versioning_class = URLPathVersioning
Expand All @@ -32,3 +43,115 @@ def get_queryset(self) -> QuerySet:
return ExecutionLog.objects.filter(
Q(wf_execution_id=execution_id) | Q(execution_id=execution_id)
)

def export(self, request, *args, **kwargs):
    """Export logs for a single workflow execution as CSV or JSON.

    Honors the same filters as the list endpoint (file_execution_id,
    log_level). Returns 413 if the result set exceeds the sync cap so
    the client can prompt the user to narrow their filter.

    Query params:
        file_format: "json" (default) or "csv".

    Returns:
        HttpResponse with an attachment Content-Disposition, or a DRF
        Response with 400 (bad format) / 413 (too many rows).
    """
    # NOTE: do not name this query param `format` — DRF reserves it for
    # content negotiation and will 404 if no renderer matches the value.
    export_format = request.query_params.get("file_format", "json").lower()
    if export_format not in ("json", "csv"):
        return Response(
            {"error": "file_format must be one of: json, csv"},
            status=status.HTTP_400_BAD_REQUEST,
        )

    queryset = (
        self.filter_queryset(self.get_queryset())
        .order_by("event_time")
        .only("id", "event_time", "data", "file_execution_id")
    )

    # Single materialization with cap+1 sentinel — avoids a separate COUNT
    # query and a second full scan during build.
    rows = list(queryset[: MAX_SYNC_EXPORT_ROWS + 1])
    if len(rows) > MAX_SYNC_EXPORT_ROWS:
        return Response(
            {
                "error": (
                    f"Too many logs to export (>{MAX_SYNC_EXPORT_ROWS} rows). "
                    "Narrow the filter (e.g. by file or log level) and retry."
                ),
                "limit": MAX_SYNC_EXPORT_ROWS,
            },
            status=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
        )

    if export_format == "csv":
        body = self._build_csv(rows)
        content_type = "text/csv; charset=utf-8"
    else:
        body = self._build_json(rows)
        content_type = "application/json"

    execution_id = self.kwargs.get("pk")
    timestamp = timezone.now().strftime("%Y%m%d_%H%M%S")
    filename = f"execution_logs_{execution_id}_{timestamp}.{export_format}"

    response = HttpResponse(body, content_type=content_type)
    # BUG FIX: the header previously hardcoded the literal text "(unknown)"
    # and never used the `filename` computed above (dead variable); every
    # download arrived as a file named "(unknown)". Interpolate it instead.
    response["Content-Disposition"] = f'attachment; filename="{filename}"'
    return response

def _normalize(self, log: ExecutionLog) -> dict:
    """Project one ExecutionLog row into a flat export dict.

    Single source of truth for per-row null-handling and the dict-guard
    on `log.data`; a non-dict `data` value is reported with a warning so
    silent blank rows in CSV still leave a diagnostic trail.
    """
    raw = log.data
    data = raw if isinstance(raw, dict) else {}
    if not isinstance(raw, dict) and raw is not None:
        logger.warning(
            "ExecutionLog %s has non-dict data of type %s; "
            "emitting blanks in CSV export",
            log.id,
            type(raw).__name__,
        )
    event_time = log.event_time.isoformat() if log.event_time else None
    file_exec = str(log.file_execution_id) if log.file_execution_id else None
    return {
        "id": str(log.id),
        "event_time": event_time,
        "file_execution_id": file_exec,
        "level": data.get("level", ""),
        "stage": data.get("stage", ""),
        "log_message": data.get("log", ""),
        "raw_data": raw,
    }

def _build_csv(self, rows: list[ExecutionLog]) -> str:
    """Render rows as a CSV string with a fixed header line."""
    buffer = io.StringIO()
    writer = csv.writer(buffer)
    writer.writerow(["event_time", "level", "stage", "log", "file_execution_id"])
    # Normalize each row once, then emit the five exported columns.
    writer.writerows(
        [
            record["event_time"] or "",
            record["level"],
            record["stage"],
            record["log_message"],
            record["file_execution_id"] or "",
        ]
        for record in map(self._normalize, rows)
    )
    return buffer.getvalue()

def _build_json(self, rows: list[ExecutionLog]) -> str:
    """Render rows as a JSON array string.

    The JSON path passes raw_data through verbatim (faithful to the DB)
    rather than projecting it to extracted level/stage/log fields.
    """
    payload = []
    for log in rows:
        record = self._normalize(log)
        payload.append(
            {
                "id": record["id"],
                "event_time": record["event_time"],
                "file_execution_id": record["file_execution_id"],
                "data": record["raw_data"],
            }
        )
    return json.dumps(payload)
6 changes: 6 additions & 0 deletions backend/workflow_manager/workflow_v2/urls/workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
# GET-only view bindings for workflow execution endpoints.
execution_entity = WorkflowExecutionViewSet.as_view({"get": "retrieve"})
execution_list = WorkflowExecutionViewSet.as_view({"get": "list"})
execution_log_list = WorkflowExecutionLogViewSet.as_view({"get": "list"})
# New: CSV/JSON export of a single execution's logs.
execution_log_export = WorkflowExecutionLogViewSet.as_view({"get": "export"})
workflow_clear_file_marker = WorkflowViewSet.as_view({"get": "clear_file_marker"})
workflow_schema = WorkflowViewSet.as_view({"get": "get_schema"})
can_update = WorkflowViewSet.as_view({"get": "can_update"})
Expand Down Expand Up @@ -71,6 +72,11 @@
execution_log_list,
name="execution-log",
),
path(
"execution/<uuid:pk>/logs/export/",
execution_log_export,
name="execution-log-export",
),
path(
"schema/",
workflow_schema,
Expand Down
5 changes: 5 additions & 0 deletions frontend/src/components/logging/log-modal/LogModal.css
Original file line number Diff line number Diff line change
Expand Up @@ -21,3 +21,8 @@
border-color: #1677ff;
color: #1677ff;
}

/* Spacing and sizing for the Export button in the log-modal title bar. */
.log-modal-title .export-btn-outlined {
    margin-left: 16px;
    height: auto;
}
109 changes: 107 additions & 2 deletions frontend/src/components/logging/log-modal/LogModal.jsx
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { CopyOutlined } from "@ant-design/icons";
import { Button, Modal, Table, Tooltip } from "antd";
import { CopyOutlined, DownloadOutlined } from "@ant-design/icons";
import { Button, Dropdown, Modal, Table, Tooltip } from "antd";
import PropTypes from "prop-types";
import { useEffect, useState } from "react";

Expand Down Expand Up @@ -32,6 +32,7 @@ function LogModal({

const [executionLogs, setExecutionLogs] = useState([]);
const [loading, setLoading] = useState(false);
const [exporting, setExporting] = useState(false);
const [selectedLogLevel, setSelectedLogLevel] = useState(null);
const [ordering, setOrdering] = useState(null);
const [pagination, setPagination] = useState({
Expand Down Expand Up @@ -84,6 +85,89 @@ function LogModal({
setPagination({ current: 1, pageSize: 10, total: 0 }); // Reset pagination
};

// Download this execution's logs as a file in the chosen format
// (`fileFormat` is "json" or "csv"). Re-entrancy guarded by `exporting`.
const handleExport = async (fileFormat) => {
  if (exporting) {
    return;
  }
  setExporting(true);
  try {
    const url = getUrl(`/execution/${executionId}/logs/export/`);
    // `file_format` (not `format`) — `format` is reserved by DRF for
    // content negotiation and 404s when set to "csv".
    const params = {
      file_format: fileFormat,
      file_execution_id: fileId || "null",
    };
    // Skip log_level when nothing is selected; axios serializes `null` as
    // the literal string "null" which falls through the level filter to
    // an INFO-min default and silently drops DEBUG rows.
    if (selectedLogLevel) {
      params.log_level = selectedLogLevel;
    }
    const response = await axiosPrivate.get(url, {
      params,
      responseType: "blob",
    });

    // Prefer the server-supplied filename from Content-Disposition; fall
    // back to a locally built one if the header is missing or unparsable.
    const filename =
      response.headers?.["content-disposition"]?.match(
        /filename="?([^"]+)"?/,
      )?.[1] || `execution_logs_${fileId || executionId}.${fileFormat}`;
    // Trigger the browser download via a temporary object URL + <a> click,
    // then clean up both the element and the URL to avoid leaks.
    const blobUrl = globalThis.URL.createObjectURL(new Blob([response.data]));
    const link = document.createElement("a");
    link.href = blobUrl;
    link.download = filename;
    document.body.appendChild(link);
    link.click();
    link.remove();
    globalThis.URL.revokeObjectURL(blobUrl);
  } catch (err) {
    // 413 means we hit the server-side row cap. Always surface a
    // narrow-your-filter message — even if decoding the JSON blob body
    // fails we keep the right user-facing copy rather than falling
    // through to a generic "Request failed" alert.
    if (err?.response?.status === 413) {
      let message = "Export too large — narrow your filter and retry.";
      try {
        // responseType "blob" means the error body must be decoded first.
        const text = await err.response.data.text();
        const parsed = JSON.parse(text);
        if (parsed?.error) {
          message = parsed.error;
        }
      } catch (parseErr) {
        console.error("Failed to parse 413 body:", parseErr);
      }
      setAlertDetails({ type: "error", content: message });
      return;
    }
    setAlertDetails(handleException(err));
  } finally {
    setExporting(false);
  }
};

// Dropdown entries for the Export button — one item per supported format.
const exportMenuItems = [
  {
    key: "json",
    label: "Download as JSON",
    onClick: () => handleExport("json"),
  },
  {
    key: "csv",
    label: "Download as CSV",
    onClick: () => handleExport("csv"),
  },
];

// Tooltip copy explains why the button may be disabled (no rows /
// an export already running) or simply labels it when enabled.
let exportTooltip;
if (!pagination.total) {
  exportTooltip = "No logs to export";
} else if (exporting) {
  exportTooltip = "Export in progress…";
} else {
  exportTooltip = "Export logs";
}

const logDetailsColumns = [
{
title: "Event Time",
Expand Down Expand Up @@ -167,6 +251,27 @@ function LogModal({
onClick={() => copyToClipboard(displayId, "File Execution ID")}
/>
)}
<Dropdown
menu={{ items: exportMenuItems }}
trigger={["click"]}
disabled={!pagination.total || exporting}
>
<Tooltip title={exportTooltip}>
{/* span wrapper — antd disabled Buttons swallow pointer
events so a direct Tooltip child never receives
mouseenter and the disabled-state tooltip never shows. */}
<span>
<Button
className="export-btn-outlined"
icon={<DownloadOutlined />}
loading={exporting}
disabled={!pagination.total || exporting}
>
Export
</Button>
</span>
</Tooltip>
</Dropdown>
</span>
}
centered
Expand Down