Skip to content

Commit 6f864a5

Browse files
authored
Merge pull request #97 from SentienceAPI/trace_status
set or infer final trace status - close upload gaps
2 parents d663697 + f5b559b commit 6f864a5

File tree

8 files changed

+893
-11
lines changed

8 files changed

+893
-11
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
44

55
[project]
66
name = "sentienceapi"
7-
version = "0.90.18"
7+
version = "0.90.19"
88
description = "Python SDK for Sentience AI Agent Browser Automation"
99
readme = "README.md"
1010
requires-python = ">=3.11"

sentience/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@
7070
)
7171
from .wait import wait_for
7272

73-
__version__ = "0.90.18"
73+
__version__ = "0.90.19"
7474

7575
__all__ = [
7676
# Core SDK

sentience/cloud_tracing.py

Lines changed: 166 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -109,6 +109,7 @@ def __init__(
109109
self.trace_file_size_bytes = 0
110110
self.screenshot_total_size_bytes = 0
111111
self.screenshot_count = 0 # Track number of screenshots extracted
112+
self.index_file_size_bytes = 0 # Track index file size
112113

113114
def emit(self, event: dict[str, Any]) -> None:
114115
"""
@@ -327,6 +328,7 @@ def _upload_index(self) -> None:
327328

328329
compressed_index = gzip.compress(index_data)
329330
index_size = len(compressed_index)
331+
self.index_file_size_bytes = index_size # Track index file size
330332

331333
if self.logger:
332334
self.logger.info(f"Index file size: {index_size / 1024:.2f} KB")
@@ -361,9 +363,158 @@ def _upload_index(self) -> None:
361363
if self.logger:
362364
self.logger.warning(f"Error uploading trace index: {e}")
363365

366+
def _infer_final_status_from_trace(self) -> str:
    """
    Derive the run's final status by scanning the recorded trace events.

    Reads the JSONL trace file at ``self._path`` line by line, skipping
    blank lines and tolerating lines that fail to parse as JSON (e.g.
    partially written records).

    Returns:
        Final status: "success", "failure", "partial", or "unknown"
    """
    valid_statuses = ("success", "failure", "partial", "unknown")
    try:
        parsed: list[dict] = []
        with open(self._path, encoding="utf-8") as fh:
            for raw in fh:
                stripped = raw.strip()
                if not stripped:
                    continue
                try:
                    parsed.append(json.loads(stripped))
                except json.JSONDecodeError:
                    # Skip corrupt/truncated lines rather than failing.
                    continue

        if not parsed:
            return "unknown"

        # An explicit status on the last run_end event is authoritative.
        for evt in reversed(parsed):
            if evt.get("type") == "run_end":
                reported = evt.get("data", {}).get("status")
                if reported in valid_statuses:
                    return reported

        # No explicit status: infer from error and step_end events.
        saw_step_end = any(e.get("type") == "step_end" for e in parsed)
        if any(e.get("type") == "error" for e in parsed):
            # Errors alongside completed steps count as partial success.
            return "partial" if saw_step_end else "failure"
        if saw_step_end:
            return "success"
        return "unknown"

    except Exception:
        # Unreadable trace file: report unknown rather than raising.
        return "unknown"
416+
417+
def _extract_stats_from_trace(self) -> dict[str, Any]:
    """
    Extract execution statistics from the trace file.

    Parses the JSONL trace at ``self._path`` exactly once and derives all
    stats — including the final status — from the parsed events. The
    status inference applies the same decision rules as
    ``_infer_final_status_from_trace`` (explicit run_end status first,
    then error/step_end inference) without re-reading the file a second
    time, which the previous implementation did.

    Returns:
        Dictionary with stats fields for /v1/traces/complete:
        total_steps, total_events, duration_ms, final_status,
        started_at, ended_at.
    """
    # Shared fallback for "no usable data" and error paths.
    empty_stats: dict[str, Any] = {
        "total_steps": 0,
        "total_events": 0,
        "duration_ms": None,
        "final_status": "unknown",
        "started_at": None,
        "ended_at": None,
    }
    try:
        # Read trace file once, tolerating blank and corrupt lines.
        events: list[dict[str, Any]] = []
        with open(self._path, encoding="utf-8") as f:
            for line in f:
                line = line.strip()
                if not line:
                    continue
                try:
                    events.append(json.loads(line))
                except json.JSONDecodeError:
                    continue

        if not events:
            return dict(empty_stats)

        # Find run_start and run_end events.
        run_start = next((e for e in events if e.get("type") == "run_start"), None)
        run_end = next((e for e in events if e.get("type") == "run_end"), None)

        # Extract timestamps.
        started_at: str | None = run_start.get("ts") if run_start else None
        ended_at: str | None = run_end.get("ts") if run_end else None

        # Calculate duration from ISO-8601 timestamps ("Z" normalized).
        duration_ms: int | None = None
        if started_at and ended_at:
            try:
                from datetime import datetime

                start_dt = datetime.fromisoformat(started_at.replace("Z", "+00:00"))
                end_dt = datetime.fromisoformat(ended_at.replace("Z", "+00:00"))
                duration_ms = int((end_dt - start_dt).total_seconds() * 1000)
            except Exception:
                pass  # malformed timestamps: leave duration unknown

        # Count unique steps from step_start events (retries share an index).
        step_indices = {
            e.get("data", {}).get("step_index")
            for e in events
            if e.get("type") == "step_start"
        }
        step_indices.discard(None)
        total_steps = len(step_indices)

        # If run_end carries a steps count, prefer it (more accurate).
        if run_end:
            steps_from_end = run_end.get("data", {}).get("steps")
            if steps_from_end is not None:
                total_steps = max(total_steps, steps_from_end)

        # Infer final status from the events already in memory — same
        # rules as _infer_final_status_from_trace, without a second read.
        final_status: str | None = None
        for event in reversed(events):
            if event.get("type") == "run_end":
                status = event.get("data", {}).get("status")
                if status in ("success", "failure", "partial", "unknown"):
                    final_status = status
                    break
        if final_status is None:
            has_step_end = any(e.get("type") == "step_end" for e in events)
            if any(e.get("type") == "error" for e in events):
                # Errors alongside completed steps → partial success.
                final_status = "partial" if has_step_end else "failure"
            elif has_step_end:
                final_status = "success"
            else:
                final_status = "unknown"

        return {
            "total_steps": total_steps,
            "total_events": len(events),
            "duration_ms": duration_ms,
            "final_status": final_status,
            "started_at": started_at,
            "ended_at": ended_at,
        }

    except Exception as e:
        if self.logger:
            self.logger.warning(f"Error extracting stats from trace: {e}")
        return dict(empty_stats)
514+
364515
def _complete_trace(self) -> None:
365516
"""
366-
Call /v1/traces/complete to report file sizes to gateway.
517+
Call /v1/traces/complete to report file sizes and stats to gateway.
367518
368519
This is a best-effort call - failures are logged but don't affect upload success.
369520
"""
@@ -372,16 +523,25 @@ def _complete_trace(self) -> None:
372523
return
373524

374525
try:
526+
# Extract stats from trace file
527+
stats = self._extract_stats_from_trace()
528+
529+
# Add file size fields
530+
stats.update(
531+
{
532+
"trace_file_size_bytes": self.trace_file_size_bytes,
533+
"screenshot_total_size_bytes": self.screenshot_total_size_bytes,
534+
"screenshot_count": self.screenshot_count,
535+
"index_file_size_bytes": self.index_file_size_bytes,
536+
}
537+
)
538+
375539
response = requests.post(
376540
f"{self.api_url}/v1/traces/complete",
377541
headers={"Authorization": f"Bearer {self.api_key}"},
378542
json={
379543
"run_id": self.run_id,
380-
"stats": {
381-
"trace_file_size_bytes": self.trace_file_size_bytes,
382-
"screenshot_total_size_bytes": self.screenshot_total_size_bytes,
383-
"screenshot_count": self.screenshot_count,
384-
},
544+
"stats": stats,
385545
},
386546
timeout=10,
387547
)

sentience/schemas/trace_v1.json

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -198,7 +198,12 @@
198198
"description": "run_end data",
199199
"required": ["steps"],
200200
"properties": {
201-
"steps": {"type": "integer"}
201+
"steps": {"type": "integer"},
202+
"status": {
203+
"type": "string",
204+
"enum": ["success", "failure", "partial", "unknown"],
205+
"description": "Final execution status"
206+
}
202207
}
203208
},
204209
{

sentience/tracer_factory.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -208,6 +208,16 @@ def _recover_orphaned_traces(api_key: str, api_url: str = SENTIENCE_API_URL) ->
208208
)
209209

210210
if response.status_code != 200:
211+
# HTTP 409 means trace already exists (already uploaded)
212+
# Treat as success and delete local file
213+
if response.status_code == 409:
214+
print(f"✅ Trace {run_id} already exists in cloud (skipping re-upload)")
215+
# Delete local file since it's already in cloud
216+
try:
217+
os.remove(trace_file)
218+
except Exception:
219+
pass # Ignore cleanup errors
220+
continue
211221
# HTTP 422 typically means invalid run_id (e.g., test files)
212222
# Skip silently for 422, but log other errors
213223
if response.status_code == 422:

0 commit comments

Comments
 (0)