@@ -322,10 +322,27 @@ def _upload_index(self) -> None:
             self.logger.warning("No upload URL in index upload response")
             return

-        # Read and compress index file
-        with open(index_path, "rb") as f:
-            index_data = f.read()
+        # Read index file and update trace_file.path to cloud storage path
+        with open(index_path, "r", encoding="utf-8") as f:
+            index_json = json.load(f)

+        # Extract cloud storage path from trace upload URL
+        # upload_url format: https://...digitaloceanspaces.com/traces/{run_id}.jsonl.gz
+        # Extract path: traces/{run_id}.jsonl.gz
+        try:
+            from urllib.parse import urlparse
+            parsed_url = urlparse(self.upload_url)
+            # Extract path after domain (e.g., /traces/run-123.jsonl.gz -> traces/run-123.jsonl.gz)
+            cloud_trace_path = parsed_url.path.lstrip("/")
+            # Update trace_file.path in index
+            if "trace_file" in index_json and isinstance(index_json["trace_file"], dict):
+                index_json["trace_file"]["path"] = cloud_trace_path
+        except Exception as e:
+            if self.logger:
+                self.logger.warning(f"Failed to extract cloud path from upload URL: {e}")
+
+        # Serialize updated index to JSON
+        index_data = json.dumps(index_json, indent=2).encode("utf-8")
         compressed_index = gzip.compress(index_data)
         index_size = len(compressed_index)
         self.index_file_size_bytes = index_size  # Track index file size
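For reference, a minimal standalone sketch of the path extraction added above. The bucket URL here is hypothetical; in the diff the value comes from self.upload_url returned by the upload API:

    from urllib.parse import urlparse

    # Hypothetical upload URL in the same shape as the one returned by the service
    upload_url = "https://example.nyc3.digitaloceanspaces.com/traces/run-123.jsonl.gz"

    # Keep only the object key, dropping the leading slash from the URL path
    cloud_trace_path = urlparse(upload_url).path.lstrip("/")
    print(cloud_trace_path)  # traces/run-123.jsonl.gz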