Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions pecan/src/App.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -78,6 +78,8 @@ function App() {
dataStore.clearPersistedSnapshot();
dataStore.notifyBoundsRefresh();
clearCheckpoints();
localStorage.removeItem("dash:plots");
window.location.reload();
};

useEffect(() => {
Expand Down
83 changes: 39 additions & 44 deletions pecan/src/components/PlotManager.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -15,16 +15,10 @@ const PLOT_COLORS = [
"#00bbcc",
];

// Helper to calculate downsample resolution based on time window
function calculateDownsampleResolution(windowMs: number): number {
// Under 3s (3000ms), use 200ms resolution
if (windowMs <= 3000) return 200;
// Above 20s (20000ms), use 1000ms resolution
if (windowMs >= 20000) return 1000;

// Linear interpolation between 3000ms and 20000ms
// Range: 17000ms. Value range: 800ms.
return 200 + ((windowMs - 3000) / 17000) * 800;
// Returns the downsample bin width in ms, or null when the window is short
// enough to plot raw (un-binned) samples.
function calculateDownsampleResolution(windowMs: number): number | null {
  const RAW_WINDOW_MAX_MS = 30000; // windows up to 30s render raw points
  const BIN_WIDTH_MS = 100; // fixed bin width once downsampling kicks in
  return windowMs > RAW_WINDOW_MAX_MS ? BIN_WIDTH_MS : null;
}

export interface PlotSignal {
Expand Down Expand Up @@ -171,43 +165,44 @@ function PlotManager({
const yData: number[] = [];

if (history.length > 0) {
let currentBinStart =
Math.floor(history[0].timestamp / resolution) * resolution;
let currentSum = 0;
let currentCount = 0;

for (const sample of history) {
const signalData = sample.data[signal.signalName];
if (signalData === undefined) continue;

const sampleBin =
Math.floor(sample.timestamp / resolution) * resolution;

if (sampleBin === currentBinStart) {
currentSum += signalData.sensorReading;
currentCount++;
} else {
// Finalize previous bin
if (currentCount > 0) {
const avg = currentSum / currentCount;
const x = (currentBinStart - windowEndMs) / 1000;
xData.push(x);
yData.push(avg);
if (resolution === null) {
for (const sample of history) {
const signalData = sample.data[signal.signalName];
if (signalData === undefined) continue;
xData.push((sample.timestamp - windowEndMs) / 1000);
yData.push(signalData.sensorReading);
}
} else {
let currentBinStart =
Math.floor(history[0].timestamp / resolution) * resolution;
let currentSum = 0;
let currentCount = 0;

for (const sample of history) {
const signalData = sample.data[signal.signalName];
if (signalData === undefined) continue;

const sampleBin =
Math.floor(sample.timestamp / resolution) * resolution;

if (sampleBin === currentBinStart) {
currentSum += signalData.sensorReading;
currentCount++;
} else {
if (currentCount > 0) {
xData.push((currentBinStart - windowEndMs) / 1000);
yData.push(currentSum / currentCount);
}
currentBinStart = sampleBin;
currentSum = signalData.sensorReading;
currentCount = 1;
}

// Move to new bin
currentBinStart = sampleBin;
currentSum = signalData.sensorReading;
currentCount = 1;
}
}

// Finalize last bin
if (currentCount > 0) {
const avg = currentSum / currentCount;
const x = (currentBinStart - windowEndMs) / 1000;
xData.push(x);
yData.push(avg);
if (currentCount > 0) {
xData.push((currentBinStart - windowEndMs) / 1000);
yData.push(currentSum / currentCount);
}
}
}

Expand Down
18 changes: 17 additions & 1 deletion pecan/src/pages/Dashboard.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,13 @@ function Dashboard() {

// Plotting State
// =====================================================================
const [plots, setPlots] = useState<Plot[]>([]);
const [plots, setPlots] = useState<Plot[]>(() => {
try {
const raw = localStorage.getItem("dash:plots");
if (raw) return JSON.parse(raw) as Plot[];
} catch { /* ignore */ }
return [];
});
const [nextPlotId, setNextPlotId] = useState(1);
const livePlotsSnapshotRef = useRef<Plot[] | null>(null);
// Stores the loadedAtMs of the replay session whose layout has been applied,
Expand Down Expand Up @@ -162,6 +168,13 @@ function Dashboard() {
}
}, [plots, viewMode, sortingMethod, session, saveConfig]);

// Persist plots locally so they survive page refresh
useEffect(() => {
try {
localStorage.setItem("dash:plots", JSON.stringify(plots));
} catch { /* ignore */ }
}, [plots]);

// Data
// =====================================================================

Expand Down Expand Up @@ -850,6 +863,9 @@ function Dashboard() {
<label className="text-gray-300 text-sm">
Time Window (seconds, max 120):
</label>
{plotTimeWindow > 30000 && (
<p className="text-yellow-400 text-xs">Downsampling to 100ms bins (window &gt; 30s)</p>
)}
<input
type="number"
min="1"
Expand Down
91 changes: 73 additions & 18 deletions server/installer/file-uploader/app.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
Response,
)
import uuid, time, threading, json, logging, requests, os, asyncio, io, zipfile
from datetime import datetime, timezone
from zoneinfo import ZoneInfo
from typing import Optional, Tuple, List
from urllib.parse import quote
from helper import CANTimescaleStreamer
Expand All @@ -20,7 +22,7 @@

error_logger = logging.getLogger(__name__)

ALLOWED_EXTENSIONS = {"csv", "zip"}
ALLOWED_EXTENSIONS = {"csv", "zip", "pecan"}
UPLOAD_ZIP_MAX_ARCHIVE_BYTES = int(os.getenv("UPLOAD_ZIP_MAX_ARCHIVE_BYTES", str(2 * 1024**3)))
UPLOAD_ZIP_MAX_MEMBER_BYTES = int(os.getenv("UPLOAD_ZIP_MAX_MEMBER_BYTES", str(4 * 1024**3)))
UPLOAD_ZIP_MAX_TOTAL_UNCOMPRESSED_BYTES = int(
Expand Down Expand Up @@ -155,6 +157,16 @@ def _zip_entry_path_safe(arcname: str) -> bool:
return ".." not in n.split("/")


class _InMemoryFile:
"""Minimal file-like object for passing in-memory bytes through expand_upload_files_to_csv_payloads."""
def __init__(self, filename: str, data: bytes):
self.filename = filename
self._data = data

def read(self) -> bytes:
return self._data


def expand_upload_files_to_csv_payloads(files) -> Tuple[List[Tuple[str, bytes]], Optional[str]]:
out: List[Tuple[str, bytes]] = []
zip_idx = 0
Expand All @@ -178,36 +190,76 @@ def expand_upload_files_to_csv_payloads(files) -> Tuple[List[Tuple[str, bytes]],
infos = [
i for i in z.infolist()
if not i.is_dir()
and i.filename.lower().endswith(".csv")
and (i.filename.lower().endswith(".csv") or i.filename.lower().endswith(".pecan"))
and _zip_entry_path_safe(i.filename)
# exclude macOS resource forks (__MACOSX/ and ._filename)
and not i.filename.startswith("__MACOSX/")
and not os.path.basename(i.filename).startswith("._")
]
if not infos:
return [], f"No CSV files found in zip: {name}"
return [], f"No CSV or .pecan files found in zip: {name}"
if len(infos) > UPLOAD_ZIP_MAX_CSV_IN_ZIP:
return [], f"Too many CSV entries in {name} (max {UPLOAD_ZIP_MAX_CSV_IN_ZIP})"
total_uc = sum(i.file_size for i in infos)
if total_uc > UPLOAD_ZIP_MAX_TOTAL_UNCOMPRESSED_BYTES:
return [], f"Zip {name} uncompressed total too large"
for i in infos:
if i.file_size > UPLOAD_ZIP_MAX_MEMBER_BYTES:
return [], f"CSV inside zip too large: {i.filename} in {name}"
return [], f"File inside zip too large: {i.filename} in {name}"
leaf = os.path.basename(i.filename) or "data.csv"
key = (zlabel, leaf.lower())
if key in seen_in_zip:
return [], f'Duplicate CSV filename "{leaf}" inside zip {name}'
return [], f'Duplicate filename "{leaf}" inside zip {name}'
seen_in_zip.add(key)
with z.open(i, "r") as fp:
body = fp.read()
out.append((f"_z{zlabel}/{leaf}", body))
if leaf.lower().endswith(".pecan"):
# Convert .pecan to CSV in-place so the pipeline is uniform
sub_out, err = expand_upload_files_to_csv_payloads(
[_InMemoryFile(leaf, body)]
)
if err:
return [], f"{err} (inside zip {name})"
out.extend(sub_out)
else:
out.append((f"_z{zlabel}/{leaf}", body))
except zipfile.BadZipFile:
return [], f"Invalid or corrupt zip: {name}"
except RuntimeError as e:
return [], f"Could not read zip {name}: {e}"
elif ext == "pecan":
try:
payload = json.loads(data.decode("utf-8"))
except Exception:
return [], f"Invalid .pecan file (bad JSON): {name}"
if payload.get("format") != "pecan-session" or payload.get("version") != 2:
return [], f".pecan file must be pecan-session v2 format: {name}"
frames = payload.get("frames") or []
if not frames:
return [], f"No frames in .pecan file: {name}"
epoch_base_ms = payload.get("epochBaseMs")
if epoch_base_ms is None:
return [], f".pecan file missing epochBaseMs — cannot determine timestamps: {name}"
tz_toronto = ZoneInfo("America/Toronto")
start_dt = datetime.fromtimestamp(epoch_base_ms / 1000, tz=tz_toronto)
csv_filename = start_dt.strftime("%Y-%m-%d-%H-%M-%S") + ".csv"
lines = []
for frame in frames:
if not isinstance(frame, list) or len(frame) < 4:
continue
try:
t_rel_ms = int(frame[0])
can_id = int(frame[1])
data_bytes = bytes.fromhex(str(frame[3]))
padded = (data_bytes + b"\x00" * 8)[:8]
except Exception:
continue
lines.append(f"{t_rel_ms},CAN,{can_id}," + ",".join(str(b) for b in padded))
if not lines:
return [], f"No parseable frames in .pecan file: {name}"
out.append((csv_filename, "\n".join(lines).encode("utf-8")))
else:
return [], f"Invalid file type (only .csv and .zip): {name}"
return [], f"Invalid file type (only .csv, .zip, and .pecan): {name}"
if not out:
return [], "No CSV data to process"
return out, None
Expand Down Expand Up @@ -501,12 +553,13 @@ def on_progress(sent: int, total: int) -> None:
pass

def worker():
streamer = CANTimescaleStreamer(
postgres_dsn=POSTGRES_DSN,
table=season.lower(),
dbc_path=dbc_temp_path,
)
streamer = None
try:
streamer = CANTimescaleStreamer(
postgres_dsn=POSTGRES_DSN,
table=season.lower(),
dbc_path=dbc_temp_path,
)
asyncio.run(
streamer.stream_multiple_csvs(
file_data=file_data,
Expand All @@ -516,14 +569,16 @@ def worker():
)
except Exception as e:
error_logger.error(traceback.format_exc())
PROGRESS[task_id]["msg"] = f"Error: {e}"
PROGRESS[task_id]["done"] = True
PROGRESS[task_id]["msg"] = f"Error: {e}"
PROGRESS[task_id]["error"] = str(e)
PROGRESS[task_id]["done"] = True
slack.fail(str(e))
finally:
try:
streamer.close()
except Exception as e:
print("error closing streamer", e)
if streamer:
try:
streamer.close()
except Exception as e:
print("error closing streamer", e)
if dbc_temp_path and os.path.exists(dbc_temp_path):
try:
os.unlink(dbc_temp_path)
Expand Down
31 changes: 25 additions & 6 deletions server/installer/file-uploader/static/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -143,7 +143,7 @@ const DROP_SVG = `<svg id="file-upload-img" aria-hidden="true"
d="M13 13h3a3 3 0 0 0 0-6h-.025A5.56 5.56 0 0 0 16 6.5 5.5 5.5 0 0 0 5.207 5.021C5.137
5.017 5.071 5 5 5a4 4 0 0 0 0 8h2.167M10 15V6m0 0L8 8m2-2 2 2"/>
</svg>
<h3>Click to upload CSV or ZIP, or drag and drop</h3>`;
<h3>Click to upload CSV, ZIP, or .pecan, or drag and drop</h3>`;

const SPINNER_HTML = `<svg class="spinner" viewBox="0 0 50 50">
<circle class="path" cx="25" cy="25" r="20" fill="none" stroke-width="5"></circle>
Expand All @@ -165,6 +165,9 @@ function handleProgress(task_id, fileName, season) {
canSubmit = false;
localStorage.setItem(STORAGE_KEY, task_id);
setDropZoneState("uploading");
const errBox = document.getElementById("upload-error-box");
if (errBox) errBox.style.display = "none";
document.getElementById("progress-bar").style.background = "";

// Show safe-to-close banner immediately
if (fileName || season) showSafeToCloseBanner(fileName, season);
Expand Down Expand Up @@ -194,9 +197,24 @@ function handleProgress(task_id, fileName, season) {
localStorage.removeItem(STORAGE_KEY);
hideSafeToCloseBanner();

document.getElementById("progress-bar_pct").innerText = "Done ✓";
document.getElementById("progress-bar_count").innerText =
data.total ? `${data.total.toLocaleString()} rows written` : "Complete";
if (data.error) {
document.getElementById("progress-bar_pct").innerText = "Failed ✗";
document.getElementById("progress-bar").style.background = "#b91c1c";
document.getElementById("progress-bar_count").innerText = "";
const errBox = document.getElementById("upload-error-box") || (() => {
const el = document.createElement("div");
el.id = "upload-error-box";
el.style.cssText = "margin-top:10px;padding:10px 14px;background:#450a0a;border:1px solid #b91c1c;border-radius:6px;color:#fca5a5;font-size:0.85em;white-space:pre-wrap;word-break:break-word;";
document.querySelector(".progress-bar_parent").after(el);
return el;
})();
errBox.innerText = "❌ Upload failed:\n" + data.error;
errBox.style.display = "block";
} else {
document.getElementById("progress-bar_pct").innerText = "Done ✓";
document.getElementById("progress-bar_count").innerText =
data.total ? `${data.total.toLocaleString()} rows written` : "Complete";
}

["drop_zone-input", "season-select", "dbc-select", "dbc-input"].forEach((id) => {
const el = document.getElementById(id);
Expand Down Expand Up @@ -264,8 +282,9 @@ function submitCsvUpload(files) {
const n = file.name.toLowerCase();
const okCsv = file.type === "text/csv" || n.endsWith(".csv") || file.type === "application/csv";
const okZip = n.endsWith(".zip") || file.type === "application/zip" || file.type === "application/x-zip-compressed";
if (!okCsv && !okZip) {
alert(`${file.name} must be .csv or .zip`);
const okPecan = n.endsWith(".pecan");
if (!okCsv && !okZip && !okPecan) {
alert(`${file.name} must be .csv, .zip, or .pecan`);
return;
}
}
Expand Down
6 changes: 3 additions & 3 deletions server/installer/file-uploader/templates/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
<div class="info">
<img src="{{ url_for('static', filename='logo-32_375x.webp' )}}" />
<h1>Upload DAQ Data</h1>
<h2>(multiple CSVs or .zip of CSVs)</h2>
<h2>(CSV, .zip of CSVs, or .pecan)</h2>
<span id="season-select_span">
<label for="season-select">Season:</label>
<select name="season" id="season-select">
Expand Down Expand Up @@ -65,11 +65,11 @@ <h2>Task: <span id="task-id-label">{{ task_id }}</span></h2>
d="M13 13h3a3 3 0 0 0 0-6h-.025A5.56 5.56 0 0 0 16 6.5 5.5 5.5 0 0 0 5.207 5.021C5.137 5.017 5.071 5 5 5a4 4 0 0 0 0 8h2.167M10 15V6m0 0L8 8m2-2 2 2"
/>
</svg>
<h3>Click to upload CSV or zip, or drag and drop</h3>
<h3>Click to upload CSV, zip, or .pecan, or drag and drop</h3>
</label>
<input
type="file"
accept=".csv,text/csv,.zip,application/zip,application/x-zip-compressed"
accept=".csv,text/csv,.zip,application/zip,application/x-zip-compressed,.pecan"
id="drop_zone-input"
multiple
style="display: none"
Expand Down
Loading
Loading