Skip to content

Commit 3026cd9

Browse files
authored
chore(devx): reduce docs dev cold-load latency (#144)
* Improve docs dev page latency
* Fix benchmark readiness detection
1 parent 9c7739b commit 3026cd9

5 files changed

Lines changed: 159 additions & 5 deletions

File tree

scripts/dev-benchmark.ts

Lines changed: 132 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,132 @@
1+
const viteArgs = process.argv.slice(2);
2+
const host = "127.0.0.1";
3+
const port = 3020;
4+
const start = performance.now();
5+
6+
const child = Bun.spawn({
7+
cmd: ["bun", "run", "dev", "--", "--host", host, "--port", String(port), ...viteArgs],
8+
cwd: process.cwd(),
9+
stdin: "ignore",
10+
stdout: "pipe",
11+
stderr: "pipe",
12+
env: process.env,
13+
});
14+
15+
let ready = false;
16+
let stdout = "";
17+
let stderr = "";
18+
19+
async function collect(readable: ReadableStream<Uint8Array>, target: "stdout" | "stderr") {
20+
const decoder = new TextDecoder();
21+
22+
for await (const chunk of readable) {
23+
const text = decoder.decode(chunk, { stream: true });
24+
25+
if (target === "stdout") {
26+
stdout += text;
27+
if (stdout.includes("Local:")) {
28+
ready = true;
29+
}
30+
} else {
31+
stderr += text;
32+
}
33+
}
34+
35+
const tail = decoder.decode();
36+
if (!tail) return;
37+
38+
if (target === "stdout") {
39+
stdout += tail;
40+
if (stdout.includes("Local:")) {
41+
ready = true;
42+
}
43+
} else {
44+
stderr += tail;
45+
}
46+
}
47+
48+
const stdoutTask = collect(child.stdout, "stdout");
49+
const stderrTask = collect(child.stderr, "stderr");
50+
51+
async function waitForReady(timeoutMs: number) {
52+
const start = performance.now();
53+
54+
while (!ready) {
55+
if (child.exitCode !== null) {
56+
throw new Error(`vite dev exited early with code ${child.exitCode}`);
57+
}
58+
if (performance.now() - start > timeoutMs) {
59+
throw new Error("timed out waiting for vite dev");
60+
}
61+
await Bun.sleep(100);
62+
}
63+
}
64+
65+
async function request(pathname: string) {
66+
const requestStart = performance.now();
67+
const response = await fetch(`http://${host}:${port}${pathname}`);
68+
await response.arrayBuffer();
69+
70+
return {
71+
pathname,
72+
status: response.status,
73+
ms: Number((performance.now() - requestStart).toFixed(2)),
74+
};
75+
}
76+
77+
function formatDuration(ms: number) {
78+
return `${(ms / 1000).toFixed(2)}s`;
79+
}
80+
81+
function printTable(
82+
rows: Array<{
83+
step: string;
84+
durationMs: number;
85+
status: string;
86+
}>,
87+
) {
88+
const headers = ["Step", "Duration", "Status"];
89+
const body = rows.map((row) => [row.step, formatDuration(row.durationMs), row.status]);
90+
const widths = headers.map((header, index) =>
91+
Math.max(header.length, ...body.map((row) => row[index].length)),
92+
);
93+
94+
const formatRow = (row: string[]) =>
95+
row.map((cell, index) => cell.padEnd(widths[index])).join(" ");
96+
97+
console.log(formatRow(headers));
98+
console.log(widths.map((width) => "-".repeat(width)).join(" "));
99+
for (const row of body) {
100+
console.log(formatRow(row));
101+
}
102+
}
103+
104+
try {
105+
await waitForReady(30_000);
106+
const readyMs = Number((performance.now() - start).toFixed(2));
107+
108+
const docs = await request("/docs/");
109+
const totalToDocsMs = Number((performance.now() - start).toFixed(2));
110+
const anotherPage = await request("/docs/support/dashboard");
111+
const totalMs = Number((performance.now() - start).toFixed(2));
112+
113+
if (docs.status >= 400 || anotherPage.status >= 400) {
114+
console.error("vite dev benchmark hit an error response");
115+
if (stdout) console.error(stdout.trim());
116+
if (stderr) console.error(stderr.trim());
117+
process.exitCode = 1;
118+
}
119+
120+
printTable([
121+
{ step: "Dev ready", durationMs: readyMs, status: "-" },
122+
{ step: "First /docs/ request", durationMs: docs.ms, status: String(docs.status) },
123+
{ step: "Start -> first /docs/", durationMs: totalToDocsMs, status: String(docs.status) },
124+
{ step: "Second docs page", durationMs: anotherPage.ms, status: String(anotherPage.status) },
125+
{ step: "Full benchmark", durationMs: totalMs, status: "-" },
126+
]);
127+
} finally {
128+
child.kill("SIGTERM");
129+
await child.exited;
130+
await stdoutTask;
131+
await stderrTask;
132+
}

source.config.ts

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,13 +16,17 @@ export const docs = defineDocs({
1616
docs: {
1717
async: isDevelopment,
1818
postprocess: {
19-
includeProcessedMarkdown: true,
19+
includeProcessedMarkdown: !isDevelopment,
2020
},
2121
},
2222
});
2323

2424
export default defineConfig({
2525
mdxOptions: {
26+
// Shiki highlighting is one of the most expensive parts of the MDX pipeline.
27+
// Keep it in builds, but skip it in local dev so first-page SSR doesn't have
28+
// to highlight hundreds of code-heavy docs up front.
29+
rehypeCodeOptions: isDevelopment ? false : undefined,
2630
remarkPlugins: (existing) => [
2731
remarkImagePaths,
2832
remarkLinkPaths,

src/lib/source.ts

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -135,7 +135,10 @@ export const source = loader({
135135
});
136136

137137
export async function getLLMText(page: InferPageType<typeof source>) {
138-
const processed = await page.data.getText("processed");
138+
// Dev disables processed markdown generation to avoid paying that extra MDX
139+
// postprocess cost on the first docs request, so fall back to raw text there.
140+
const textType = process.env.NODE_ENV === "development" ? "raw" : "processed";
141+
const processed = await page.data.getText(textType);
139142

140143
return `# ${page.data.title}
141144

src/routes/api/search.ts

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,19 @@ import { createFileRoute } from "@tanstack/react-router";
22
import { source } from "@/lib/source";
33
import { createFromSource } from "fumadocs-core/search/server";
44

5-
const server = createFromSource(source, { language: "english" });
5+
let server:
6+
| ReturnType<typeof createFromSource>
7+
| null = null;
8+
9+
function getServer() {
10+
if (server) return server;
11+
12+
// Building the local Orama index is expensive; keep it off the main dev
13+
// startup path and only initialize it when /api/search is actually hit.
14+
server = createFromSource(source, { language: "english" });
15+
16+
return server;
17+
}
618

719
export const Route = createFileRoute("/api/search")({
820
server: {
@@ -13,5 +25,5 @@ export const Route = createFileRoute("/api/search")({
1325
});
1426

1527
export function handleSearchGet(request: Request) {
16-
return server.GET(request);
28+
return getServer().GET(request);
1729
}

src/routes/llms[.]mdx.docs.$.ts

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,11 @@ export const Route = createFileRoute("/llms.mdx/docs/$")({
88
const slugs = params._splat?.split("/") ?? [];
99
const page = source.getPage(slugs);
1010
if (!page) throw notFound();
11+
// Dev disables processed markdown generation to keep the first docs
12+
// request fast, so the LLM endpoint serves raw markdown in development.
13+
const textType = process.env.NODE_ENV === "development" ? "raw" : "processed";
1114

12-
return new Response(await page.data.getText("processed"), {
15+
return new Response(await page.data.getText(textType), {
1316
headers: {
1417
"Content-Type": "text/markdown",
1518
"Access-Control-Allow-Origin": "*",

0 commit comments

Comments (0)