Skip to content

Commit 1692055

Browse files
committed
fix(seo): block non-indexable routes from crawlers
1 parent 09114fa commit 1692055

File tree

2 files changed

+27
-3
lines changed

2 files changed

+27
-3
lines changed

src/hooks.server.ts

Lines changed: 20 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -160,5 +160,24 @@ export const handle: Handle = async ({ event, resolve }) => {
160160
}
161161

162162
const response = await resolve(event);
163-
return withSecurityHeaders(response);
163+
const securedResponse = withSecurityHeaders(response);
164+
165+
// Prevent indexing of non-content routes
166+
const noindexPrefixes = ['/api/', '/dashboard/', '/cli-auth/', '/install'];
167+
const noindexSuffixes = ['/install', '/config', '/og'];
168+
const shouldNoindex =
169+
noindexPrefixes.some((p) => path.startsWith(p)) ||
170+
noindexSuffixes.some((s) => path.endsWith(s));
171+
172+
if (shouldNoindex) {
173+
const headers = new Headers(securedResponse.headers);
174+
headers.set('X-Robots-Tag', 'noindex');
175+
return new Response(securedResponse.body, {
176+
status: securedResponse.status,
177+
statusText: securedResponse.statusText,
178+
headers
179+
});
180+
}
181+
182+
return securedResponse;
164183
};

static/robots.txt

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,11 @@
1-
# allow crawling everything by default
1+
# allow crawling important pages
22
User-agent: *
3-
Disallow:
3+
Disallow: /api/
4+
Disallow: /dashboard/
5+
Disallow: /cli-auth/
6+
Disallow: */install$
7+
Disallow: */config$
8+
Disallow: */og$
49

510
# explicitly allow AI search crawlers
611
User-agent: GPTBot

0 commit comments

Comments (0)