Skip to content

Commit c6d206b

Browse files
Add robots meta tags and implement robots.txt handling in worker
1 parent cae2125 commit c6d206b

2 files changed

Lines changed: 33 additions & 5 deletions

File tree

pages/defaultPage.html

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@
33
<head>
44
<meta charset="utf-8" />
55
<meta name="viewport" content="width=device-width, initial-scale=1" />
6+
<meta name="robots" content="noindex, nofollow, noarchive, nosnippet, noimageindex, notranslate" />
7+
<meta name="googlebot" content="noindex, nofollow, noarchive, nosnippet, noimageindex, notranslate" />
68
<title>MineTools API</title>
79
<style>/*__INLINE_CSS__*/</style>
810
</head>

worker.js

Lines changed: 31 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -4,11 +4,33 @@ import { handleProfileRoute } from "./routes/profileRoute";
44
import { handleUuidRoute } from "./routes/uuidRoute";
55
import { internalError, notFound } from "./utils/responses";
66

7+
// Robots directive used both as an X-Robots-Tag header value and mirrored by
// robots.txt: keep every endpoint of this worker entirely out of search engines.
const ROBOTS_POLICY =
  "noindex, nofollow, noarchive, nosnippet, noimageindex, notranslate";

// Maps the first path segment of a request to the handler that serves it.
const ROUTE_HANDLERS = {
  uuid: handleUuidRoute,
  profile: handleProfileRoute,
};
1113

14+
/**
 * Returns a copy of `response` carrying an `X-Robots-Tag` header so that
 * crawlers are told not to index whatever this worker serves.
 *
 * @param {Response} response - the response to decorate
 * @returns {Response} new Response with identical body/status and the robots header set
 */
function withRobotsBlocked(response) {
  const { status, statusText } = response;

  // Response headers are immutable on constructed responses; copy them first.
  const taggedHeaders = new Headers(response.headers);
  taggedHeaders.set("X-Robots-Tag", ROBOTS_POLICY);

  return new Response(response.body, { status, statusText, headers: taggedHeaders });
}
24+
25+
/**
 * Builds the response served for `/robots.txt`: a directive telling every
 * crawler to stay away from all paths on this host.
 *
 * @returns {Response} plain-text robots.txt body, cacheable for one hour
 */
function robotsTxtResponse() {
  const body = ["User-agent: *", "Disallow: /", ""].join("\n");

  const headers = new Headers();
  headers.set("content-type", "text/plain; charset=utf-8");
  headers.set("cache-control", "public, max-age=3600");

  return new Response(body, { headers });
}
33+
1234
function resolveRoute(pathname) {
1335
if (!pathname || pathname === "/") {
1436
return null;
@@ -39,24 +61,28 @@ export default {
3961
async fetch(request, env) {
4062
try {
4163
const pathname = new URL(request.url).pathname;
64+
if (pathname === "/robots.txt") {
65+
return withRobotsBlocked(robotsTxtResponse());
66+
}
67+
4268
if (pathname === "/") {
43-
return serveDefaultPage(request);
69+
return withRobotsBlocked(serveDefaultPage(request));
4470
}
4571

4672
const route = resolveRoute(pathname);
4773
if (!route) {
48-
return notFound("Route not found");
74+
return withRobotsBlocked(notFound("Route not found"));
4975
}
5076

5177
const rateLimitedResponse = await enforceRateLimit(request, env);
5278
if (rateLimitedResponse) {
53-
return rateLimitedResponse;
79+
return withRobotsBlocked(rateLimitedResponse);
5480
}
5581

56-
return route.handler(route.identifier, env);
82+
return withRobotsBlocked(await route.handler(route.identifier, env));
5783
} catch (error) {
5884
console.error("Unhandled worker error", error);
59-
return internalError();
85+
return withRobotsBlocked(internalError());
6086
}
6187
}
6288
};

0 commit comments

Comments (0)