Create robots file

This commit is contained in:
Quinn Hegeman
2026-03-08 13:04:41 +01:00
parent a1b202686f
commit 2411c6fdc8
4 changed files with 95 additions and 0 deletions

View File

@@ -0,0 +1,15 @@
import { createDirectusConnection } from "@/lib/directus";
import { print } from 'graphql';
import getRobotsQuery from '@/graphql/settings/robots.graphql';
/**
 * Loads the robots configuration from the Directus CMS.
 *
 * Runs the `Robots` GraphQL query against a fresh Directus client and maps
 * the raw singleton payload onto the local RobotsSettings shape
 * (snake_case `extra_content` -> camelCase `extraContent`).
 */
export async function getRobotsSettings(): Promise<RobotsSettings> {
  const client = await createDirectusConnection();
  const queryResult = await client.query(print(getRobotsQuery));
  const raw = queryResult.Robots;
  const settings: RobotsSettings = {
    crawlers: raw.crawlers,
    extraContent: raw.extra_content,
  };
  return settings;
}

View File

@@ -0,0 +1,9 @@
# Fetch the Robots settings singleton: audit timestamps, the list of
# enabled crawler ids, and any free-form extra robots.txt content.
# (Commas between selection-set fields are insignificant in GraphQL.)
query Robots {
  Robots {
    id
    date_created
    date_updated
    crawlers
    extra_content
  }
}

View File

@@ -0,0 +1,67 @@
import { getRobotsSettings } from "@/content/settings/robots";
import { getSettings } from "@/content/settings/settings";
import type { APIRoute } from "astro";
import minifyXML from "minify-xml";
/**
 * GET handler producing the site's robots.txt body from CMS settings.
 *
 * For every known crawler, emits an Allow group (with a crawl delay and a
 * sitemap reference) when its id appears in the Robots settings' `crawlers`
 * list, and a Disallow group otherwise. Any free-form extra content
 * configured in the CMS is appended at the end.
 *
 * @returns 200 text/plain response containing the robots.txt content.
 */
export const GET = (async () => {
  const settings = await getSettings();
  const robots = await getRobotsSettings();

  // Known crawlers, keyed by the id stored in the CMS `crawlers` field.
  const crawlers = [
    { id: "google", name: "Googlebot" },
    { id: "bing", name: "Bingbot" },
    { id: "slurp", name: "Slurp" },
    { id: "duckduckgo", name: "DuckDuckBot" },
    { id: "baidu", name: "Baiduspider" },
    { id: "yandex", name: "YandexBot" },
    { id: "sogou", name: "Sogou web spider" },
    { id: "seznam", name: "SeznamBot" },
    { id: "qwantbot", name: "Qwantbot" },
    { id: "naverbot", name: "Naverbot" },
    { id: "coccocbot", name: "Coccocbot" },
    { id: "mojeekbot", name: "Mojeekbot" },
    { id: "ahrefs", name: "Ahrefsbot" },
    { id: "semrush", name: "SemrushBot" },
    { id: "mj12bot", name: "MJ12Bot" },
    { id: "dotbot", name: "DotBot" },
    { id: "petalbot", name: "PetalBot" },
    { id: "gptbot", name: "GPTBot" },
    { id: "ccbot", name: "CCBot" },
    { id: "ia_archiver", name: "ia_archiver" },
    { id: "claudebot", name: "ClaudeBot" },
    { id: "perplexity", name: "PerplexityBot" },
    { id: "facebookexternalhit", name: "facebookexternalhit/1.1" },
    { id: "twitterbot", name: "Twitterbot" },
    { id: "linkedinbot", name: "LinkedInBot" },
    { id: "bytespider", name: "ByteSpider" },
    { id: "applebot", name: "AppleBot" },
    { id: "amazonbot", name: "AmazonBot" },
  ] as const;

  let crawlerContent = "";
  for (const crawler of crawlers) {
    // `crawler` IS the entry — the original re-looked it up with find()
    // and then needed non-null assertions; both are unnecessary.
    if (robots.crawlers.includes(crawler.id)) {
      crawlerContent +=
        `User-agent: ${crawler.name}\nAllow: /\nCrawl-delay: 5\nSitemap: ${settings.website.domainName}/sitemap/index.xml`;
    } else {
      crawlerContent += `User-agent: ${crawler.name}\nDisallow: /`;
    }
    crawlerContent += "\n\n\n";
  }

  // Fix: extraContent was fetched from the CMS but never emitted.
  // Append it (when configured) so editors' custom rules take effect.
  if (robots.extraContent) {
    crawlerContent += robots.extraContent;
  }

  return new Response(crawlerContent, {
    status: 200,
    statusText: "OK",
    headers: {
      "Content-Type": "text/plain",
    },
  });
}) satisfies APIRoute;

4
astro/src/types/settings/robots.d.ts vendored Normal file
View File

@@ -0,0 +1,4 @@
/**
 * Robots configuration as consumed by the app (mapped from the Directus
 * `Robots` singleton by `getRobotsSettings`).
 */
type RobotsSettings = {
  /** Ids of the crawlers that are granted access in robots.txt. */
  crawlers: string[];
  /** Free-form extra robots content from the CMS; null when not set. */
  extraContent: string | null;
};