Create robots file

This commit adds one new file, astro/src/pages/robots.txt.ts (67 lines added, 0 removed).
|
||||
import { getRobotsSettings } from "@/content/settings/robots";
|
||||
import { getSettings } from "@/content/settings/settings";
|
||||
import type { APIRoute } from "astro";
|
||||
import minifyXML from "minify-xml";
|
||||
|
||||
export const GET = (async () => {
|
||||
const settings = await getSettings();
|
||||
const robots = await getRobotsSettings();
|
||||
|
||||
let crawlers = [
|
||||
{ id: 'google', name: 'Googlebot' },
|
||||
{ id: 'bing', name: "Bingbot" },
|
||||
{ id: "slurp", name: "Slurp" },
|
||||
{ id: "duckduckgo", name: "DuckDuckBot" },
|
||||
{ id: "baidu", name: "Baiduspider" },
|
||||
{ id: "yandex", name: "YandexBot" },
|
||||
{ id: "sogou", name: "Sogou web spider" },
|
||||
{ id: "seznam", name: "SeznamBot" },
|
||||
{ id: "qwantbot", name: "Qwantbot" },
|
||||
{ id: "naverbot", name: "Naverbot" },
|
||||
{ id: "coccocbot", name: "Coccocbot" },
|
||||
{ id: "mojeekbot", name: "Mojeekbot" },
|
||||
{ id: "ahrefs", name: "Ahrefsbot" },
|
||||
{ id: "semrush", name: "SemrushBot" },
|
||||
{ id: "mj12bot", name: "MJ12Bot" },
|
||||
{ id: "dotbot", name: "DotBot" },
|
||||
{ id: "petalbot", name: "PetalBot" },
|
||||
{ id: "gptbot", name: "GPTBot" },
|
||||
{ id: "ccbot", name: "CCBot" },
|
||||
{ id: "ia_archiver", name: "ia_archiver" },
|
||||
{ id: "claudebot", name: "ClaudeBot" },
|
||||
{ id: "perplexity", name: "PerplexityBot" },
|
||||
{ id: "facebookexternalhit", name: "facebookexternalhit/1.1" },
|
||||
{ id: "twitterbot", name: "Twitterbot" },
|
||||
{ id: "linkedinbot", name: "LinkedInBot" },
|
||||
{ id: "bytespider", name: "ByteSpider" },
|
||||
{ id: "applebot", name: "AppleBot" },
|
||||
{ id: "amazonbot", name: "AmazonBot" }
|
||||
]
|
||||
|
||||
let crawlerContent = "";
|
||||
|
||||
crawlers.forEach((crawler) => {
|
||||
if (robots.crawlers.some(c => c === crawler.id)) {
|
||||
const crawlerData = crawlers.find(c => c.id === crawler.id);
|
||||
|
||||
crawlerContent = crawlerContent +
|
||||
`User-agent: ${crawlerData!.name}\nAllow: /\nCrawl-delay: 5\nSitemap: ${settings.website.domainName}/sitemap/index.xml`
|
||||
}
|
||||
else {
|
||||
const crawlerData = crawlers.find(c => c.id === crawler.id);
|
||||
|
||||
crawlerContent = crawlerContent +
|
||||
`User-agent: ${crawlerData!.name}\nDisallow: /`
|
||||
}
|
||||
|
||||
crawlerContent = crawlerContent + "\n\n\n"
|
||||
});
|
||||
|
||||
return new Response(crawlerContent, {
|
||||
status: 200,
|
||||
statusText: "OK",
|
||||
headers: {
|
||||
"Content-Type": "text/plain"
|
||||
}
|
||||
});
|
||||
}) satisfies APIRoute;
|
||||
Reference in New Issue
Block a user