autogenerate robots.txt

this automatically generates our robots.txt file, grabbing an updated list of ai scrapers to block each time
main
maia arson crimew 2024-11-10 21:21:02 +01:00
parent 395fb36b50
commit 500a00fcc5
3 changed files with 20 additions and 5 deletions

4
src/_data/aibots.js Normal file
View File

@@ -0,0 +1,4 @@
module.exports = async function() {
response = await fetch("https://raw.githubusercontent.com/ai-robots-txt/ai.robots.txt/refs/heads/main/robots.txt");
return response.text()
}

16
src/robots.njk Normal file
View File

@@ -0,0 +1,16 @@
---
permalink: /robots.txt
---
{#-
  Rendered by Eleventy to /robots.txt (see `permalink` above).
  `aibots` is supplied by src/_data/aibots.js, which fetches the upstream
  ai.robots.txt block list at build time. Everything outside this comment
  and the front matter is emitted verbatim into the served file; the `#`
  lines below are robots.txt comments, not template comments. The trim
  markers on this comment strip its surrounding whitespace so the rendered
  output is byte-identical to before.
-#}
# omg haiiiii robots ^-^
# i love robots :3
# AI scrapers
{{ aibots }}
# everyone else
User-agent: *
Allow: /
# sitemaps
Sitemap: {{ site.url }}/sitemap.xml
Sitemap: {{ site.url }}/sitemap-news.xml

View File

@@ -1,5 +0,0 @@
User-agent: GPTBot
Disallow: /
User-agent: Google-Extended
Disallow: /