autogenerate robots.txt
this automatically generates our robots.txt file, grabbing an updated list of ai scrapers to block each time
parent
395fb36b50
commit
500a00fcc5
|
@ -0,0 +1,4 @@
|
|||
module.exports = async function() {
|
||||
response = await fetch("https://raw.githubusercontent.com/ai-robots-txt/ai.robots.txt/refs/heads/main/robots.txt");
|
||||
return response.text()
|
||||
}
|
|
@ -0,0 +1,16 @@
|
|||
---
|
||||
permalink: /robots.txt
|
||||
---
|
||||
# omg haiiiii robots ^-^
|
||||
# i love robots :3
|
||||
|
||||
# AI scrapers
|
||||
{{ aibots }}
|
||||
|
||||
# everyone else
|
||||
User-agent: *
|
||||
Allow: /
|
||||
|
||||
# sitemaps
|
||||
Sitemap: {{ site.url }}/sitemap.xml
|
||||
Sitemap: {{ site.url }}/sitemap-news.xml
|
|
@ -1,5 +0,0 @@
|
|||
User-agent: GPTBot
|
||||
Disallow: /
|
||||
|
||||
User-agent: Google-Extended
|
||||
Disallow: /
|
Loading…
Reference in New Issue