Block robots from scraping git.captnemo.in
- Too many requests
Diff
gitea/main.tf | 4 ++++
gitea/conf/robots.txt | 20 ++++++++++++++++++++
gitea/conf/public/robots.txt | 20 ++++++++++++++++++++
3 files changed, 44 insertions(+)
@@ -46,6 +46,10 @@
content = "${file("${path.module}/../docker/conf/humans.txt")}"
file = "/data/gitea/public/humans.txt"
}
upload {
content = "${file("${path.module}/conf/public/robots.txt")}"
file = "/data/gitea/public/robots.txt"
}
upload {
content = "${file("${path.module}/conf/extra_links.tmpl")}"
@@ -1,0 +1,20 @@
User-agent: MJ12bot
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: SemrushBot-SA
Disallow: /
User-agent: rogerbot
Disallow: /
User-agent: dotbot
Disallow: /
User-agent: AhrefsBot
Disallow: /
User-agent: Alexibot
Disallow: /
User-agent: SurveyBot
Disallow: /
User-agent: Xenu's
Disallow: /
User-agent: Xenu's Link Sleuth 1.1c
Disallow: /
@@ -1,0 +1,20 @@
User-agent: MJ12bot
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: SemrushBot-SA
Disallow: /
User-agent: rogerbot
Disallow: /
User-agent: dotbot
Disallow: /
User-agent: AhrefsBot
Disallow: /
User-agent: Alexibot
Disallow: /
User-agent: SurveyBot
Disallow: /
User-agent: Xenu's
Disallow: /
User-agent: Xenu's Link Sleuth 1.1c
Disallow: /