Block robots from scraping git.captnemo.in

- Too many requests
Nemo 2019-04-27 16:33:52 +05:30
parent 3bc9e40b61
commit d748e65a37
3 changed files with 44 additions and 0 deletions

gitea/conf/public/robots.txt Normal file

@@ -0,0 +1,20 @@
User-agent: MJ12bot
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: SemrushBot-SA
Disallow: /
User-agent: rogerbot
Disallow: /
User-agent: dotbot
Disallow: /
User-agent: AhrefsBot
Disallow: /
User-agent: Alexibot
Disallow: /
User-agent: SurveyBot
Disallow: /
User-agent: Xenu
Disallow: /
User-agent: Xenu Link Sleuth 1.1c
Disallow: /

gitea/conf/robots.txt Normal file

@@ -0,0 +1,20 @@
User-agent: MJ12bot
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: SemrushBot-SA
Disallow: /
User-agent: rogerbot
Disallow: /
User-agent: dotbot
Disallow: /
User-agent: AhrefsBot
Disallow: /
User-agent: Alexibot
Disallow: /
User-agent: SurveyBot
Disallow: /
User-agent: Xenu
Disallow: /
User-agent: Xenu Link Sleuth 1.1c
Disallow: /
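
Each User-agent / Disallow: / pair above tells the named crawler that the whole site is off-limits; well-behaved bots fetch /robots.txt before crawling and honor it. The list targets the SEO and link-audit crawlers that were generating too many requests. Once the file is live, Python's standard-library robotparser can confirm a given agent is denied (a minimal sketch; it assumes the file is already being served at the URL below):

from urllib.robotparser import RobotFileParser

# Parse the deployed robots.txt from the live instance
rp = RobotFileParser("https://git.captnemo.in/robots.txt")
rp.read()

# Listed crawlers should be denied; agents not listed stay allowed
print(rp.can_fetch("MJ12bot", "https://git.captnemo.in/"))      # False
print(rp.can_fetch("Mozilla/5.0", "https://git.captnemo.in/"))  # True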

@@ -46,6 +46,10 @@ resource "docker_container" "gitea" {
    content = "${file("${path.module}/../docker/conf/humans.txt")}"
    file = "/data/gitea/public/humans.txt"
  }
  upload {
    content = "${file("${path.module}/conf/public/robots.txt")}"
    file = "/data/gitea/public/robots.txt"
  }
  # Extra Links in header
  upload {
    content = "${file("${path.module}/conf/extra_links.tmpl")}"
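
The new upload block writes the same robots.txt into the container at /data/gitea/public/robots.txt, alongside the existing humans.txt. With the Gitea docker image, /data/gitea is the custom directory, and files under its public folder were served at the site root at the time, so the rules become reachable at https://git.captnemo.in/robots.txt without rebuilding the image. A quick post-apply check (a minimal sketch, assuming the container has been recreated and the site is reachable):

import urllib.request

# The blocklist should now be served at the web root
with urllib.request.urlopen("https://git.captnemo.in/robots.txt") as resp:
    body = resp.read().decode()

assert "MJ12bot" in body, "robots.txt not deployed yet"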