From d748e65a3773d7203b16b109e92c1639f9f4b6dc Mon Sep 17 00:00:00 2001
From: Nemo
Date: Sat, 27 Apr 2019 16:33:52 +0530
Subject: [PATCH] Block robots from scraping git.captnemo.in

- Too many requests
---
 gitea/conf/public/robots.txt | 20 ++++++++++++++++++++
 gitea/conf/robots.txt        | 20 ++++++++++++++++++++
 gitea/main.tf                |  4 ++++
 3 files changed, 44 insertions(+)
 create mode 100644 gitea/conf/public/robots.txt
 create mode 100644 gitea/conf/robots.txt

diff --git a/gitea/conf/public/robots.txt b/gitea/conf/public/robots.txt
new file mode 100644
index 0000000..ae35f66
--- /dev/null
+++ b/gitea/conf/public/robots.txt
@@ -0,0 +1,20 @@
+User-agent: MJ12bot
+Disallow: /
+User-agent: SemrushBot
+Disallow: /
+User-agent: SemrushBot-SA
+Disallow: /
+User-agent: rogerbot
+Disallow:/
+User-agent: dotbot
+Disallow:/
+User-agent: AhrefsBot
+Disallow: /
+User-agent: Alexibot
+Disallow: /
+User-agent: SurveyBot
+Disallow: /
+User-agent: Xenu’s
+Disallow: /
+User-agent: Xenu’s Link Sleuth 1.1c
+Disallow: /
diff --git a/gitea/conf/robots.txt b/gitea/conf/robots.txt
new file mode 100644
index 0000000..ae35f66
--- /dev/null
+++ b/gitea/conf/robots.txt
@@ -0,0 +1,20 @@
+User-agent: MJ12bot
+Disallow: /
+User-agent: SemrushBot
+Disallow: /
+User-agent: SemrushBot-SA
+Disallow: /
+User-agent: rogerbot
+Disallow:/
+User-agent: dotbot
+Disallow:/
+User-agent: AhrefsBot
+Disallow: /
+User-agent: Alexibot
+Disallow: /
+User-agent: SurveyBot
+Disallow: /
+User-agent: Xenu’s
+Disallow: /
+User-agent: Xenu’s Link Sleuth 1.1c
+Disallow: /
diff --git a/gitea/main.tf b/gitea/main.tf
index 55e1fe3..56cd107 100644
--- a/gitea/main.tf
+++ b/gitea/main.tf
@@ -46,6 +46,10 @@ resource "docker_container" "gitea" {
     content = "${file("${path.module}/../docker/conf/humans.txt")}"
     file = "/data/gitea/public/humans.txt"
   }
+  upload {
+    content = "${file("${path.module}/conf/public/robots.txt")}"
+    file = "/data/gitea/public/robots.txt"
+  }
   # Extra Links in header
   upload {
     content = "${file("${path.module}/conf/extra_links.tmpl")}"