From d748e65a3773d7203b16b109e92c1639f9f4b6dc Mon Sep 17 00:00:00 2001
From: Nemo <me@captnemo.in>
Date: Sat, 27 Apr 2019 16:33:52 +0530
Subject: [PATCH] Block robots from scraping git.captnemo.in

- Too many requests
---
 gitea/main.tf                |  4 ++++
 gitea/conf/robots.txt        | 20 ++++++++++++++++++++
 gitea/conf/public/robots.txt | 20 ++++++++++++++++++++
 3 files changed, 44 insertions(+)

diff --git a/gitea/main.tf b/gitea/main.tf
index 55e1fe3..56cd107 100644
--- a/gitea/main.tf
+++ b/gitea/main.tf
@@ -46,6 +46,10 @@
     content = "${file("${path.module}/../docker/conf/humans.txt")}"
     file    = "/data/gitea/public/humans.txt"
   }
+  upload {
+    content = "${file("${path.module}/conf/public/robots.txt")}"
+    file    = "/data/gitea/public/robots.txt"
+  }
   # Extra Links in header
   upload {
     content = "${file("${path.module}/conf/extra_links.tmpl")}"
diff --git a/gitea/conf/robots.txt b/gitea/conf/robots.txt
new file mode 100644
index 0000000..ae35f66
--- /dev/null
+++ b/gitea/conf/robots.txt
@@ -0,0 +1,20 @@
+User-agent: MJ12bot
+Disallow: /
+User-agent: SemrushBot
+Disallow: /
+User-agent: SemrushBot-SA
+Disallow: /
+User-agent: rogerbot
+Disallow: /
+User-agent: dotbot
+Disallow: /
+User-agent: AhrefsBot
+Disallow: /
+User-agent: Alexibot
+Disallow: /
+User-agent: SurveyBot
+Disallow: /
+User-agent: Xenu's
+Disallow: /
+User-agent: Xenu's Link Sleuth 1.1c
+Disallow: /
diff --git a/gitea/conf/public/robots.txt b/gitea/conf/public/robots.txt
new file mode 100644
index 0000000..ae35f66
--- /dev/null
+++ b/gitea/conf/public/robots.txt
@@ -0,0 +1,20 @@
+User-agent: MJ12bot
+Disallow: /
+User-agent: SemrushBot
+Disallow: /
+User-agent: SemrushBot-SA
+Disallow: /
+User-agent: rogerbot
+Disallow: /
+User-agent: dotbot
+Disallow: /
+User-agent: AhrefsBot
+Disallow: /
+User-agent: Alexibot
+Disallow: /
+User-agent: SurveyBot
+Disallow: /
+User-agent: Xenu's
+Disallow: /
+User-agent: Xenu's Link Sleuth 1.1c
+Disallow: /
--
rgit 0.1.5