From 7c99c86e914277f202ff1e4a2c1608a3ff62a89e Mon Sep 17 00:00:00 2001
From: Alexander Bilz <mail@alexbilz.com>
Date: Sat, 11 Jul 2020 09:35:36 +0000
Subject: [PATCH] 🕷 Added robots.txt support

---
 README.md |    7 +++++++
 1 files changed, 7 insertions(+), 0 deletions(-)

diff --git a/README.md b/README.md
index 768cb92..afc32f6 100644
--- a/README.md
+++ b/README.md
@@ -159,6 +159,13 @@
 [params]
   mainSections = ["post", "docs"]
 ```
+### Robots.txt
+If you want Hugo to generate a robots.txt, you will have to set `enableRobotsTXT` to `true` in your `config.toml`. By default, a robots.txt that allows search engine crawlers to access any page will be generated. It will look like this:
+```
+User-agent: *
+```
+If certain pages should be excluded from being accessed, you might want to set up a custom robots.txt file within the `static` folder of your site.
+
 ## License
 
 Anatole is licensed under the [MIT license](https://github.com/lxndrblz/anatole/blob/master/LICENSE).

--
Gitblit v1.10.0