doc: add robots.txt

Use a robots.txt file to prevent search engines from indexing old
content.

Signed-off-by: David B. Kinder <david.b.kinder@intel.com>
This commit is contained in:
David B. Kinder 2019-04-04 15:37:49 -07:00 committed by David Kinder
parent fe470cfe23
commit c09046abbf
2 changed files with 8 additions and 0 deletions

View File

@@ -77,6 +77,7 @@ publish:
cp -r $(BUILDDIR)/html/* $(PUBLISHDIR)
cp scripts/publish-README.md $(PUBLISHDIR)/../README.md
cp scripts/publish-index.html $(PUBLISHDIR)/../index.html
cp scripts/publish-robots.txt $(PUBLISHDIR)/../robots.txt
cd $(PUBLISHDIR)/..; git add -A; git commit -s -m "publish $(RELEASE)"; git push origin master;

View File

@@ -0,0 +1,7 @@
User-agent: *
Allow: /
Disallow: /0.1/
Disallow: /0.2/
Disallow: /0.3/
Disallow: /0.4/
Disallow: /0.5/