Disallow crawling of versioned documentation in robots.txt (#5549)

Author: jack1142
Date: 2022-01-29 17:16:44 +01:00
Committed by: GitHub
Parent: b05933274a
Commit: e33985f969
2 changed files with 12 additions and 0 deletions

docs/_html/robots.txt (new file)

@@ -0,0 +1,6 @@
+User-agent: *
+Disallow: /
+Allow: /en/stable
+Allow: /en/latest
+
+Sitemap: https://docs.discord.red/sitemap.xml

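Why this ordering works: under the precedence rules followed by major
crawlers (RFC 9309: the most specific, i.e. longest, matching path wins),
the broad "Disallow: /" is overridden by the longer "Allow: /en/stable"
and "Allow: /en/latest" prefixes, so only the stable and latest docs stay
crawlable while every versioned build is hidden from search engines. A
minimal Python sketch of that longest-match rule, purely illustrative and
not part of this commit:

# Longest-match precedence, as specified by RFC 9309.
# Each rule is (path_prefix, allowed), mirroring the file above.
RULES = [
    ("/", False),           # Disallow: /
    ("/en/stable", True),   # Allow: /en/stable
    ("/en/latest", True),   # Allow: /en/latest
]

def is_allowed(url_path: str) -> bool:
    """Return True if the most specific matching rule allows crawling."""
    matches = [r for r in RULES if url_path.startswith(r[0])]
    if not matches:
        return True  # no rule applies: crawlable by default
    longest = max(matches, key=lambda r: len(r[0]))
    return longest[1]

assert is_allowed("/en/stable/index.html")      # stable docs: crawlable
assert is_allowed("/en/latest/")                # latest docs: crawlable
assert not is_allowed("/en/3.4.9/index.html")   # example versioned path: blocked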
docs/conf.py

@@ -118,6 +118,12 @@ rst_prolog += f"\n.. |DPY_VERSION| replace:: {dpy_version}"
 #
 html_theme = "sphinx_rtd_theme"
 
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#
+html_extra_path = ["_html"]
+
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
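
For context, html_extra_path is a standard Sphinx configuration option:
files under the listed directories (here docs/_html/) are copied verbatim
into the root of the built HTML output, which is how robots.txt ends up at
the top level of the deployed site (https://docs.discord.red/robots.txt).
A hedged sanity check, assuming the conventional
"sphinx-build docs docs/_build/html" invocation; the build path is an
assumption, not taken from this commit:

# Hypothetical post-build check (the build directory is an assumption).
from pathlib import Path

out = Path("docs/_build/html/robots.txt")  # placed here by html_extra_path
if out.exists():
    print(out.read_text())  # should match docs/_html/robots.txt above
else:
    print("robots.txt missing from build root; check html_extra_path")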