[Python-checkins] r57033 - doctools/trunk/sphinx/web/application.py doctools/trunk/sphinx/web/robots.py

georg.brandl python-checkins at python.org
Tue Aug 14 22:06:25 CEST 2007


Author: georg.brandl
Date: Tue Aug 14 22:06:25 2007
New Revision: 57033

Added:
   doctools/trunk/sphinx/web/robots.py
Modified:
   doctools/trunk/sphinx/web/application.py
Log:
Add a robots.py to the web app.


Modified: doctools/trunk/sphinx/web/application.py
==============================================================================
--- doctools/trunk/sphinx/web/application.py	(original)
+++ doctools/trunk/sphinx/web/application.py	Tue Aug 14 22:06:25 2007
@@ -32,6 +32,7 @@
      blackhole_dict, striptags
 from .admin import AdminPanel
 from .userdb import UserDatabase
+from .robots import robots_txt
 from .oldurls import handle_html_url
 from .antispam import AntiSpam
 from .database import connect, set_connection, Comment
@@ -150,7 +151,7 @@
                 self.globalcontext = pickle.load(f)
             with file(path.join(self.data_root, 'searchindex.pickle')) as f:
                 self.search_frontend = SearchFrontend(pickle.load(f))
-            self.buildmtime = path.getmtime(self.buildfile)
+            self.buildmtime = new_mtime
             self.cache.clear()
         finally:
             env_lock.release()
@@ -679,6 +680,8 @@
             if req.path == 'favicon.ico':
                 # TODO: change this to real favicon?
                 resp = self.get_error_404()
+            elif req.path == 'robots.txt':
+                resp = Response(robots_txt, mimetype='text/plain')
             elif not req.path.endswith('/') and req.method == 'GET':
                 # may be an old URL
                 if url.endswith('.html'):
@@ -720,7 +723,7 @@
             # start the fuzzy search
             elif url[:2] == 'q/':
                 resp = self.get_keyword_matches(req, url[2:])
-            # special URLs
+            # special URLs -- don't forget to add them to robots.py
             elif url[0] == '@':
                 # source view
                 if url[:8] == '@source/':

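For illustration only, here is a minimal stand-alone sketch of what the new branch above does: answer requests for /robots.txt with the module-level string as text/plain. It uses the stdlib wsgiref server instead of the application's own Response class shown in the diff, and it assumes the doctools checkout is on sys.path so that sphinx.web.robots is importable; the host and port are made up.

    # Hypothetical stand-alone sketch; not the Sphinx web app itself.
    from wsgiref.simple_server import make_server

    from sphinx.web.robots import robots_txt   # assumes doctools/trunk is on sys.path

    def app(environ, start_response):
        # Serve the canned robots.txt; everything else gets a 404 in this sketch.
        if environ.get('PATH_INFO') == '/robots.txt':
            body = robots_txt.encode('utf-8')
            start_response('200 OK', [('Content-Type', 'text/plain; charset=utf-8'),
                                      ('Content-Length', str(len(body)))])
            return [body]
        start_response('404 Not Found', [('Content-Type', 'text/plain')])
        return [b'Not Found']

    if __name__ == '__main__':
        make_server('localhost', 8080, app).serve_forever()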
Added: doctools/trunk/sphinx/web/robots.py
==============================================================================
--- (empty file)
+++ doctools/trunk/sphinx/web/robots.py	Tue Aug 14 22:06:25 2007
@@ -0,0 +1,28 @@
+# -*- coding: utf-8 -*-
+"""
+    sphinx.web.robots
+    ~~~~~~~~~~~~~~~~~
+
+    robots.txt
+
+    :copyright: 2007 by Georg Brandl.
+    :license: Python license.
+"""
+
+robots_txt = """\
+User-agent: *
+Disallow: /@source/
+Disallow: /@edit/
+Disallow: /@submit/
+Disallow: /@comments/
+Disallow: /@rss/
+Disallow: /@admin
+
+User-agent: Googlebot
+Disallow: /@source/
+Disallow: /@edit/
+Disallow: /@submit/
+Disallow: /@comments/
+Disallow: /@rss/
+Disallow: /@admin
+"""

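As a quick sanity check (not part of the commit), the string can be fed to the standard library's robots.txt parser to confirm that the special @-URLs are blocked while ordinary documentation pages stay crawlable. The sketch below uses Python 3's urllib.robotparser; the docs.example.org host and the sample paths are made up, and it again assumes sphinx.web.robots is importable.

    # Sketch only: probe the rules with the stdlib robots.txt parser.
    from urllib.robotparser import RobotFileParser

    from sphinx.web.robots import robots_txt   # assumes doctools/trunk is on sys.path

    rp = RobotFileParser()
    rp.parse(robots_txt.splitlines())

    # Special @-URLs should be off limits for every crawler ...
    assert not rp.can_fetch('*', 'http://docs.example.org/@source/whatsnew/2.6.txt')
    assert not rp.can_fetch('Googlebot', 'http://docs.example.org/@admin/moderation')
    # ... while ordinary pages remain crawlable.
    assert rp.can_fetch('*', 'http://docs.example.org/whatsnew/2.6.html')
    print('robots.txt rules behave as expected')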