EGG-INFO/entry_points.txt:

    [trac.plugins]
    robotstxt.web_ui = robotstxt.web_ui

EGG-INFO/PKG-INFO:

    Metadata-Version: 1.0
    Name: TracRobotsTxt
    Version: 2.0
    Summary: Serve a robots.txt file from Trac.
    Home-page: http://trac-hacks.org/wiki/RobotsTxtPlugin
    Author: Noah Kantrowitz
    Author-email: noah@coderanger.net
    License: BSD
    Download-URL: http://trac-hacks.org/svn/robotstxtplugin/0.11#egg=TracRobotsTxt-dev
    Description:
        Notes
        =====
        Serve a robots.txt file from Trac. Mostly useful to tracd users, but it
        works with any deployment. Just put the data you want in the wiki page
        ``RobotsTxt``.

        Configuration
        =============
        To enable the plugin::

            [components]
            robotstxt.* = enabled

        A typical ``RobotsTxt`` wiki page will look like::

            User-agent: *
            Disallow: /browser
            Disallow: /log
            Disallow: /changeset
            Disallow: /report
            Disallow: /newticket
            Disallow: /search
    Keywords: trac plugin robots
    Platform: UNKNOWN
    Classifier: Framework :: Trac
    Classifier: Development Status :: 6 - Mature
    Classifier: Environment :: Web Environment
    Classifier: License :: OSI Approved :: BSD License
    Classifier: Natural Language :: English
    Classifier: Operating System :: OS Independent

EGG-INFO/requires.txt:

    Trac

EGG-INFO/SOURCES.txt:

    README
    setup.py
    TracRobotsTxt.egg-info/PKG-INFO
    TracRobotsTxt.egg-info/SOURCES.txt
    TracRobotsTxt.egg-info/dependency_links.txt
    TracRobotsTxt.egg-info/entry_points.txt
    TracRobotsTxt.egg-info/requires.txt
    TracRobotsTxt.egg-info/top_level.txt
    robotstxt/__init__.py
    robotstxt/web_ui.py

EGG-INFO/top_level.txt:

    robotstxt

EGG-INFO/dependency_links.txt and EGG-INFO/zip-safe are empty marker files.

robotstxt/__init__.py:

    (empty)

robotstxt/web_ui.py:

    from trac.core import *
    from trac.web.api import IRequestHandler
    from trac.wiki.model import WikiPage


    class RobotsTxtModule(Component):
        """Serve a robots.txt file from Trac."""

        implements(IRequestHandler)

        # IRequestHandler methods

        def match_request(self, req):
            # Claim only requests for /robots.txt.
            return req.path_info == '/robots.txt'

        def process_request(self, req):
            # Serve the text of the RobotsTxt wiki page as plain text,
            # stripping Trac's {{{ ... }}} block markers.
            page = WikiPage(self.env, 'RobotsTxt')
            data = ''
            if page.exists:
                data = page.text
                data = data.replace('{{{', '').replace('}}}', '')
            req.send(data, 'text/plain')
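
For a quick end-to-end check of the configuration above, the served file can be
read back with Python's standard-library robot parser. A minimal sketch in
Python 3, assuming a tracd instance at http://localhost:8000 (a placeholder
host) and the example ``RobotsTxt`` wiki page shown in the description::

    # Sketch only: the base URL is a placeholder for your Trac host.
    from urllib.robotparser import RobotFileParser

    parser = RobotFileParser("http://localhost:8000/robots.txt")
    parser.read()

    # With the example RobotsTxt page, /browser is disallowed for all agents,
    # while ordinary wiki pages stay crawlable.
    print(parser.can_fetch("*", "http://localhost:8000/browser"))         # expected: False
    print(parser.can_fetch("*", "http://localhost:8000/wiki/WikiStart"))  # expected: True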