From 596e8bbf21040d730e750f0eb2294ed13c2cfbc7 Mon Sep 17 00:00:00 2001
From: Martijn Voncken
Date: Sat, 16 Feb 2008 14:29:35 +0000
Subject: [PATCH] webui : robots.txt, prevent search-engine indexing

---
 deluge/ui/webui/webui_plugin/pages.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/deluge/ui/webui/webui_plugin/pages.py b/deluge/ui/webui/webui_plugin/pages.py
index 4a693f517..5e274ae79 100644
--- a/deluge/ui/webui/webui_plugin/pages.py
+++ b/deluge/ui/webui/webui_plugin/pages.py
@@ -91,7 +91,8 @@ urls = (
     #"/downloads/(.*)","downloads" disabled until it can handle large downloads
     #default-pages
     "/", "home",
-    "", "home"
+    "", "home",
+    "/robots.txt","robots"
 )
 
 #/routing
@@ -345,5 +346,12 @@ class downloads(static_handler):
         if not ws.config.get('share_downloads'):
             raise Exception('Access to downloads is forbidden.')
         return static_handler.GET(self, name)
+
+class robots:
+    def GET(self):
+        "no robots/prevent searchengines from indexing"
+        web.header("Content-Type", "text/plain")
+        print "User-agent: *\nDisallow:\n"
+
 
 #/pages