From 085433983b22d19a87bc92caec62dde675e12577 Mon Sep 17 00:00:00 2001
From: Bernie Innocenti
Date: Fri, 14 Oct 2011 21:39:43 -0400
Subject: [PATCH] Tell crawlers not to follow links to non-existing pages

---
 geekigeeki.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/geekigeeki.py b/geekigeeki.py
index 4a7ffea..341978e 100755
--- a/geekigeeki.py
+++ b/geekigeeki.py
@@ -3,7 +3,7 @@
 #
 # Copyright (C) 1999, 2000 Martin Pool
 # Copyright (C) 2002 Gerardo Poggiali
-# Copyright (C) 2007, 2008, 2009, 2010 Bernie Innocenti
+# Copyright (C) 2007, 2008, 2009, 2010, 2011 Bernie Innocenti
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -152,9 +152,9 @@ def link_tag(dest, text=None, privileged=False, **kvargs):
             text = config_get('nonexist_pfx', '') + text
             link_class = 'nonexistent'
 
-    # Prevent crawlers from following links potentially added by spammers or to generated pages
+    # Prevent crawlers from following links potentially added by spammers and to autogenerated pages
     nofollow = ''
-    if link_class == 'external' or link_class == 'navlink':
+    if link_class in ('external', 'navlink', 'nonexistent'):
         nofollow = 'rel="nofollow" '
 
     return '<a class="%s" %shref="%s">%s</a>' % (link_class, nofollow, relative_url(dest, privileged=privileged), text)
-- 
2.25.1
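
For illustration only, and not part of the patch itself: a minimal, self-contained sketch of the nofollow decision after this change. The make_link() helper and the sample page names below are hypothetical stand-ins for geekigeeki's link_tag(); only the membership test on link_class mirrors the patched code.

    # Sketch of the rel="nofollow" behavior introduced by this patch.
    # make_link() is a simplified stand-in for geekigeeki's link_tag().

    def make_link(dest, text, link_class):
        """Build an anchor tag, adding rel="nofollow" for link classes
        crawlers should not follow: external links, navigation links,
        and (after this patch) links to nonexistent pages."""
        nofollow = ''
        if link_class in ('external', 'navlink', 'nonexistent'):
            nofollow = 'rel="nofollow" '
        return '<a class="%s" %shref="%s">%s</a>' % (link_class, nofollow, dest, text)

    if __name__ == '__main__':
        # A link to a missing wiki page now carries rel="nofollow", so
        # crawlers will not follow the auto-generated "create page" target.
        print(make_link('MissingPage', 'MissingPage?', 'nonexistent'))
        # Ordinary links to existing wiki pages are unchanged.
        print(make_link('FrontPage', 'FrontPage', 'wikilink'))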