From: Bernie Innocenti <bernie@codewiz.org>
Date: Sat, 15 Oct 2011 01:39:43 +0000 (-0400)
Subject: Tell crawlers not to follow links to nonexistent pages
X-Git-Url: https://codewiz.org/gitweb?a=commitdiff_plain;h=085433983b22d19a87bc92caec62dde675e12577;p=geekigeeki.git

Tell crawlers not to follow links to nonexistent pages
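
For a page that does not exist yet, link_tag() now emits markup along
these lines (a sketch: "NewPage" and the "?" prefix are illustrative,
the prefix coming from the nonexist_pfx config setting):

    <a class="nonexistent" rel="nofollow" href="NewPage">?NewPage</a>

External links and navlinks already carried rel="nofollow"; this change
extends the same treatment to links pointing at uncreated pages.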
---

diff --git a/geekigeeki.py b/geekigeeki.py
index 4a7ffea..341978e 100755
--- a/geekigeeki.py
+++ b/geekigeeki.py
@@ -3,7 +3,7 @@
 #
 # Copyright (C) 1999, 2000 Martin Pool <mbp@humbug.org.au>
 # Copyright (C) 2002 Gerardo Poggiali
-# Copyright (C) 2007, 2008, 2009, 2010 Bernie Innocenti <bernie@codewiz.org>
+# Copyright (C) 2007, 2008, 2009, 2010, 2011 Bernie Innocenti <bernie@codewiz.org>
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as
@@ -152,9 +152,9 @@ def link_tag(dest, text=None, privileged=False, **kvargs):
             text = config_get('nonexist_pfx', '') + text
             link_class = 'nonexistent'
 
-    # Prevent crawlers from following links potentially added by spammers or to generated pages
+    # Prevent crawlers from following links potentially added by spammers, as well as links to autogenerated pages
     nofollow = ''
-    if link_class == 'external' or link_class == 'navlink':
+    if link_class in ('external', 'navlink', 'nonexistent'):
         nofollow = 'rel="nofollow" '
 
     return '<a class="%s" %shref="%s">%s</a>' % (link_class, nofollow, relative_url(dest, privileged=privileged), text)