projects
/
geekigeeki.git
/ blobdiff
commit
grep
author
committer
pickaxe
?
search:
re
summary
|
shortlog
|
log
|
commit
|
commitdiff
|
tree
raw
|
inline
| side by side
Merge remote-tracking branch 'giskard/master'
[geekigeeki.git]
/
geekigeeki.py
diff --git
a/geekigeeki.py
b/geekigeeki.py
index 7700585b975aefeda856cefd643d4276f66cb1d0..f6163110b35b15c6b8d313158a8f42d9145303ca 100755
(executable)
--- a/
geekigeeki.py
+++ b/
geekigeeki.py
@@ -3,7 +3,7 @@
#
# Copyright (C) 1999, 2000 Martin Pool <mbp@humbug.org.au>
# Copyright (C) 2002 Gerardo Poggiali
#
# Copyright (C) 1999, 2000 Martin Pool <mbp@humbug.org.au>
# Copyright (C) 2002 Gerardo Poggiali
-# Copyright (C) 2007, 2008, 2009, 2010 Bernie Innocenti <bernie@codewiz.org>
+# Copyright (C) 2007, 2008, 2009, 2010, 2011 Bernie Innocenti <bernie@codewiz.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
@@ -21,7 +21,7 @@
title_done = False
import cgi, sys, os, re, errno, stat, glob
image_ext = 'png|gif|jpg|jpeg|bmp|ico'
import cgi, sys, os, re, errno, stat, glob
image_ext = 'png|gif|jpg|jpeg|bmp|ico'
-video_ext = "ogg|ogv|oga|webm" # Not supported by Firefox 3.5: mkv|mpg|mpeg|mp4|avi|asf|flv|wmv|qt
+video_ext = "ogg|ogv|oga|webm" # Not supported by Firefox 3.6: mkv|mpg|mpeg|mp4|avi|asf|flv|wmv|qt
image_re = re.compile(r".*\.(" + image_ext + "|" + video_ext + ")$", re.IGNORECASE)
video_re = re.compile(r".*\.(" + video_ext + ")$", re.IGNORECASE)
# FIXME: we accept stuff like foo/../bar and we shouldn't
image_re = re.compile(r".*\.(" + image_ext + "|" + video_ext + ")$", re.IGNORECASE)
video_re = re.compile(r".*\.(" + video_ext + ")$", re.IGNORECASE)
# FIXME: we accept stuff like foo/../bar and we shouldn't
@@ -110,8 +110,9 @@
def emit_header(mtime=None, mime_type="text/html"):
# Prevent caching when the wiki engine gets updated
mtime = max(mtime, os.stat(__file__).st_mtime)
print("Last-Modified: " + strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime(mtime)))
# Prevent caching when the wiki engine gets updated
mtime = max(mtime, os.stat(__file__).st_mtime)
print("Last-Modified: " + strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime(mtime)))
- if mime_type:
- print("Content-type: " + mime_type + "; charset=utf-8")
+ else:
+ print("Cache-Control: must-revalidate, max-age=0")
+ print("Content-type: " + mime_type + "; charset=utf-8")
print('')
def send_guru(msg_text, msg_type):
print('')
def send_guru(msg_text, msg_type):
@@ -125,7 +126,7 @@
def send_guru(msg_text, msg_type):
print('</pre><script type="text/javascript" src="%s" defer="defer"></script>' \
% relative_url('sys/GuruMeditation.js'))
print('</pre><script type="text/javascript" src="%s" defer="defer"></script>' \
% relative_url('sys/GuruMeditation.js'))
-def send_httperror(status="403 Not Found", query="", trace=False):
+def send_httperror(status="404 Not Found", query="", trace=False):
print("Status: %s" % status)
msg_text = "%s: on query '%s'" % (status, query)
if trace:
print("Status: %s" % status)
msg_text = "%s: on query '%s'" % (status, query)
if trace:
@@ -151,9 +152,9 @@
def link_tag(dest, text=None, privileged=False, **kvargs):
text = config_get('nonexist_pfx', '') + text
link_class = 'nonexistent'
text = config_get('nonexist_pfx', '') + text
link_class = 'nonexistent'
- # Prevent crawlers from following links potentially added by spammers or to generated pages
+ # Prevent crawlers from following links potentially added by spammers and to auto generated pages
nofollow = ''
nofollow = ''
- if link_class == 'external' or link_class == 'navlink':
+ if link_class in ('external', 'navlink', 'nonexistent'):
nofollow = 'rel="nofollow" '
return '<a class="%s" %shref="%s">%s</a>' % (link_class, nofollow, relative_url(dest, privileged=privileged), text)
nofollow = 'rel="nofollow" '
return '<a class="%s" %shref="%s">%s</a>' % (link_class, nofollow, relative_url(dest, privileged=privileged), text)
@@ -305,13 +306,13 @@
class WikiFormatter:
return '—'
def _tit_repl(self, word):
return '—'
def _tit_repl(self, word):
+ link = permalink(self.line)
if self.h_level:
if self.h_level:
- result = '</h%d><p>\n' % self.h_level
+ result = '<a class="heading" href="#%s">¶</a></h%d><p>\n' % (link, self.h_level)
self.h_level = 0
else:
self.h_level = len(word) - 1
self.h_level = 0
else:
self.h_level = len(word) - 1
- link = permalink(self.line)
- result = '\n</p><h%d id="%s"><a class="heading" href="#%s">¶</a> ' % (self.h_level, link, link)
+ result = '\n</p><h%d id="%s">' % (self.h_level, link)
return result
def _br_repl(self, word):
return result
def _br_repl(self, word):
@@ -474,8 +475,8 @@
class WikiFormatter:
| (?P<hurl> \[\[[^\]]+\]\])
# Inline HTML
| (?P<hurl> \[\[[^\]]+\]\])
# Inline HTML
- | (?P<html> <(br|hr|div|span|form|iframe|input|textarea|a|img|h[1-5])\b )
- | (?P<htmle> ( /\s*> | </(br|hr|div|span|form|iframe|input|textarea|a|img|h[1-5])> ) )
+ | (?P<html> <(br|hr|small|div|span|form|iframe|input|textarea|a|img|h[1-5])\b )
+ | (?P<htmle> ( /\s*> | </(br|hr|small|div|span|form|iframe|input|textarea|a|img|h[1-5])> ) )
| (?P<ent> [<>&] )
# Auto links (LEGACY)
| (?P<ent> [<>&] )
# Auto links (LEGACY)
@@ -493,8 +494,6 @@
class WikiFormatter:
| (?P<tr> ^\s*\|\|(=|)\s*)
| (?P<tre> \s*\|\|(=|)\s*$)
| (?P<td> \s*\|\|(=|)\s*)
| (?P<tr> ^\s*\|\|(=|)\s*)
| (?P<tre> \s*\|\|(=|)\s*$)
| (?P<td> \s*\|\|(=|)\s*)
-
- # TODO: highlight search words (look at referrer)
)""", re.VERBOSE)
pre_re = re.compile("""(?:
(?P<pre>\s*\}\}\})
)""", re.VERBOSE)
pre_re = re.compile("""(?:
(?P<pre>\s*\}\}\})
@@ -569,14 +568,12 @@
class Page:
return True
return False
return True
return False
- def get_raw_body(self, default=None):
+ def get_raw_body(self):
try:
return open(self._filename(), 'rb').read()
except IOError, err:
if err.errno == errno.ENOENT:
try:
return open(self._filename(), 'rb').read()
except IOError, err:
if err.errno == errno.ENOENT:
- if default is None:
- default = '//[[%s?a=edit|Describe %s]]//' % (self.page_name, self.page_name)
- return default
+ return ''
if err.errno == errno.EISDIR:
return self.format_dir()
raise err
if err.errno == errno.EISDIR:
return self.format_dir()
raise err
@@ -646,10 +643,10 @@
class Page:
if title_done: return
# HEAD
if title_done: return
# HEAD
- emit_header(self._mtime())
+ emit_header(name and self._mtime())
print('<!doctype html>\n<html lang="en">')
print("<head><title>%s: %s</title>" % (config_get('site_name', "Unconfigured Wiki"), text))
print('<!doctype html>\n<html lang="en">')
print("<head><title>%s: %s</title>" % (config_get('site_name', "Unconfigured Wiki"), text))
- print(' <meta charset="UTF-8">')
+ print(' <meta charset="utf-8">')
if not name:
print(' <meta name="robots" content="noindex,nofollow" />')
if not name:
print(' <meta name="robots" content="noindex,nofollow" />')
@@ -665,10 +662,8 @@
class Page:
print(' <link rel="alternate" type="application/x-wiki" title="Edit this page" href="%s" />' \
% relative_url(name + '?a=edit', privileged=True))
print(' <link rel="alternate" type="application/x-wiki" title="Edit this page" href="%s" />' \
% relative_url(name + '?a=edit', privileged=True))
- history = config_get('history_url')
- if history is not None:
- print(' <link rel="alternate" type="application/rss+xml" title="RSS" href="%s" />' \
- % relative_url(history + '?a=rss'))
+ print(' <link rel="alternate" type="application/atom+xml" title="Atom feed" href="%s" />' \
+ % relative_url(name + '?a=atom'))
print('</head>')
print('</head>')
@@ -692,6 +687,7 @@
class Page:
else:
print(' <b>' + text + '</b> ')
print(' | ' + link_tag('FindPage', 'Find Page', cssclass='navlink'))
else:
print(' <b>' + text + '</b> ')
print(' | ' + link_tag('FindPage', 'Find Page', cssclass='navlink'))
+ history = config_get('history_url')
if history:
print(' | <a href="' + relative_url(history) + '" class="navlink">Recent Changes</a>')
if name:
if history:
print(' | <a href="' + relative_url(history) + '" class="navlink">Recent Changes</a>')
if name:
@@ -721,13 +717,16 @@
class Page:
link_inline(footer, kvargs = {
'LAST_MODIFIED': strftime(config_get('datetime_fmt', '%Y-%m-%dT%I:%M:%S%p'), localtime(self._mtime()))
})
link_inline(footer, kvargs = {
'LAST_MODIFIED': strftime(config_get('datetime_fmt', '%Y-%m-%dT%I:%M:%S%p'), localtime(self._mtime()))
})
- print("</body></html>")
+ print('</body></html>')
def send_naked(self, kvargs=None):
if self.can_read():
def send_naked(self, kvargs=None):
if self.can_read():
- WikiFormatter(self.get_raw_body(), kvargs).print_html()
+ body = self.get_raw_body()
+ if not body:
+ body = "//[[%s?a=edit|Describe %s]]//" % (self.page_name, self.page_name)
+ WikiFormatter(body, kvargs).print_html()
else:
else:
- send_guru("Read access denied by ACLs", "notice")
+ send_guru('Read access denied by ACLs', 'notice')
def send(self):
#css foo.css
def send(self):
#css foo.css
@@ -759,7 +758,7 @@
class Page:
return
if preview is None:
return
if preview is None:
- preview = self.get_raw_body(default='')
+ preview = self.get_raw_body()
link_inline("sys/EditPage", kvargs = {
'EDIT_BODY': cgi.escape(preview),
link_inline("sys/EditPage", kvargs = {
'EDIT_BODY': cgi.escape(preview),
@@ -781,7 +780,8 @@
class Page:
if 'maxwidth' in args:
import subprocess
sys.stdout.flush()
if 'maxwidth' in args:
import subprocess
sys.stdout.flush()
- subprocess.check_call(['gm', 'convert', self._filename(),
+ subprocess.check_call(['convert', self._filename(),
+ '-auto-orient', '-orient', 'TopLeft',
'-scale', args['maxwidth'].value + ' >', '-'])
else:
body = self.get_raw_body()
'-scale', args['maxwidth'].value + ' >', '-'])
else:
body = self.get_raw_body()