From: Bernie Innocenti Date: Sat, 11 Apr 2009 15:49:23 +0000 (+0200) Subject: Merge branch 'master' of ssh://bernie@trinity.codewiz.org/~/public_html/wiki/git... X-Git-Tag: v4.0~15 X-Git-Url: https://codewiz.org/gitweb?p=geekigeeki.git;a=commitdiff_plain;h=3f737b8a960486cd16aaa4acacc83acb24c7d42f;hp=0a6dbfd05009685e2c6eeb861023cf7da82ef60f Merge branch 'master' of ssh://bernie@trinity.codewiz.org/~/public_html/wiki/git/geekigeeki into python3 Conflicts: geekigeeki.py --- diff --git a/geekigeeki.conf.py b/geekigeeki.conf.py index 79421c7..d156daa 100644 --- a/geekigeeki.conf.py +++ b/geekigeeki.conf.py @@ -1,6 +1,12 @@ # Configuration values + +# this is displayed in the page title site_name = 'Codewiz' +# this is used for the leftmost button of the navbar. +# It can be set to an empty string or a text message +site_icon = 'AmigaBoing16x16.png' + # set to None for read-only sites, leave empty ('') to allow anonymous edits # otherwise, set to a URL that requires authentication privileged_url = 'https://www.develer.com/~bernie/wiki' @@ -14,8 +20,8 @@ meta_urls = [ ] link_urls = [ # rel href - [ 'icon', 'hacker-favicon.ico' ], - [ 'stylesheet', '../wikidata/geekigeeki.css' ], + [ 'icon', 'sys/favicon.png' ], + [ 'stylesheet', 'sys/geekigeeki.css' ], [ 'openid.server', 'https://www.develer.com/openid/bernie' ], [ 'openid.delegate', 'https://www.develer.com/openid/bernie' ], [ 'pavatar', 'http://www.codewiz.org/wiki/BernieAvatar80x80.png' ], diff --git a/geekigeeki.css b/geekigeeki.css deleted file mode 100755 index 8e7e9cc..0000000 --- a/geekigeeki.css +++ /dev/null @@ -1,391 +0,0 @@ -/* - * $Id$ - * - * Author: Gerardo Poggiali - * Author: Bernardo Innocenti - */ - -body { - font-family: sans-serif; - background-color: #F0F0F8; - color: #000000; - margin-top: 0px; - margin-left: 0px; - margin-right: 0px; -} - -a { color: #1010FF; } -a:visited { color: #501080; } -a.nonexistent { color: #D04040; } -a.navlink { color: #000088; font-size: smaller; font-weight: bold} -a.external { font-style: italic; } -a.heading { text-decoration: none; color: #aaaaaa; font-size: smaller; } - -h1 { - font-size: x-large; - font-weight: bold; - color: #3377FF; - border: #5599EE; - background-color: #C2E2FF; - - margin-bottom: 0.2em; - margin-top: 2em; - padding-right: 10px; - padding-left: 10px; - - border-top-width: 1px; - border-bottom-width: 1px; - border-right-width: 0px; - border-left-width: 0px; - border-style: solid; -} - -h2 { - font-size: large; - font-weight: bold; - color: #22AA33; - border: #22AA33; - background-color: #E8F8F0; - - margin-left: 5px; - margin-right: 5px; - margin-bottom: 0.1em; - margin-top: 2em; - padding-right: 10px; - padding-left: 10px; - - border-top-width: 1px; - border-bottom-width: 1px; - border-left-width: 0px; - border-right-width: 0px; - border-style: solid; -} - -h3 { - font-size: large; - font-weight: bold; - color: #DD2255; - border: #BB5599; - - margin-left: 10px; - margin-right: 10px; - padding-right: 5px; - padding-left: 5px; - - border-top-width: 0px; - border-bottom-width: 1px; - border-right-width: 0px; - border-left-width: 0px; - border-style: solid; -} - -h4 { - font-size: medium; - font-weight: bold; - color: #2222AA; - - margin-left: 10px; - margin-right: 10px; - padding-right: 5px; - padding-left: 5px; - - border: #333399; - border-top-width: 0px; - border-bottom-width: 1px; - border-right-width: 0px; - border-left-width: 0px; - border-style: solid; -} - -h5 { - font-size: medium; - font-weight: bold; - color: #222288; - - margin-left: 10px; - margin-right: 10px; 
- padding-right: 5px; - padding-left: 5px; -} - -/* Heading anchors */ -.heading:link, .heading:visited { - border: none; - color: #d7d7d7; - font-size: .8em; - vertical-align: text-top; -} -* > .heading:link, * > .heading:visited { - visibility: hidden; -} - -h1:hover .heading, h2:hover .heading, h3:hover .heading, -h4:hover .heading, h5:hover .heading, h6:hover .heading { - visibility: visible; -} - -strong.DONE { - padding-left: 0.1em; - background-color: #22ee22; -} - -strong.TODO { - padding-left: 0.1em; - background-color: #ffdd44; -} - -strong.FIXME { - padding-left: 0.1em; - background-color: #ee2222; -} - -strong.error { - color: red; -} - -div { - margin-left: 10px; - margin-right: 10px; - text-align: justify; -} - -div.wiki { - margin-left: 0; - margin-right: 0; -} - -div.preview { - border: 2px dashed #FF0000; -} - -div.index { - border: 1px solid #EEEEEE; -} - -div.nav { - /*border-color: #88aacc; - background-color: #aaccee;*/ - border-color: #888888; - background-color: #cccccc; - border-style: solid; - border-top-width: 0px; - border-bottom-width: 2px; - border-left-width: 0px; - border-right-width: 0px; - padding-top: 5px; - padding-bottom: 5px; - padding-left: 5px; - padding-right: 5px; - margin-top: 0px; - margin-bottom: 10px; - margin-left: 0px; - margin-right: 0px; -} - -.nav hr { - display: none; -} - -.nav .login { - font-size: small; - color: red; -} - -#footer div { - margin-top: 1em; - margin-left: 0; - margin-right: 0; -} - -#footer hr { - border: none; - border-top: 1px solid #bbbbbb; - margin: 0 0; -} - -#footer :link, #footer :visited { - color: #666666; -} - -#footer p { - color: #888888; - font-size: x-small; - margin-top: 0; -} - -#footer p.copyright { - float: left; - margin-left: 0em; - padding: 0 1em; -} - -#footer .license { - border-width: 0; - vertical-align: top; - margin-left: 0em; - margin-right: 1em; -} - -#footer p.modified { - float: right; - text-align: right; - margin-right: 1em; -} - -p.dialog { - border-color: black; - background-color: white; - padding-left: 5px; - margin-left: 10px; - margin-right: 10px; - border-style: solid; - border-top-width: 1px; - border-right-width: 1px; - border-bottom-width: 1px; - border-left-width: 1px -} - -p { - margin-left: 10px; - margin-right: 10px; - margin-bottom: 10px; - text-align: justify; -} - -table { - /*width: 100%;*/ - border: 2px solid #ccc; - border-collapse: collapse; - margin-left: 1em; - margin-right: 1em; - margin-top: 0.5em; - margin-bottom: 0.5em; -} - -table tbody thead { - background: #f7f7f0 -} - -/* FIXME: should be thead */ -table tbody th { - border: 1px solid #d7d7d7; - border-bottom-color: #999; - font-weight: bold; - padding: 2px .5em; - vertical-align: bottom; -} - -table tbody td { - border: 1px dotted #ddd; - padding: .3em .5em; - vertical-align: top; -} - -table tbody tr { border-top: 1px solid #ddd } -table tbody tr.even { background-color: #f0f0f0 } -table tbody tr.odd { background-color: #e7e7e7 } -table tbody tr:hover { background: #eed !important } - -table thead th :link:hover, table thead th :visited:hover table tbody td a:hover, table tbody th a:hover { - background-color: transparent; -} - -pre { - color: #222222; - border-color: #E9E609; - background: #FFFDD0; - - margin-left: 10px; - margin-right: 10px; - padding-left: 5px; - padding-right: 5px; - padding-top: 5px; - padding-bottom: 5px; - border-style: solid; - border-top-width: 1px; - border-right-width: 1px; - border-bottom-width: 1px; - border-left-width: 1px; -} - -pre.error { - color: #FF0000; - background: 
#000000; - border-top-color: #FF0000; - border-color: #FF0000; - border-style: solid; - border-width: 6px; - margin: 0px; -} - -pre.success { - color: #00FF00; - background: #000000; - border-color: #00FF00; - border-style: solid; - border-width: 6px; - margin: 0px; -} - -pre.notice { - color: #FFCC00; - background: #000000; - border-color: #FFCC00; - border-style: solid; - border-width: 6px; - margin: 0px; -} - -#editor { - font-family: monospace; - width: 100%; - color: black; - background-color: white; - border: 1px solid #8cacbb; -} - -/* thumbnails */ - -div.extthumb { - float: right; - width: 100%; -} - -div.thumb { - float: left; - border: 1px solid #ccc; - padding: 3px !important; - margin: 10px; - margin-bottom: .5em; - background-color: #f9f9f9; - text-align: center; -} - -div.thumb a img { - border: 1px solid #ccc; - vertical-align:bottom; -} - -div.thumb div.caption { - border: none; - overflow: hidden; - font-size: 94%; - line-height: 1.4em; - padding: .3em 0 .1em 0; - text-align: center; -} - - -/* Styles for search word highlighting */ -@media screen { - .searchword0 { background: #ff9 } - .searchword1 { background: #cfc } - .searchword2 { background: #cff } - .searchword3 { background: #ccf } - .searchword4 { background: #fcf } -} - -@media print { - #footer { display: none } - .nav { display: none } -} diff --git a/geekigeeki.py b/geekigeeki.py index 6e1ba6e..d208bb7 100755 --- a/geekigeeki.py +++ b/geekigeeki.py @@ -3,7 +3,7 @@ # # Copyright 1999, 2000 Martin Pool # Copyright 2002 Gerardo Poggiali -# Copyright 2007, 2008 Bernie Innocenti +# Copyright 2007, 2008, 2009 Bernie Innocenti # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -30,18 +30,25 @@ import cgi, sys, os, re, errno, stat word_re = re.compile(r"^\b((([A-Z][a-z0-9]+){2,}/)*([A-Z][a-z0-9]+){2,})\b$") # FIXME: we accept stuff like foo/../bar and we shouldn't file_re = re.compile(r"^\b([A-Za-z0-9_\-][A-Za-z0-9_\.\-/]*)\b$") -img_re = re.compile(r"^.*\.(png|gif|jpg|jpeg|bmp|ico)$", re.IGNORECASE) +img_re = re.compile(r"^.*\.(png|gif|jpg|jpeg|bmp|ico|ogm|ogg|mkv|mpg|mpeg|mp4|avi|asf|flv|wmv|qt)$", re.IGNORECASE) +video_re = re.compile(r"^.*\.(ogm|ogg|mkv|mpg|mpeg|mp4|avi|asf|flv|wmv|qt)$", re.IGNORECASE) url_re = re.compile(r"^[a-z]{3,8}://[^\s'\"]+\S$") -link_re = re.compile("(?:\[\[|{{)([^\s\|]+)(?:\s*\|\s*([^\]]+)|)(?:\]\]|}})") +link_re = re.compile(r"(?:\[\[|{{)([^\s\|]+)(?:\s*\|\s*([^\]]+)|)(?:\]\]|}})") +ext_re = re.compile(r"\.([^\./]+)$") title_done = False - # CGI stuff --------------------------------------------------------- - def script_name(): return os.environ.get('SCRIPT_NAME', '') +def query_string(): + path_info = os.environ.get('PATH_INFO', '') + if len(path_info) and path_info[0] == '/': + return path_info[1:] or 'FrontPage' + else: + return os.environ.get('QUERY_STRING', '') or 'FrontPage' + def privileged_path(): return privileged_url or script_name() @@ -61,14 +68,17 @@ def get_hostname(addr): except Exception: return addr +def is_external_url(pathname): + return (url_re.match(pathname) or pathname.startswith('/')) + def relative_url(pathname, privileged=False): - if not (url_re.match(pathname) or pathname.startswith('/')): + if not is_external_url(pathname): if privileged: url = privileged_path() else: url = script_name() pathname = url + '/' + pathname - return pathname + return cgi.escape(pathname, quote=True) def permalink(s): return re.sub(' ', '-', re.sub('[^a-z0-9_ ]', '', 
s.lower()).strip()) @@ -84,30 +94,9 @@ def send_guru(msg_text, msg_type): print(' Software Failure. Press left mouse button to continue.\n') print(msg_text) if msg_type == 'error': - print('\n Guru Meditation #DEADBEEF.ABADC0DE') - print('') - # FIXME: This little JS snippet is harder to pass than ACID 3.0 - print(""" - """) + print '\n Guru Meditation #DEADBEEF.ABADC0DE' + print('' \ + % relative_url('sys/GuruMeditation.js')) def send_title(name, text="Limbo", msg_text=None, msg_type='error', writable=False): global title_done @@ -153,11 +142,11 @@ def send_title(name, text="Limbo", msg_text=None, msg_type='error', writable=Fal # Navbar print('') @@ -186,19 +175,41 @@ def send_httperror(status="403 Not Found", query=""): send_title(None, msg_text=("%s: on query '%s'" % (status, query))) send_footer() -def link_tag(params, text=None, ss_class=None, privileged=False): +def link_tag(params, text=None, link_class=None, privileged=False): if text is None: text = params # default - classattr = '' - if ss_class: - classattr += 'class="%s" ' % ss_class - # Prevent crawlers from following links potentially added by spammers or to generated pages - if ss_class == 'external' or ss_class == 'navlink': - classattr += 'rel="nofollow" ' - elif url_re.match(params): - classattr += 'rel="nofollow" ' + elif img_re.match(text): + text = '' + text + '' + + if not link_class: + if is_external_url(params): + link_class = 'external' + elif file_re.match(params) and Page(params).exists(): + link_class = 'wikilink' + else: + params = nonexist_pfx + params + link_class = 'nonexistent' + + classattr = 'class="%s" ' % link_class + # Prevent crawlers from following links potentially added by spammers or to generated pages + if link_class == 'external' or link_class == 'navlink': + classattr += 'rel="nofollow"' + return '%s' % (classattr, relative_url(params, privileged=privileged), text) +def link_inline(name, descr=None, args=''): + if not descr: descr = name + url = relative_url(name) + if video_re.match(name): + return '' % url + elif img_re.match(name): + return '%s' % (url, url + args, descr) + elif file_re.match(name) and not ext_re.search(name): # FIXME: this guesses a wiki page + return Page(name).send_naked() + else: + return '' \ + % (url, url, name) + # Search --------------------------------------------------- def handle_fullsearch(needle): @@ -220,8 +231,8 @@ def handle_fullsearch(needle): print("
    ") for (count, page_name) in hits: - print('
  • ' + Page(page_name).link_to()) - print(' . . . . ' + str(count)) + print('

  • ' + link_tag(page_name)) + print(' . . . . ' + `count`) print(['match', 'matches'][count != 1]) print('

  • ') print("
") @@ -238,7 +249,7 @@ def handle_titlesearch(needle): print("
    ") for filename in hits: - print('
  • ' + Page(filename).link_to() + "

  • ") + print('
  • ' + link_tag(filename) + "

  • ") print("
") print_search_stats(len(hits), len(all_pages)) @@ -275,9 +286,10 @@ def handle_edit(pagename): text = form['savetext'].value pg.send_editor(text) +# Used by macros/WordIndex and macros/TitleIndex def make_index_key(): links = ['%s' % (ch, ch) for ch in 'abcdefghijklmnopqrstuvwxyz'] - return '

'+ ' | '.join(links) + '

' + return '

' + ' | '.join(links) + '

' def page_list(dirname = None, re = word_re): return sorted(filter(re.match, os.listdir(dirname or data_dir))) @@ -290,10 +302,10 @@ def send_footer(mod_string=None): print(''' ') @@ -314,14 +326,14 @@ class WikiFormatter: self.styles = { #wiki html enabled? "//": ["em", False], - "''": ["em", False], "**": ["b", False], - "'''": ["b", False], "##": ["tt", False], - "``": ["tt", False], "__": ["u", False], "^^": ["sup", False], - ",,": ["sub", False] + ",,": ["sub", False], + "''": ["em", False], # LEGACY + "'''": ["b", False], # LEGACY + "``": ["tt", False], # LEGACY } def _b_repl(self, word): @@ -345,19 +357,6 @@ class WikiFormatter: def _rule_repl(self, word): return self._undent() + '\n
\n' % (len(word) - 2) - def _word_repl(self, word): - return Page(word).link_to() - - def _img_repl(self, word): - pathname = relative_url(word) - return '' % (pathname, pathname) - - def _url_repl(self, word): - if img_re.match(word): - return '' % (word, word) - else: - return '%s' % (word, word) - def _macro_repl(self, word): m = re.compile("\<\<([^\s\|\>]+)(?:\s*\|\s*([^\>]+)|)\>\>").match(word) name = m.group(1) @@ -376,54 +375,64 @@ class WikiFormatter: if macro: return macro(argv) else: - return '<<' + '|'.join(argv) + '>>' + msg = '<<' + '|'.join(argv) + '>>' + if not self.in_html: + msg = '' + msg + '' + return msg def _hurl_repl(self, word): m = link_re.match(word) - name = m.group(1) - descr = m.group(2) - if descr is None: - descr = name - elif img_re.match(m.group(2)): - descr = '' - - return link_tag(name, descr, 'wikilink') + return link_tag(m.group(1), m.group(2)) def _inl_repl(self, word): - m = link_re.match(word) - name = m.group(1) - descr = m.group(2) or name - name = relative_url(name) - argv = descr.split('|') - descr = argv.pop(0) + (name, descr) = link_re.match(word).groups() - if argv: - args = '?' + '&'.join(argv) - else: + if descr: + argv = descr.split('|') + descr = argv.pop(0) args = '' + if argv: + args = '?' + '&'.join(argv) - if descr: # The "extthumb" nonsense works around a limitation of the HTML block model - return '
%s
%s
' \ - % (name, name + args, descr, descr) + return '
' \ + + link_inline(name, descr, args) \ + + '
' + descr + '
' else: - return '' % (name, name + args) - - def _email_repl(self, word): - return '%s' % (word, word) + return link_inline(name, name) def _html_repl(self, word): + if not self.in_html and word.startswith('': - self.in_html -= 1 - return '>' + if self.in_html: + return s; # Pass through return {'&': '&', '<': '<', '>': '>'}[s] + def _img_repl(self, word): # LEGACY + return self._inl_repl('{{' + word + '}}') + + def _word_repl(self, word): # LEGACY + if self.in_html: return word # pass through + return link_tag(word) + + def _url_repl(self, word): # LEGACY + if self.in_html: return word # pass through + return link_tag(word) + + def _email_repl(self, word): # LEGACY + if self.in_html: return word # pass through + return '%s' % (word, word) + def _li_repl(self, match): if self.in_li: return '
  • ' @@ -499,51 +508,50 @@ class WikiFormatter: def print_html(self): print('

    ') - # For each line, we scan through looking for magic - # strings, outputting verbatim any intervening text - # TODO: highlight search words (look at referrer) - scan_re = re.compile( - r"(?:" - # Formatting - + r"(?P\*\*|'''|//|''|##|``|__|\^\^|,,)" - + r"|(?P\={2,6})" - + r"|(?P
    \\\\)" - + r"|(?P^-{3,})" - + r"|(?P\b(FIXME|TODO|DONE)\b)" + scan_re = re.compile(r"""(?: + # Styles and formatting + (?P \*\*|'''|//|''|\#\#|``|__|\^\^|,,) + | (?P \={2,6}) + | (?P
    \\\\) + | (?P ^-{3,}) + | (?P \b( FIXME | TODO | DONE )\b ) # Links - + r"|(?P\<\<([^\s\|\>]+)(?:\s*\|\s*([^\>]+)|)\>\>)" - + r"|(?P\[\[([^\s\|]+)(?:\s*\|\s*([^\]]+)|)\]\])" + | (?P \<\<([^\s\|\>]+)(?:\s*\|\s*([^\>]+)|)\>\>) + | (?P \[\[([^\s\|]+)(?:\s*\|\s*([^\]]+)|)\]\]) # Inline HTML - + r"|(?P<(/|)(br|hr|div|span|form|iframe|input|textarea|a|img|h[1-5])[^>]*>)" - + r"|(?P[<>&])" + | (?P <(br|hr|div|span|form|iframe|input|textarea|a|img|h[1-5])\b ) + | (?P ( /\s*> | ) ) + | (?P [<>&] ) # Auto links (LEGACY) - + r"|(?P\b[a-zA-Z0-9_/-]+\.(png|gif|jpg|jpeg|bmp|ico))" - + r"|(?P\b(?:[A-Z][a-z]+){2,}\b)" - + r"|(?P(http|https|ftp|mailto)\:[^\s'\"]+\S)" - + r"|(?P[-\w._+]+\@[\w.-]+)" + | (?P \b[a-zA-Z0-9_/-]+\.(png|gif|jpg|jpeg|bmp|ico|ogm|ogg|mkv|mpg|mpeg|mp4|avi|asf|flv|wmv|qt)) + | (?P \b(?:[A-Z][a-z]+){2,}\b) + | (?P (http|https|ftp|mailto)\:[^\s'\"]+\S) + | (?P [-\w._+]+\@[\w.-]+) # Lists, divs, spans - + r"|(?P

  • ^\s+[\*#] +)" - + r"|(?P
    \{\{\{|\s*\}\}\})"
    -            + r"|(?P\{\{([^\s\|]+)(?:\s*\|\s*([^\]]+)|)\}\})"
    +            | (?P
  • ^\s+[\*\#]\s+) + | (?P
       \{\{\{|\s*\}\}\})
    +            | (?P   \{\{([^\s\|]+)(?:\s*\|\s*([^\]]+)|)\}\})
     
                 # Tables
    -            + r"|(?P^\s*\|\|(=|)\s*)"
    -            + r"|(?P\s*\|\|(=|)\s*$)"
    -            + r"|(?P\s*\|\|(=|)\s*)"
    -            + r")")
    -        pre_re = re.compile(
    -            r"(?:"
    -            + r"(?P
    \s*\}\}\})"
    -            + r"|(?P[<>&])"
    -            + r")")
    +            | (?P    ^\s*\|\|(=|)\s*)
    +            | (?P   \s*\|\|(=|)\s*$)
    +            | (?P    \s*\|\|(=|)\s*)
    +
    +            # TODO: highlight search words (look at referrer)
    +          )""", re.VERBOSE)
    +        pre_re = re.compile("""(?:
    +              (?P
    \s*\}\}\})
    +            | (?P[<>&])"
    +            )""", re.VERBOSE)
             blank_re = re.compile(r"^\s*$")
             indent_re = re.compile(r"^\s*")
             tr_re = re.compile(r"^\s*\|\|")
             eol_re = re.compile(r"\r?\n")
    +        # For each line, we scan through looking for magic strings, outputting verbatim any intervening text
             for self.line in eol_re.split(str(self.raw.expandtabs(), 'utf-8')):
                 # Skip pragmas
                 if self.in_header:
    @@ -595,19 +603,14 @@ class Page:
                     return False
                 raise err
     
    -    def link_to(self):
    -        word = self.page_name
    -        if self.exists():
    -            return link_tag(word, word, 'wikilink')
    -        else:
    -            return link_tag(word, nonexist_pfx + word, 'nonexistent')
    -
    -    def get_raw_body(self):
    +    def get_raw_body(self, default=None):
             try:
                 return open(self._filename(), 'rb').read()
             except IOError as err:
                 if err.errno == errno.ENOENT:
    -                return '' # just doesn't exist, use default
    +                if default is None:
    +                    default = '//[[?edit=%s|Describe %s]]//' % (self.page_name, self.page_name)
    +                return default
                 if err.errno == errno.EISDIR:
                     return self.format_dir()
                 raise err
    @@ -715,7 +718,8 @@ class Page:
             print('
    ' % relative_url(self.page_name)) print('' % (self.page_name)) print('
    ' % (self.page_name)) - print('' % cgi.escape(preview or self.get_raw_body())) + print('' \ + % cgi.escape(preview or self.get_raw_body(default=''))) print(' ' % filename) print("""
    @@ -731,22 +735,18 @@ class Page: //--> """) - print("

    " + Page('EditingTips').link_to() + "

    ") + print("

    " + link_tag('EditingTips') + "

    ") if preview: print("
    ") WikiFormatter(preview).print_html() print("
    ") send_footer() - def send_raw(self, mimetype='text/plain'): - if self.can_read(): - body = self.get_raw_body() - emit_header(mimetype) - print(body) - else: + def send_raw(self, mimetype='text/plain', args=[]): + if not self.can_read(): send_title(None, msg_text='Read access denied by ACLs', msg_type='notice') + return - def send_image(self, mimetype, args=[]): if 'maxwidth' in args: import subprocess emit_header(mimetype) @@ -754,7 +754,9 @@ class Page: subprocess.check_call(['gm', 'convert', self._filename(), '-scale', args['maxwidth'].value + ' >', '-']) else: - self.send_raw(mimetype) + body = self.get_raw_body() + emit_header(mimetype) + print(body) def _write_file(self, data): tmp_filename = self._tmp_filename() @@ -777,19 +779,13 @@ class Page: self._write_file(newdata) rc = 0 if post_edit_hook: - # FIXME: what's the std way to perform shell quoting in python? - cmd = ( post_edit_hook - + " '" + data_dir + '/' + self.page_name - + "' '" + remote_user() - + "' '" + remote_host() - + "' '" + changelog + "'" - ) - out = os.popen(cmd) - output = out.read() - rc = out.close() + import subprocess + cmd = [ post_edit_hook, data_dir + '/' + self.page_name, remote_user(), remote_host(), changelog] + child = subprocess.Popen(cmd, stdout=subprocess.PIPE, close_fds=True) + output = child.stdout.read() + rc = child.wait() if rc: - self.msg_text += "Post-editing hook returned %d.\n" % rc - self.msg_text += 'Command was: ' + cmd + '\n' + self.msg_text += "Post-editing hook returned %d. Command was:\n'%s'\n" % (rc, "' '".join(cmd)) if output: self.msg_text += 'Output follows:\n' + output else: @@ -803,25 +799,15 @@ def main(): handler(form[cmd].value) break else: - path_info = os.environ.get('PATH_INFO', '') - if len(path_info) and path_info[0] == '/': - query = path_info[1:] or 'FrontPage' - else: - query = os.environ.get('QUERY_STRING', '') or 'FrontPage' - + query = query_string() if file_re.match(query): - if word_re.match(query): - Page(query).format() + # FIMXE: this is all bullshit, MimeTypes bases its guess on the extension! + from mimetypes import MimeTypes + mimetype, encoding = MimeTypes().guess_type(query) + if mimetype: + Page(query).send_raw(mimetype=mimetype, args=form) else: - from mimetypes import MimeTypes - mimetype, encoding = MimeTypes().guess_type(query) - if mimetype: - if mimetype.startswith('image/'): - Page(query).send_image(mimetype=mimetype, args=form) - else: - Page(query).send_raw(mimetype=mimetype) - else: - Page(query).format() + Page(query).format() else: send_httperror("403 Forbidden", query) diff --git a/git-logo.png b/git-logo.png deleted file mode 100644 index 16ae8d5..0000000 Binary files a/git-logo.png and /dev/null differ diff --git a/gitweb.cgi b/gitweb.cgi index 2b78db3..962c046 100755 --- a/gitweb.cgi +++ b/gitweb.cgi @@ -23,10 +23,35 @@ BEGIN { } our $cgi = new CGI; -our $version = "1.5.6.5"; +our $version = "1.6.2.103.gc4994"; our $my_url = $cgi->url(); our $my_uri = $cgi->url(-absolute => 1); +# Base URL for relative URLs in gitweb ($logo, $favicon, ...), +# needed and used only for URLs with nonempty PATH_INFO +our $base_url = $my_url; + +# When the script is used as DirectoryIndex, the URL does not contain the name +# of the script file itself, and $cgi->url() fails to strip PATH_INFO, so we +# have to do it ourselves. We make $path_info global because it's also used +# later on. 
+# +# Another issue with the script being the DirectoryIndex is that the resulting +# $my_url data is not the full script URL: this is good, because we want +# generated links to keep implying the script name if it wasn't explicitly +# indicated in the URL we're handling, but it means that $my_url cannot be used +# as base URL. +# Therefore, if we needed to strip PATH_INFO, then we know that we have +# to build the base URL ourselves: +our $path_info = $ENV{"PATH_INFO"}; +if ($path_info) { + if ($my_url =~ s,\Q$path_info\E$,, && + $my_uri =~ s,\Q$path_info\E$,, && + defined $ENV{'SCRIPT_NAME'}) { + $base_url = $cgi->url(-base => 1) . $ENV{'SCRIPT_NAME'}; + } +} + # core git executable to use # this can just be "git" if your webserver has a sensible PATH our $GIT = "/usr/bin/git"; @@ -43,7 +68,7 @@ our $project_maxdepth = 2007; our $home_link = $my_uri || "/"; # string of the home link on top of all pages -our $home_link_str = $ENV{'SERVER_NAME'} ? "git://" . $ENV{'SERVER_NAME'} : "projects"; +our $home_link_str = "projects"; # name of your site or organization to appear in page titles # replace this with something more descriptive for clearer bookmarks @@ -86,6 +111,11 @@ our $default_projects_order = "project"; # (only effective if this variable evaluates to true) our $export_ok = ""; +# show repository only if this subroutine returns true +# when given the path to the project, for example: +# sub { return -e "$_[0]/git-daemon-export-ok"; } +our $export_auth_hook = undef; + # only allow viewing of repositories also shown on the overview page our $strict_export = ""; @@ -118,6 +148,10 @@ our $fallback_encoding = 'latin1'; # - one might want to include '-B' option, e.g. '-B', '-M' our @diff_opts = ('-M'); # taken from git_commit +# Disables features that would allow repository owners to inject script into +# the gitweb domain. +our $prevent_xss = 0; + # information about snapshot formats that gitweb is capable of serving our %known_snapshot_formats = ( # name => { @@ -176,7 +210,9 @@ our %feature = ( # if there is no 'sub' key (no feature-sub), then feature cannot be # overriden # - # use gitweb_check_feature() to check if is enabled + # use gitweb_get_feature() to retrieve the value + # (an array) or gitweb_check_feature() to check if + # is enabled # Enable the 'blame' blob view, showing the last commit that modified # each line in the file. This can be very CPU-intensive. @@ -187,7 +223,7 @@ our %feature = ( # $feature{'blame'}{'override'} = 1; # and in project config gitweb.blame = 0|1; 'blame' => { - 'sub' => \&feature_blame, + 'sub' => sub { feature_bool('blame', @_) }, 'override' => 0, 'default' => [0]}, @@ -225,6 +261,7 @@ our %feature = ( # $feature{'grep'}{'override'} = 1; # and in project config gitweb.grep = 0|1; 'grep' => { + 'sub' => sub { feature_bool('grep', @_) }, 'override' => 0, 'default' => [1]}, @@ -238,7 +275,7 @@ our %feature = ( # $feature{'pickaxe'}{'override'} = 1; # and in project config gitweb.pickaxe = 0|1; 'pickaxe' => { - 'sub' => \&feature_pickaxe, + 'sub' => sub { feature_bool('pickaxe', @_) }, 'override' => 0, 'default' => [1]}, @@ -275,9 +312,62 @@ our %feature = ( 'forks' => { 'override' => 0, 'default' => [0]}, + + # Insert custom links to the action bar of all project pages. + # This enables you mainly to link to third-party scripts integrating + # into gitweb; e.g. git-browser for graphical history representation + # or custom web-based repository administration interface. 
+ + # The 'default' value consists of a list of triplets in the form + # (label, link, position) where position is the label after which + # to insert the link and link is a format string where %n expands + # to the project name, %f to the project path within the filesystem, + # %h to the current hash (h gitweb parameter) and %b to the current + # hash base (hb gitweb parameter); %% expands to %. + + # To enable system wide have in $GITWEB_CONFIG e.g. + # $feature{'actions'}{'default'} = [('graphiclog', + # '/git-browser/by-commit.html?r=%n', 'summary')]; + # Project specific override is not supported. + 'actions' => { + 'override' => 0, + 'default' => []}, + + # Allow gitweb scan project content tags described in ctags/ + # of project repository, and display the popular Web 2.0-ish + # "tag cloud" near the project list. Note that this is something + # COMPLETELY different from the normal Git tags. + + # gitweb by itself can show existing tags, but it does not handle + # tagging itself; you need an external application for that. + # For an example script, check Girocco's cgi/tagproj.cgi. + # You may want to install the HTML::TagCloud Perl module to get + # a pretty tag cloud instead of just a list of tags. + + # To enable system wide have in $GITWEB_CONFIG + # $feature{'ctags'}{'default'} = ['path_to_tag_script']; + # Project specific override is not supported. + 'ctags' => { + 'override' => 0, + 'default' => [0]}, + + # The maximum number of patches in a patchset generated in patch + # view. Set this to 0 or undef to disable patch view, or to a + # negative number to remove any limit. + + # To disable system wide have in $GITWEB_CONFIG + # $feature{'patches'}{'default'} = [0]; + # To have project specific config enable override in $GITWEB_CONFIG + # $feature{'patches'}{'override'} = 1; + # and in project config gitweb.patches = 0|n; + # where n is the maximum number of patches allowed in a patchset. + 'patches' => { + 'sub' => \&feature_patches, + 'override' => 0, + 'default' => [16]}, ); -sub gitweb_check_feature { +sub gitweb_get_feature { my ($name) = @_; return unless exists $feature{$name}; my ($sub, $override, @defaults) = ( @@ -292,16 +382,33 @@ sub gitweb_check_feature { return $sub->(@defaults); } -sub feature_blame { - my ($val) = git_get_project_config('blame', '--bool'); +# A wrapper to check if a given feature is enabled. 
+# With this, you can say +# +# my $bool_feat = gitweb_check_feature('bool_feat'); +# gitweb_check_feature('bool_feat') or somecode; +# +# instead of +# +# my ($bool_feat) = gitweb_get_feature('bool_feat'); +# (gitweb_get_feature('bool_feat'))[0] or somecode; +# +sub gitweb_check_feature { + return (gitweb_get_feature(@_))[0]; +} + + +sub feature_bool { + my $key = shift; + my ($val) = git_get_project_config($key, '--bool'); - if ($val eq 'true') { - return 1; + if (!defined $val) { + return ($_[0]); + } elsif ($val eq 'true') { + return (1); } elsif ($val eq 'false') { - return 0; + return (0); } - - return $_[0]; } sub feature_snapshot { @@ -316,25 +423,11 @@ sub feature_snapshot { return @fmts; } -sub feature_grep { - my ($val) = git_get_project_config('grep', '--bool'); +sub feature_patches { + my @val = (git_get_project_config('patches', '--int')); - if ($val eq 'true') { - return (1); - } elsif ($val eq 'false') { - return (0); - } - - return ($_[0]); -} - -sub feature_pickaxe { - my ($val) = git_get_project_config('pickaxe', '--bool'); - - if ($val eq 'true') { - return (1); - } elsif ($val eq 'false') { - return (0); + if (@val) { + return @val; } return ($_[0]); @@ -353,7 +446,8 @@ sub check_head_link { sub check_export_ok { my ($dir) = @_; return (check_head_link($dir) && - (!$export_ok || -e "$dir/$export_ok")); + (!$export_ok || -e "$dir/$export_ok") && + (!$export_auth_hook || $export_auth_hook->($dir))); } # process alternate names for backward compatibility @@ -383,191 +477,343 @@ $projects_list ||= $projectroot; # ====================================================================== # input validation and dispatch -our $action = $cgi->param('a'); + +# input parameters can be collected from a variety of sources (presently, CGI +# and PATH_INFO), so we define an %input_params hash that collects them all +# together during validation: this allows subsequent uses (e.g. href()) to be +# agnostic of the parameter origin + +our %input_params = (); + +# input parameters are stored with the long parameter name as key. This will +# also be used in the href subroutine to convert parameters to their CGI +# equivalent, and since the href() usage is the most frequent one, we store +# the name -> CGI key mapping here, instead of the reverse. +# +# XXX: Warning: If you touch this, check the search form for updating, +# too. 
+ +our @cgi_param_mapping = ( + project => "p", + action => "a", + file_name => "f", + file_parent => "fp", + hash => "h", + hash_parent => "hp", + hash_base => "hb", + hash_parent_base => "hpb", + page => "pg", + order => "o", + searchtext => "s", + searchtype => "st", + snapshot_format => "sf", + extra_options => "opt", + search_use_regexp => "sr", +); +our %cgi_param_mapping = @cgi_param_mapping; + +# we will also need to know the possible actions, for validation +our %actions = ( + "blame" => \&git_blame, + "blobdiff" => \&git_blobdiff, + "blobdiff_plain" => \&git_blobdiff_plain, + "blob" => \&git_blob, + "blob_plain" => \&git_blob_plain, + "commitdiff" => \&git_commitdiff, + "commitdiff_plain" => \&git_commitdiff_plain, + "commit" => \&git_commit, + "forks" => \&git_forks, + "heads" => \&git_heads, + "history" => \&git_history, + "log" => \&git_log, + "patch" => \&git_patch, + "patches" => \&git_patches, + "rss" => \&git_rss, + "atom" => \&git_atom, + "search" => \&git_search, + "search_help" => \&git_search_help, + "shortlog" => \&git_shortlog, + "summary" => \&git_summary, + "tag" => \&git_tag, + "tags" => \&git_tags, + "tree" => \&git_tree, + "snapshot" => \&git_snapshot, + "object" => \&git_object, + # those below don't need $project + "opml" => \&git_opml, + "project_list" => \&git_project_list, + "project_index" => \&git_project_index, +); + +# finally, we have the hash of allowed extra_options for the commands that +# allow them +our %allowed_options = ( + "--no-merges" => [ qw(rss atom log shortlog history) ], +); + +# fill %input_params with the CGI parameters. All values except for 'opt' +# should be single values, but opt can be an array. We should probably +# build an array of parameters that can be multi-valued, but since for the time +# being it's only this one, we just single it out +while (my ($name, $symbol) = each %cgi_param_mapping) { + if ($symbol eq 'opt') { + $input_params{$name} = [ $cgi->param($symbol) ]; + } else { + $input_params{$name} = $cgi->param($symbol); + } +} + +# now read PATH_INFO and update the parameter list for missing parameters +sub evaluate_path_info { + return if defined $input_params{'project'}; + return if !$path_info; + $path_info =~ s,^/+,,; + return if !$path_info; + + # find which part of PATH_INFO is project + my $project = $path_info; + $project =~ s,/+$,,; + while ($project && !check_head_link("$projectroot/$project")) { + $project =~ s,/*[^/]*$,,; + } + return unless $project; + $input_params{'project'} = $project; + + # do not change any parameters if an action is given using the query string + return if $input_params{'action'}; + $path_info =~ s,^\Q$project\E/*,,; + + # next, check if we have an action + my $action = $path_info; + $action =~ s,/.*$,,; + if (exists $actions{$action}) { + $path_info =~ s,^$action/*,,; + $input_params{'action'} = $action; + } + + # list of actions that want hash_base instead of hash, but can have no + # pathname (f) parameter + my @wants_base = ( + 'tree', + 'history', + ); + + # we want to catch + # [$hash_parent_base[:$file_parent]..]$hash_parent[:$file_name] + my ($parentrefname, $parentpathname, $refname, $pathname) = + ($path_info =~ /^(?:(.+?)(?::(.+))?\.\.)?(.+?)(?::(.+))?$/); + + # first, analyze the 'current' part + if (defined $pathname) { + # we got "branch:filename" or "branch:dir/" + # we could use git_get_type(branch:pathname), but: + # - it needs $git_dir + # - it does a git() call + # - the convention of terminating directories with a slash + # makes it superfluous + # - embedding the 
action in the PATH_INFO would make it even + # more superfluous + $pathname =~ s,^/+,,; + if (!$pathname || substr($pathname, -1) eq "/") { + $input_params{'action'} ||= "tree"; + $pathname =~ s,/$,,; + } else { + # the default action depends on whether we had parent info + # or not + if ($parentrefname) { + $input_params{'action'} ||= "blobdiff_plain"; + } else { + $input_params{'action'} ||= "blob_plain"; + } + } + $input_params{'hash_base'} ||= $refname; + $input_params{'file_name'} ||= $pathname; + } elsif (defined $refname) { + # we got "branch". In this case we have to choose if we have to + # set hash or hash_base. + # + # Most of the actions without a pathname only want hash to be + # set, except for the ones specified in @wants_base that want + # hash_base instead. It should also be noted that hand-crafted + # links having 'history' as an action and no pathname or hash + # set will fail, but that happens regardless of PATH_INFO. + $input_params{'action'} ||= "shortlog"; + if (grep { $_ eq $input_params{'action'} } @wants_base) { + $input_params{'hash_base'} ||= $refname; + } else { + $input_params{'hash'} ||= $refname; + } + } + + # next, handle the 'parent' part, if present + if (defined $parentrefname) { + # a missing pathspec defaults to the 'current' filename, allowing e.g. + # someproject/blobdiff/oldrev..newrev:/filename + if ($parentpathname) { + $parentpathname =~ s,^/+,,; + $parentpathname =~ s,/$,,; + $input_params{'file_parent'} ||= $parentpathname; + } else { + $input_params{'file_parent'} ||= $input_params{'file_name'}; + } + # we assume that hash_parent_base is wanted if a path was specified, + # or if the action wants hash_base instead of hash + if (defined $input_params{'file_parent'} || + grep { $_ eq $input_params{'action'} } @wants_base) { + $input_params{'hash_parent_base'} ||= $parentrefname; + } else { + $input_params{'hash_parent'} ||= $parentrefname; + } + } + + # for the snapshot action, we allow URLs in the form + # $project/snapshot/$hash.ext + # where .ext determines the snapshot and gets removed from the + # passed $refname to provide the $hash. + # + # To be able to tell that $refname includes the format extension, we + # require the following two conditions to be satisfied: + # - the hash input parameter MUST have been set from the $refname part + # of the URL (i.e. they must be equal) + # - the snapshot format MUST NOT have been defined already (e.g. from + # CGI parameter sf) + # It's also useless to try any matching unless $refname has a dot, + # so we check for that too + if (defined $input_params{'action'} && + $input_params{'action'} eq 'snapshot' && + defined $refname && index($refname, '.') != -1 && + $refname eq $input_params{'hash'} && + !defined $input_params{'snapshot_format'}) { + # We loop over the known snapshot formats, checking for + # extensions. Allowed extensions are both the defined suffix + # (which includes the initial dot already) and the snapshot + # format key itself, with a prepended dot + while (my ($fmt, %opt) = each %known_snapshot_formats) { + my $hash = $refname; + my $sfx; + $hash =~ s/(\Q$opt{'suffix'}\E|\Q.$fmt\E)$//; + next unless $sfx = $1; + # a valid suffix was found, so set the snapshot format + # and reset the hash parameter + $input_params{'snapshot_format'} = $fmt; + $input_params{'hash'} = $hash; + # we also set the format suffix to the one requested + # in the URL: this way a request for e.g. 
.tgz returns + # a .tgz instead of a .tar.gz + $known_snapshot_formats{$fmt}{'suffix'} = $sfx; + last; + } + } +} +evaluate_path_info(); + +our $action = $input_params{'action'}; if (defined $action) { - if ($action =~ m/[^0-9a-zA-Z\.\-_]/) { - die_error(undef, "Invalid action parameter"); + if (!validate_action($action)) { + die_error(400, "Invalid action parameter"); } } # parameters which are pathnames -our $project = $cgi->param('p'); +our $project = $input_params{'project'}; if (defined $project) { - if (!validate_pathname($project) || - !(-d "$projectroot/$project") || - !check_head_link("$projectroot/$project") || - ($export_ok && !(-e "$projectroot/$project/$export_ok")) || - ($strict_export && !project_in_list($project))) { + if (!validate_project($project)) { undef $project; - die_error(undef, "No such project"); + die_error(404, "No such project"); } } -our $file_name = $cgi->param('f'); +our $file_name = $input_params{'file_name'}; if (defined $file_name) { if (!validate_pathname($file_name)) { - die_error(undef, "Invalid file parameter"); + die_error(400, "Invalid file parameter"); } } -our $file_parent = $cgi->param('fp'); +our $file_parent = $input_params{'file_parent'}; if (defined $file_parent) { if (!validate_pathname($file_parent)) { - die_error(undef, "Invalid file parent parameter"); + die_error(400, "Invalid file parent parameter"); } } # parameters which are refnames -our $hash = $cgi->param('h'); +our $hash = $input_params{'hash'}; if (defined $hash) { if (!validate_refname($hash)) { - die_error(undef, "Invalid hash parameter"); + die_error(400, "Invalid hash parameter"); } } -our $hash_parent = $cgi->param('hp'); +our $hash_parent = $input_params{'hash_parent'}; if (defined $hash_parent) { if (!validate_refname($hash_parent)) { - die_error(undef, "Invalid hash parent parameter"); + die_error(400, "Invalid hash parent parameter"); } } -our $hash_base = $cgi->param('hb'); +our $hash_base = $input_params{'hash_base'}; if (defined $hash_base) { if (!validate_refname($hash_base)) { - die_error(undef, "Invalid hash base parameter"); + die_error(400, "Invalid hash base parameter"); } } -my %allowed_options = ( - "--no-merges" => [ qw(rss atom log shortlog history) ], -); - -our @extra_options = $cgi->param('opt'); -if (defined @extra_options) { - foreach my $opt (@extra_options) { - if (not exists $allowed_options{$opt}) { - die_error(undef, "Invalid option parameter"); - } - if (not grep(/^$action$/, @{$allowed_options{$opt}})) { - die_error(undef, "Invalid option parameter for this action"); - } +our @extra_options = @{$input_params{'extra_options'}}; +# @extra_options is always defined, since it can only be (currently) set from +# CGI, and $cgi->param() returns the empty array in array context if the param +# is not set +foreach my $opt (@extra_options) { + if (not exists $allowed_options{$opt}) { + die_error(400, "Invalid option parameter"); + } + if (not grep(/^$action$/, @{$allowed_options{$opt}})) { + die_error(400, "Invalid option parameter for this action"); } } -our $hash_parent_base = $cgi->param('hpb'); +our $hash_parent_base = $input_params{'hash_parent_base'}; if (defined $hash_parent_base) { if (!validate_refname($hash_parent_base)) { - die_error(undef, "Invalid hash parent base parameter"); + die_error(400, "Invalid hash parent base parameter"); } } # other parameters -our $page = $cgi->param('pg'); +our $page = $input_params{'page'}; if (defined $page) { if ($page =~ m/[^0-9]/) { - die_error(undef, "Invalid page parameter"); + die_error(400, "Invalid page 
parameter"); } } -our $searchtype = $cgi->param('st'); +our $searchtype = $input_params{'searchtype'}; if (defined $searchtype) { if ($searchtype =~ m/[^a-z]/) { - die_error(undef, "Invalid searchtype parameter"); + die_error(400, "Invalid searchtype parameter"); } } -our $search_use_regexp = $cgi->param('sr'); +our $search_use_regexp = $input_params{'search_use_regexp'}; -our $searchtext = $cgi->param('s'); +our $searchtext = $input_params{'searchtext'}; our $search_regexp; if (defined $searchtext) { if (length($searchtext) < 2) { - die_error(undef, "At least two characters are required for search parameter"); + die_error(403, "At least two characters are required for search parameter"); } $search_regexp = $search_use_regexp ? $searchtext : quotemeta $searchtext; } -# now read PATH_INFO and use it as alternative to parameters -sub evaluate_path_info { - return if defined $project; - my $path_info = $ENV{"PATH_INFO"}; - return if !$path_info; - $path_info =~ s,^/+,,; - return if !$path_info; - # find which part of PATH_INFO is project - $project = $path_info; - $project =~ s,/+$,,; - while ($project && !check_head_link("$projectroot/$project")) { - $project =~ s,/*[^/]*$,,; - } - # validate project - $project = validate_pathname($project); - if (!$project || - ($export_ok && !-e "$projectroot/$project/$export_ok") || - ($strict_export && !project_in_list($project))) { - undef $project; - return; - } - # do not change any parameters if an action is given using the query string - return if $action; - $path_info =~ s,^\Q$project\E/*,,; - my ($refname, $pathname) = split(/:/, $path_info, 2); - if (defined $pathname) { - # we got "project.git/branch:filename" or "project.git/branch:dir/" - # we could use git_get_type(branch:pathname), but it needs $git_dir - $pathname =~ s,^/+,,; - if (!$pathname || substr($pathname, -1) eq "/") { - $action ||= "tree"; - $pathname =~ s,/$,,; - } else { - $action ||= "blob_plain"; - } - $hash_base ||= validate_refname($refname); - $file_name ||= validate_pathname($pathname); - } elsif (defined $refname) { - # we got "project.git/branch" - $action ||= "shortlog"; - $hash ||= validate_refname($refname); - } -} -evaluate_path_info(); - # path to the current git repository our $git_dir; $git_dir = "$projectroot/$project" if $project; -# dispatch -my %actions = ( - "blame" => \&git_blame2, - "blobdiff" => \&git_blobdiff, - "blobdiff_plain" => \&git_blobdiff_plain, - "blob" => \&git_blob, - "blob_plain" => \&git_blob_plain, - "commitdiff" => \&git_commitdiff, - "commitdiff_plain" => \&git_commitdiff_plain, - "commit" => \&git_commit, - "forks" => \&git_forks, - "heads" => \&git_heads, - "history" => \&git_history, - "log" => \&git_log, - "rss" => \&git_rss, - "atom" => \&git_atom, - "search" => \&git_search, - "search_help" => \&git_search_help, - "shortlog" => \&git_shortlog, - "summary" => \&git_summary, - "tag" => \&git_tag, - "tags" => \&git_tags, - "tree" => \&git_tree, - "snapshot" => \&git_snapshot, - "object" => \&git_object, - # those below don't need $project - "opml" => \&git_opml, - "project_list" => \&git_project_list, - "project_index" => \&git_project_index, -); +# list of supported snapshot formats +our @snapshot_fmts = gitweb_get_feature('snapshot'); +@snapshot_fmts = filter_snapshot_fmts(@snapshot_fmts); +# dispatch if (!defined $action) { if (defined $hash) { $action = git_get_type($hash); @@ -580,11 +826,11 @@ if (!defined $action) { } } if (!defined($actions{$action})) { - die_error(undef, "Unknown action"); + die_error(400, "Unknown action"); } 
if ($action !~ m/^(opml|project_list|project_index)$/ && !$project) { - die_error(undef, "Project needed"); + die_error(400, "Project needed"); } $actions{$action}->(); exit; @@ -597,55 +843,96 @@ sub href (%) { # default is to use -absolute url() i.e. $my_uri my $href = $params{-full} ? $my_url : $my_uri; - # XXX: Warning: If you touch this, check the search form for updating, - # too. - - my @mapping = ( - project => "p", - action => "a", - file_name => "f", - file_parent => "fp", - hash => "h", - hash_parent => "hp", - hash_base => "hb", - hash_parent_base => "hpb", - page => "pg", - order => "o", - searchtext => "s", - searchtype => "st", - snapshot_format => "sf", - extra_options => "opt", - search_use_regexp => "sr", - ); - my %mapping = @mapping; - $params{'project'} = $project unless exists $params{'project'}; if ($params{-replay}) { - while (my ($name, $symbol) = each %mapping) { + while (my ($name, $symbol) = each %cgi_param_mapping) { if (!exists $params{$name}) { - # to allow for multivalued params we use arrayref form - $params{$name} = [ $cgi->param($symbol) ]; + $params{$name} = $input_params{$name}; } } } - my ($use_pathinfo) = gitweb_check_feature('pathinfo'); - if ($use_pathinfo) { - # use PATH_INFO for project name - $href .= "/".esc_url($params{'project'}) if defined $params{'project'}; + my $use_pathinfo = gitweb_check_feature('pathinfo'); + if ($use_pathinfo and defined $params{'project'}) { + # try to put as many parameters as possible in PATH_INFO: + # - project name + # - action + # - hash_parent or hash_parent_base:/file_parent + # - hash or hash_base:/filename + # - the snapshot_format as an appropriate suffix + + # When the script is the root DirectoryIndex for the domain, + # $href here would be something like http://gitweb.example.com/ + # Thus, we strip any trailing / from $href, to spare us double + # slashes in the final URL + $href =~ s,/$,,; + + # Then add the project name, if present + $href .= "/".esc_url($params{'project'}); delete $params{'project'}; - # Summary just uses the project path URL - if (defined $params{'action'} && $params{'action'} eq 'summary') { + # since we destructively absorb parameters, we keep this + # boolean that remembers if we're handling a snapshot + my $is_snapshot = $params{'action'} eq 'snapshot'; + + # Summary just uses the project path URL, any other action is + # added to the URL + if (defined $params{'action'}) { + $href .= "/".esc_url($params{'action'}) unless $params{'action'} eq 'summary'; delete $params{'action'}; } + + # Next, we put hash_parent_base:/file_parent..hash_base:/file_name, + # stripping nonexistent or useless pieces + $href .= "/" if ($params{'hash_base'} || $params{'hash_parent_base'} + || $params{'hash_parent'} || $params{'hash'}); + if (defined $params{'hash_base'}) { + if (defined $params{'hash_parent_base'}) { + $href .= esc_url($params{'hash_parent_base'}); + # skip the file_parent if it's the same as the file_name + delete $params{'file_parent'} if $params{'file_parent'} eq $params{'file_name'}; + if (defined $params{'file_parent'} && $params{'file_parent'} !~ /\.\./) { + $href .= ":/".esc_url($params{'file_parent'}); + delete $params{'file_parent'}; + } + $href .= ".."; + delete $params{'hash_parent'}; + delete $params{'hash_parent_base'}; + } elsif (defined $params{'hash_parent'}) { + $href .= esc_url($params{'hash_parent'}). 
".."; + delete $params{'hash_parent'}; + } + + $href .= esc_url($params{'hash_base'}); + if (defined $params{'file_name'} && $params{'file_name'} !~ /\.\./) { + $href .= ":/".esc_url($params{'file_name'}); + delete $params{'file_name'}; + } + delete $params{'hash'}; + delete $params{'hash_base'}; + } elsif (defined $params{'hash'}) { + $href .= esc_url($params{'hash'}); + delete $params{'hash'}; + } + + # If the action was a snapshot, we can absorb the + # snapshot_format parameter too + if ($is_snapshot) { + my $fmt = $params{'snapshot_format'}; + # snapshot_format should always be defined when href() + # is called, but just in case some code forgets, we + # fall back to the default + $fmt ||= $snapshot_fmts[0]; + $href .= $known_snapshot_formats{$fmt}{'suffix'}; + delete $params{'snapshot_format'}; + } } # now encode the parameters explicitly my @result = (); - for (my $i = 0; $i < @mapping; $i += 2) { - my ($name, $symbol) = ($mapping[$i], $mapping[$i+1]); + for (my $i = 0; $i < @cgi_param_mapping; $i += 2) { + my ($name, $symbol) = ($cgi_param_mapping[$i], $cgi_param_mapping[$i+1]); if (defined $params{$name}) { if (ref($params{$name}) eq "ARRAY") { foreach my $par (@{$params{$name}}) { @@ -665,6 +952,24 @@ sub href (%) { ## ====================================================================== ## validation, quoting/unquoting and escaping +sub validate_action { + my $input = shift || return undef; + return undef unless exists $actions{$input}; + return $input; +} + +sub validate_project { + my $input = shift || return undef; + if (!validate_pathname($input) || + !(-d "$projectroot/$input") || + !check_export_ok("$projectroot/$input") || + ($strict_export && !project_in_list($input))) { + return undef; + } else { + return $input; + } +} + sub validate_pathname { my $input = shift || return undef; @@ -775,7 +1080,7 @@ sub quot_cec { ); my $chr = ( (exists $es{$cntrl}) ? $es{$cntrl} - : sprintf('\%03o', ord($cntrl)) ); + : sprintf('\%2x', ord($cntrl)) ); if ($opts{-nohtml}) { return $chr; } else { @@ -1079,24 +1384,32 @@ sub format_log_line_html { my $line = shift; $line = esc_html($line, -nbsp=>1); - if ($line =~ m/([0-9a-fA-F]{8,40})/) { - my $hash_text = $1; - my $link = - $cgi->a({-href => href(action=>"object", hash=>$hash_text), - -class => "text"}, $hash_text); - $line =~ s/$hash_text/$link/; - } + $line =~ s{\b([0-9a-fA-F]{8,40})\b}{ + $cgi->a({-href => href(action=>"object", hash=>$1), + -class => "text"}, $1); + }eg; + return $line; } # format marker of refs pointing to given object + +# the destination action is chosen based on object type and current context: +# - for annotated tags, we choose the tag view unless it's the current view +# already, in which case we go to shortlog view +# - for other refs, we keep the current view if we're in history, shortlog or +# log view, and select shortlog otherwise sub format_ref_marker { my ($refs, $id) = @_; my $markers = ''; if (defined $refs->{$id}) { foreach my $ref (@{$refs->{$id}}) { + # this code exploits the fact that non-lightweight tags are the + # only indirect objects, and that they are the only objects for which + # we want to use tag instead of shortlog as action my ($type, $name) = qw(); + my $indirect = ($ref =~ s/\^\{\}$//); # e.g. tags/v2.6.11 or heads/next if ($ref =~ m!^(.*?)s?/(.*)$!) { $type = $1; @@ -1106,8 +1419,29 @@ sub format_ref_marker { $name = $ref; } - $markers .= " " . - esc_html($name) . 
""; + my $class = $type; + $class .= " indirect" if $indirect; + + my $dest_action = "shortlog"; + + if ($indirect) { + $dest_action = "tag" unless $action eq "tag"; + } elsif ($action =~ /^(history|(short)?log)$/) { + $dest_action = $action; + } + + my $dest = ""; + $dest .= "refs/" unless $ref =~ m!^refs/!; + $dest .= $ref; + + my $link = $cgi->a({ + -href => href( + action=>$dest_action, + hash=>$dest + )}, $name); + + $markers .= " " . + $link . ""; } } @@ -1419,8 +1753,6 @@ sub format_diff_line { # linked. Pass the hash of the tree/commit to snapshot. sub format_snapshot_links { my ($hash) = @_; - my @snapshot_fmts = gitweb_check_feature('snapshot'); - @snapshot_fmts = filter_snapshot_fmts(@snapshot_fmts); my $num_fmts = @snapshot_fmts; if ($num_fmts > 1) { # A parenthesized list of links bearing format names. @@ -1580,18 +1912,19 @@ sub git_parse_project_config { return %config; } -# convert config value to boolean, 'true' or 'false' +# convert config value to boolean: 'true' or 'false' # no value, number > 0, 'true' and 'yes' values are true # rest of values are treated as false (never as error) sub config_to_bool { my $val = shift; + return 1 if !defined $val; # section.key + # strip leading and trailing whitespace $val =~ s/^\s+//; $val =~ s/\s+$//; - return (!defined $val || # section.key - ($val =~ /^\d+$/ && $val) || # section.key = 1 + return (($val =~ /^\d+$/ && $val) || # section.key = 1 ($val =~ /^(?:true|yes)$/i)); # section.key = true } @@ -1644,6 +1977,9 @@ sub git_get_project_config { $config_file = "$git_dir/config"; } + # check if config variable (key) exists + return unless exists $config{"gitweb.$key"}; + # ensure given type if (!defined $type) { return $config{"gitweb.$key"}; @@ -1665,7 +2001,7 @@ sub git_get_hash_by_path { $path =~ s,/+$,,; open my $fd, "-|", git_cmd(), "ls-tree", $base, "--", $path - or die_error(undef, "Open git-ls-tree failed"); + or die_error(500, "Open git-ls-tree failed"); my $line = <$fd>; close $fd or return undef; @@ -1724,6 +2060,71 @@ sub git_get_project_description { return $descr; } +sub git_get_project_ctags { + my $path = shift; + my $ctags = {}; + + $git_dir = "$projectroot/$path"; + unless (opendir D, "$git_dir/ctags") { + return $ctags; + } + foreach (grep { -f $_ } map { "$git_dir/ctags/$_" } readdir(D)) { + open CT, $_ or next; + my $val = ; + chomp $val; + close CT; + my $ctag = $_; $ctag =~ s#.*/##; + $ctags->{$ctag} = $val; + } + closedir D; + $ctags; +} + +sub git_populate_project_tagcloud { + my $ctags = shift; + + # First, merge different-cased tags; tags vote on casing + my %ctags_lc; + foreach (keys %$ctags) { + $ctags_lc{lc $_}->{count} += $ctags->{$_}; + if (not $ctags_lc{lc $_}->{topcount} + or $ctags_lc{lc $_}->{topcount} < $ctags->{$_}) { + $ctags_lc{lc $_}->{topcount} = $ctags->{$_}; + $ctags_lc{lc $_}->{topname} = $_; + } + } + + my $cloud; + if (eval { require HTML::TagCloud; 1; }) { + $cloud = HTML::TagCloud->new; + foreach (sort keys %ctags_lc) { + # Pad the title with spaces so that the cloud looks + # less crammed. 
+ my $title = $ctags_lc{$_}->{topname}; + $title =~ s/ / /g; + $title =~ s/^/ /g; + $title =~ s/$/ /g; + $cloud->add($title, $home_link."?by_tag=".$_, $ctags_lc{$_}->{count}); + } + } else { + $cloud = \%ctags_lc; + } + $cloud; +} + +sub git_show_project_tagcloud { + my ($cloud, $count) = @_; + print STDERR ref($cloud)."..\n"; + if (ref $cloud eq 'HTML::TagCloud') { + return $cloud->html_and_css($count); + } else { + my @tags = sort { $cloud->{$a}->{count} <=> $cloud->{$b}->{count} } keys %$cloud; + return '

    ' . join (', ', map { + "$cloud->{$_}->{topname}" + } splice(@tags, 0, $count)) . '

    '; + } +} + sub git_get_project_url_list { my $path = shift; @@ -1745,7 +2146,7 @@ sub git_get_projects_list { $filter ||= ''; $filter =~ s/\.git$//; - my ($check_forks) = gitweb_check_feature('forks'); + my $check_forks = gitweb_check_feature('forks'); if (-d $projects_list) { # search in directory @@ -1772,10 +2173,9 @@ sub git_get_projects_list { my $subdir = substr($File::Find::name, $pfxlen + 1); # we check related file in $projectroot - if ($check_forks and $subdir =~ m#/.#) { - $File::Find::prune = 1; - } elsif (check_export_ok("$projectroot/$filter/$subdir")) { - push @list, { path => ($filter ? "$filter/" : '') . $subdir }; + my $path = ($filter ? "$filter/" : '') . $subdir; + if (check_export_ok("$projectroot/$path")) { + push @list, { path => $path }; $File::Find::prune = 1; } }, @@ -1918,7 +2318,7 @@ sub git_get_references { while (my $line = <$fd>) { chomp $line; - if ($line =~ m!^([0-9a-fA-F]{40})\srefs/($type/?[^^]+)!) { + if ($line =~ m!^([0-9a-fA-F]{40})\srefs/($type.*)$!) { if (defined $refs{$1}) { push @{$refs{$1}}, $2; } else { @@ -2092,7 +2492,7 @@ sub parse_commit_text { last; } } - if ($co{'title'} eq "") { + if (! defined $co{'title'} || $co{'title'} eq "") { $co{'title'} = $co{'title_short'} = '(no commit message)'; } # remove added spaces @@ -2127,7 +2527,7 @@ sub parse_commit { "--max-count=1", $commit_id, "--", - or die_error(undef, "Open git-rev-list failed"); + or die_error(500, "Open git-rev-list failed"); %co = parse_commit_text(<$fd>, 1); close $fd; @@ -2152,7 +2552,7 @@ sub parse_commits { $commit_id, "--", ($filename ? ($filename) : ()) - or die_error(undef, "Open git-rev-list failed"); + or die_error(500, "Open git-rev-list failed"); while (my $line = <$fd>) { my %co = parse_commit_text($line); push @cos, \%co; @@ -2386,6 +2786,15 @@ sub get_file_owner { return to_utf8($owner); } +# assume that file exists +sub insert_file { + my $filename = shift; + + open my $fd, '<', $filename; + print map { to_utf8($_) } <$fd>; + close $fd; +} + ## ...................................................................... ## mimetype related functions @@ -2514,9 +2923,14 @@ sub git_header_html { $title EOF -# print out each stylesheet that exist + # the stylesheet, favicon etc urls won't work correctly with path_info + # unless we set the appropriate base URL + if ($ENV{'PATH_INFO'}) { + print "\n"; + } + # print out each stylesheet that exist, providing backwards capability + # for those people who defined $stylesheet in a config file if (defined $stylesheet) { -#provides backwards capability for those people who define style sheet in a config file print ''."\n"; } else { foreach my $stylesheet (@stylesheets) { @@ -2574,9 +2988,7 @@ EOF "\n"; if (-f $site_header) { - open (my $fd, $site_header); - print <$fd>; - close $fd; + insert_file($site_header); } print "
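The path_info hunk in git_header_html above emits a base element so that relative stylesheet and favicon URLs keep resolving against the script itself rather than the virtual /project.git/... path. A rough standalone sketch of the same idea follows; it uses CGI.pm's url() to obtain the script URL and is not gitweb's exact markup.

#!/usr/bin/perl
# Rough sketch: when the request carries PATH_INFO (e.g.
# /gitweb.cgi/project.git/shortlog), relative URLs would resolve under
# the virtual path, so declare the script itself as the document base.
use strict;
use warnings;
use CGI;

my $cgi = CGI->new('');
if (defined $ENV{'PATH_INFO'} && $ENV{'PATH_INFO'} ne '') {
	# url(-absolute => 1) yields the script path without the PATH_INFO part
	my $base = $cgi->url(-absolute => 1);
	print '<base href="' . $cgi->escapeHTML($base) . '"/>' . "\n";
}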
    \n" . @@ -2593,7 +3005,7 @@ EOF } print "
    \n"; - my ($have_search) = gitweb_check_feature('search'); + my $have_search = gitweb_check_feature('search'); if (defined $project && $have_search) { if (!defined $searchtext) { $searchtext = ""; @@ -2607,7 +3019,7 @@ EOF $search_hash = "HEAD"; } my $action = $my_uri; - my ($use_pathinfo) = gitweb_check_feature('pathinfo'); + my $use_pathinfo = gitweb_check_feature('pathinfo'); if ($use_pathinfo) { $action .= "/".esc_url($project); } @@ -2663,20 +3075,33 @@ sub git_footer_html { print "
    \n"; # class="page_footer" if (-f $site_footer) { - open (my $fd, $site_footer); - print <$fd>; - close $fd; + insert_file($site_footer); } print "\n" . ""; } +# die_error(, ) +# Example: die_error(404, 'Hash not found') +# By convention, use the following status codes (as defined in RFC 2616): +# 400: Invalid or missing CGI parameters, or +# requested object exists but has wrong type. +# 403: Requested feature (like "pickaxe" or "snapshot") not enabled on +# this server or project. +# 404: Requested object/revision/project doesn't exist. +# 500: The server isn't configured properly, or +# an internal error occurred (e.g. failed assertions caused by bugs), or +# an unknown error occurred (e.g. the git binary died unexpectedly). sub die_error { - my $status = shift || "403 Forbidden"; - my $error = shift || "Malformed query, file missing or permission denied"; - - git_header_html($status); + my $status = shift || 500; + my $error = shift || "Internal server error"; + + my %http_responses = (400 => '400 Bad Request', + 403 => '403 Forbidden', + 404 => '404 Not Found', + 500 => '500 Internal Server Error'); + git_header_html($http_responses{$status}); print <

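The die_error rewrite above replaces free-form status strings with numeric codes mapped onto RFC 2616 status lines. A minimal sketch of that dispatch, assuming a bare text/plain response instead of gitweb's full error page (tiny_die_error is an illustrative name):

#!/usr/bin/perl
# Sketch of the numeric die_error convention: the code selects the HTTP
# status line, anything unmapped falls back to 500.
use strict;
use warnings;
use CGI;

my %http_responses = (
	400 => '400 Bad Request',
	403 => '403 Forbidden',
	404 => '404 Not Found',
	500 => '500 Internal Server Error',
);

sub tiny_die_error {
	my $status = shift || 500;
	my $error  = shift || 'Internal server error';
	my $cgi = CGI->new('');
	print $cgi->header(
		-type   => 'text/plain',
		-status => $http_responses{$status} || $http_responses{500});
	print "$error\n";
	exit;
}

tiny_die_error(404, 'Hash not found');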
    @@ -2711,13 +3136,31 @@ sub git_print_page_nav { } } } + $arg{'tree'}{'hash'} = $treehead if defined $treehead; $arg{'tree'}{'hash_base'} = $treebase if defined $treebase; + my @actions = gitweb_get_feature('actions'); + my %repl = ( + '%' => '%', + 'n' => $project, # project name + 'f' => $git_dir, # project path within filesystem + 'h' => $treehead || '', # current hash ('h' parameter) + 'b' => $treebase || '', # hash base ('hb' parameter) + ); + while (@actions) { + my ($label, $link, $pos) = splice(@actions,0,3); + # insert + @navs = map { $_ eq $pos ? ($_, $label) : $_ } @navs; + # munch munch + $link =~ s/%([%nfhb])/$repl{$1}/g; + $arg{$label}{'_href'} = $link; + } + print "
    \n" . (join " | ", map { $_ eq $current ? - $_ : $cgi->a({-href => href(%{$arg{$_}})}, "$_") + $_ : $cgi->a({-href => ($arg{$_}{_href} ? $arg{$_}{_href} : href(%{$arg{$_}}))}, "$_") } @navs); print "
    \n$extra
    \n" . "
    \n"; @@ -3067,7 +3510,7 @@ sub is_patch_split { sub git_difftree_body { my ($difftree, $hash, @parents) = @_; my ($parent) = $parents[0]; - my ($have_blame) = gitweb_check_feature('blame'); + my $have_blame = gitweb_check_feature('blame'); print "
    \n"; if ($#{$difftree} > 10) { print(($#{$difftree} + 1) . " files changed:\n"); @@ -3520,21 +3963,25 @@ sub git_patchset_body { # . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . . -sub git_project_list_body { - my ($projlist, $order, $from, $to, $extra, $no_header) = @_; - - my ($check_forks) = gitweb_check_feature('forks'); - +# fills project list info (age, description, owner, forks) for each +# project in the list, removing invalid projects from returned list +# NOTE: modifies $projlist, but does not remove entries from it +sub fill_project_list_info { + my ($projlist, $check_forks) = @_; my @projects; + + my $show_ctags = gitweb_check_feature('ctags'); + PROJECT: foreach my $pr (@$projlist) { - my (@aa) = git_get_last_activity($pr->{'path'}); - unless (@aa) { - next; + my (@activity) = git_get_last_activity($pr->{'path'}); + unless (@activity) { + next PROJECT; } - ($pr->{'age'}, $pr->{'age_string'}) = @aa; + ($pr->{'age'}, $pr->{'age_string'}) = @activity; if (!defined $pr->{'descr'}) { my $descr = git_get_project_description($pr->{'path'}) || ""; - $pr->{'descr_long'} = to_utf8($descr); + $descr = to_utf8($descr); + $pr->{'descr_long'} = $descr; $pr->{'descr'} = chop_str($descr, $projects_list_description_width, 5); } if (!defined $pr->{'owner'}) { @@ -3546,66 +3993,98 @@ sub git_project_list_body { ($pname !~ /\/$/) && (-d "$projectroot/$pname")) { $pr->{'forks'} = "-d $projectroot/$pname"; - } - else { + } else { $pr->{'forks'} = 0; } } + $show_ctags and $pr->{'ctags'} = git_get_project_ctags($pr->{'path'}); push @projects, $pr; } + return @projects; +} + +# print 'sort by' element, generating 'sort by $name' replay link +# if that order is not selected +sub print_sort_th { + my ($name, $order, $header) = @_; + $header ||= ucfirst($name); + + if ($order eq $name) { + print "$header\n"; + } else { + print "" . + $cgi->a({-href => href(-replay=>1, order=>$name), + -class => "header"}, $header) . 
+ "\n"; + } +} + +sub git_project_list_body { + # actually uses global variable $project + my ($projlist, $order, $from, $to, $extra, $no_header) = @_; + + my $check_forks = gitweb_check_feature('forks'); + my @projects = fill_project_list_info($projlist, $check_forks); + $order ||= $default_projects_order; $from = 0 unless defined $from; $to = $#projects if (!defined $to || $#projects < $to); + my %order_info = ( + project => { key => 'path', type => 'str' }, + descr => { key => 'descr_long', type => 'str' }, + owner => { key => 'owner', type => 'str' }, + age => { key => 'age', type => 'num' } + ); + my $oi = $order_info{$order}; + if ($oi->{'type'} eq 'str') { + @projects = sort {$a->{$oi->{'key'}} cmp $b->{$oi->{'key'}}} @projects; + } else { + @projects = sort {$a->{$oi->{'key'}} <=> $b->{$oi->{'key'}}} @projects; + } + + my $show_ctags = gitweb_check_feature('ctags'); + if ($show_ctags) { + my %ctags; + foreach my $p (@projects) { + foreach my $ct (keys %{$p->{'ctags'}}) { + $ctags{$ct} += $p->{'ctags'}->{$ct}; + } + } + my $cloud = git_populate_project_tagcloud(\%ctags); + print git_show_project_tagcloud($cloud, 64); + } + print "\n"; unless ($no_header) { print "\n"; if ($check_forks) { print "\n"; } - if ($order eq "project") { - @projects = sort {$a->{'path'} cmp $b->{'path'}} @projects; - print "\n"; - } else { - print "\n"; - } - if ($order eq "descr") { - @projects = sort {$a->{'descr'} cmp $b->{'descr'}} @projects; - print "\n"; - } else { - print "\n"; - } - if ($order eq "owner") { - @projects = sort {$a->{'owner'} cmp $b->{'owner'}} @projects; - print "\n"; - } else { - print "\n"; - } - if ($order eq "age") { - @projects = sort {$a->{'age'} <=> $b->{'age'}} @projects; - print "\n"; - } else { - print "\n"; - } - print "\n" . + print_sort_th('project', $order, 'Project'); + print_sort_th('descr', $order, 'Description'); + print_sort_th('owner', $order, 'Owner'); + print_sort_th('age', $order, 'Last Change'); + print "\n" . # for links "\n"; } my $alternate = 1; + my $tagfilter = $cgi->param('by_tag'); for (my $i = $from; $i <= $to; $i++) { my $pr = $projects[$i]; + + next if $tagfilter and $show_ctags and not grep { lc $_ eq lc $tagfilter } keys %{$pr->{'ctags'}}; + next if $searchtext and not $pr->{'path'} =~ /$searchtext/ + and not $pr->{'descr_long'} =~ /$searchtext/; + # Weed out forks or non-matching entries of search + if ($check_forks) { + my $forkbase = $project; $forkbase ||= ''; $forkbase =~ s#\.git$#/#; + $forkbase="^$forkbase" if $forkbase; + next if not $searchtext and not $tagfilter and $show_ctags + and $pr->{'path'} =~ m#$forkbase.*/.*#; # regexp-safe + } + if ($alternate) { print "\n"; } else { @@ -3922,37 +4401,40 @@ sub git_search_grep_body { ## actions sub git_project_list { - my $order = $cgi->param('o'); + my $order = $input_params{'order'}; if (defined $order && $order !~ m/none|project|descr|owner|age/) { - die_error(undef, "Unknown order parameter"); + die_error(400, "Unknown order parameter"); } my @list = git_get_projects_list(); if (!@list) { - die_error(undef, "No projects found"); + die_error(404, "No projects found"); } git_header_html(); if (-f $home_text) { print "
    \n"; - open (my $fd, $home_text); - print <$fd>; - close $fd; + insert_file($home_text); print "
    \n"; } + print $cgi->startform(-method => "get") . + "

    Search:\n" . + $cgi->textfield(-name => "s", -value => $searchtext) . "\n" . + "

    " . + $cgi->end_form() . "\n"; git_project_list_body(\@list, $order); git_footer_html(); } sub git_forks { - my $order = $cgi->param('o'); + my $order = $input_params{'order'}; if (defined $order && $order !~ m/none|project|descr|owner|age/) { - die_error(undef, "Unknown order parameter"); + die_error(400, "Unknown order parameter"); } my @list = git_get_projects_list($project); if (!@list) { - die_error(undef, "No forks found"); + die_error(404, "No forks found"); } git_header_html(); @@ -4000,7 +4482,7 @@ sub git_summary { my @taglist = git_get_tags_list(16); my @headlist = git_get_heads_list(16); my @forklist; - my ($check_forks) = gitweb_check_feature('forks'); + my $check_forks = gitweb_check_feature('forks'); if ($check_forks) { @forklist = git_get_projects_list($project); @@ -4011,10 +4493,10 @@ sub git_summary { print "
     
    \n"; print "
    Project" . - $cgi->a({-href => href(project=>undef, order=>'project'), - -class => "header"}, "Project") . - "Description" . - $cgi->a({-href => href(project=>undef, order=>'descr'), - -class => "header"}, "Description") . - "Owner" . - $cgi->a({-href => href(project=>undef, order=>'owner'), - -class => "header"}, "Owner") . - "Last Change" . - $cgi->a({-href => href(project=>undef, order=>'age'), - -class => "header"}, "Last Change") . - "
    \n" . - "\n" . - "\n"; + "\n" . + "\n"; if (defined $cd{'rfc2822'}) { - print "\n"; + print "\n"; } # use per project git URL list in $projectroot/$project/cloneurl @@ -4024,19 +4506,32 @@ sub git_summary { @url_list = map { "$_/$project" } @git_base_url_list unless @url_list; foreach my $git_url (@url_list) { next unless $git_url; - print "\n"; + print "\n"; $url_tag = ""; } + + # Tag cloud + my $show_ctags = gitweb_check_feature('ctags'); + if ($show_ctags) { + my $ctags = git_get_project_ctags($project); + my $cloud = git_populate_project_tagcloud($ctags); + print "\n\n"; + } + print "
    description" . esc_html($descr) . "
    owner" . esc_html($owner) . "
    description" . esc_html($descr) . "
    owner" . esc_html($owner) . "
    last change$cd{'rfc2822'}
    last change$cd{'rfc2822'}
    $url_tag$git_url
    Content tags:
    "; + print "
    " unless %$ctags; + print "Add: "; + print "" if %$ctags; + print git_show_project_tagcloud($cloud, 48); + print "
    \n"; - if (-s "$projectroot/$project/README.html") { - if (open my $fd, "$projectroot/$project/README.html") { - print "
    readme
    \n" . - "
    \n"; - print $_ while (<$fd>); - print "\n
    \n"; # class="readme" - close $fd; - } + # If XSS prevention is on, we don't include README.html. + # TODO: Allow a readme in some safe format. + if (!$prevent_xss && -s "$projectroot/$project/README.html") { + print "
    readme
    \n" . + "
    \n"; + insert_file("$projectroot/$project/README.html"); + print "\n
    \n"; # class="readme" } # we need to request one more than 16 (0..15) to check if @@ -4065,10 +4560,10 @@ sub git_summary { if (@forklist) { git_print_header_div('forks'); - git_project_list_body(\@forklist, undef, 0, 15, + git_project_list_body(\@forklist, 'age', 0, 15, $#forklist <= 15 ? undef : $cgi->a({-href => href(action=>"forks")}, "..."), - 'noheader'); + 'no_header'); } git_footer_html(); @@ -4081,7 +4576,7 @@ sub git_tag { my %tag = parse_tag($hash); if (! %tag) { - die_error(undef, "Unknown tag object"); + die_error(404, "Unknown tag object"); } git_print_header_div('commit', esc_html($tag{'name'}), $hash); @@ -4113,30 +4608,34 @@ sub git_tag { git_footer_html(); } -sub git_blame2 { - my $fd; - my $ftype; +sub git_blame { + # permissions + gitweb_check_feature('blame') + or die_error(403, "Blame view not allowed"); - my ($have_blame) = gitweb_check_feature('blame'); - if (!$have_blame) { - die_error('403 Permission denied', "Permission denied"); - } - die_error('404 Not Found', "File name not defined") if (!$file_name); + # error checking + die_error(400, "No file name given") unless $file_name; $hash_base ||= git_get_head_hash($project); - die_error(undef, "Couldn't find base commit") unless ($hash_base); + die_error(404, "Couldn't find base commit") unless $hash_base; my %co = parse_commit($hash_base) - or die_error(undef, "Reading commit failed"); + or die_error(404, "Commit not found"); + my $ftype = "blob"; if (!defined $hash) { $hash = git_get_hash_by_path($hash_base, $file_name, "blob") - or die_error(undef, "Error looking up file"); - } - $ftype = git_get_type($hash); - if ($ftype !~ "blob") { - die_error('400 Bad Request', "Object is not a blob"); + or die_error(404, "Error looking up file"); + } else { + $ftype = git_get_type($hash); + if ($ftype !~ "blob") { + die_error(400, "Object is not a blob"); + } } - open ($fd, "-|", git_cmd(), "blame", '-p', '--', - $file_name, $hash_base) - or die_error(undef, "Open git-blame failed"); + + # run git-blame --porcelain + open my $fd, "-|", git_cmd(), "blame", '-p', + $hash_base, '--', $file_name + or die_error(500, "Open git-blame failed"); + + # page header git_header_html(); my $formats_nav = $cgi->a({-href => href(action=>"blob", -replay=>1)}, @@ -4150,42 +4649,46 @@ sub git_blame2 { git_print_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav); git_print_header_div('commit', esc_html($co{'title'}), $hash_base); git_print_page_path($file_name, $ftype, $hash_base); - my @rev_color = (qw(light2 dark2)); + + # page body + my @rev_color = qw(light2 dark2); my $num_colors = scalar(@rev_color); my $current_color = 0; - my $last_rev; + my %metainfo = (); + print < HTML - my %metainfo = (); - while (1) { - $_ = <$fd>; - last unless defined $_; + LINE: + while (my $line = <$fd>) { + chomp $line; + # the header: [] + # no for subsequent lines in group of lines my ($full_rev, $orig_lineno, $lineno, $group_size) = - /^([0-9a-f]{40}) (\d+) (\d+)(?: (\d+))?$/; + ($line =~ /^([0-9a-f]{40}) (\d+) (\d+)(?: (\d+))?$/); if (!exists $metainfo{$full_rev}) { $metainfo{$full_rev} = {}; } my $meta = $metainfo{$full_rev}; - while (<$fd>) { - last if (s/^\t//); - if (/^(\S+) (.*)$/) { + my $data; + while ($data = <$fd>) { + chomp $data; + last if ($data =~ s/^\t//); # contents of line + if ($data =~ /^(\S+) (.*)$/) { $meta->{$1} = $2; } } - my $data = $_; - chomp $data; - my $rev = substr($full_rev, 0, 8); + my $short_rev = substr($full_rev, 0, 8); my $author = $meta->{'author'}; - my %date = parse_date($meta->{'author-time'}, - 
$meta->{'author-tz'}); + my %date = + parse_date($meta->{'author-time'}, $meta->{'author-tz'}); my $date = $date{'iso-tz'}; if ($group_size) { - $current_color = ++$current_color % $num_colors; + $current_color = ($current_color + 1) % $num_colors; } - print "\n"; + print "\n"; if ($group_size) { print "\n"; } - open (my $dd, "-|", git_cmd(), "rev-parse", "$full_rev^") - or die_error(undef, "Open git-rev-parse failed"); - my $parent_commit = <$dd>; - close $dd; - chomp($parent_commit); + my $parent_commit; + if (!exists $meta->{'parent'}) { + open (my $dd, "-|", git_cmd(), "rev-parse", "$full_rev^") + or die_error(500, "Open git-rev-parse failed"); + $parent_commit = <$dd>; + close $dd; + chomp($parent_commit); + $meta->{'parent'} = $parent_commit; + } else { + $parent_commit = $meta->{'parent'}; + } my $blamed = href(action => 'blame', file_name => $meta->{'filename'}, hash_base => $parent_commit); print ""; @@ -4218,103 +4726,8 @@ HTML print ""; close $fd or print "Reading blob failed\n"; - git_footer_html(); -} - -sub git_blame { - my $fd; - - my ($have_blame) = gitweb_check_feature('blame'); - if (!$have_blame) { - die_error('403 Permission denied', "Permission denied"); - } - die_error('404 Not Found', "File name not defined") if (!$file_name); - $hash_base ||= git_get_head_hash($project); - die_error(undef, "Couldn't find base commit") unless ($hash_base); - my %co = parse_commit($hash_base) - or die_error(undef, "Reading commit failed"); - if (!defined $hash) { - $hash = git_get_hash_by_path($hash_base, $file_name, "blob") - or die_error(undef, "Error lookup file"); - } - open ($fd, "-|", git_cmd(), "annotate", '-l', '-t', '-r', $file_name, $hash_base) - or die_error(undef, "Open git-annotate failed"); - git_header_html(); - my $formats_nav = - $cgi->a({-href => href(action=>"blob", hash=>$hash, hash_base=>$hash_base, file_name=>$file_name)}, - "blob") . - " | " . - $cgi->a({-href => href(action=>"history", hash=>$hash, hash_base=>$hash_base, file_name=>$file_name)}, - "history") . - " | " . - $cgi->a({-href => href(action=>"blame", file_name=>$file_name)}, - "HEAD"); - git_print_page_nav('','', $hash_base,$co{'tree'},$hash_base, $formats_nav); - git_print_header_div('commit', esc_html($co{'title'}), $hash_base); - git_print_page_path($file_name, 'blob', $hash_base); - print "
    \n"; - print < -
    - - - - - - -HTML - my @line_class = (qw(light dark)); - my $line_class_len = scalar (@line_class); - my $line_class_num = $#line_class; - while (my $line = <$fd>) { - my $long_rev; - my $short_rev; - my $author; - my $time; - my $lineno; - my $data; - my $age; - my $age_str; - my $age_class; - chomp $line; - $line_class_num = ($line_class_num + 1) % $line_class_len; - - if ($line =~ m/^([0-9a-fA-F]{40})\t\(\s*([^\t]+)\t(\d+) [+-]\d\d\d\d\t(\d+)\)(.*)$/) { - $long_rev = $1; - $author = $2; - $time = $3; - $lineno = $4; - $data = $5; - } else { - print qq( \n); - next; - } - $short_rev = substr ($long_rev, 0, 8); - $age = time () - $time; - $age_str = age_string ($age); - $age_str =~ s/ / /g; - $age_class = age_class($age); - $author = esc_html ($author); - $author =~ s/ / /g; - - $data = untabify($data); - $data = esc_html ($data); - - print < - - - - - - -HTML - } # while (my $line = <$fd>) - print "
    CommitLineData
    a({-href => href(action=>"commit", hash=>$full_rev, file_name=>$file_name)}, - esc_html($rev)); + esc_html($short_rev)); print ""; print $cgi->a({ -href => "$blamed#l$orig_lineno", - -id => "l$lineno", -class => "linenr" }, esc_html($lineno)); print "
    CommitAgeAuthorLineData
    Unable to parse: $line
    $long_rev)}" class="text">$short_rev..$age_str$author$lineno$data
    \n\n"; - close $fd - or print "Reading blob failed.\n"; - print "
    "; + # page footer git_footer_html(); } @@ -4352,9 +4765,9 @@ sub git_blob_plain { if (defined $file_name) { my $base = $hash_base || git_get_head_hash($project); $hash = git_get_hash_by_path($base, $file_name, "blob") - or die_error(undef, "Error lookup file"); + or die_error(404, "Cannot find file"); } else { - die_error(undef, "No file name defined"); + die_error(400, "No file name defined"); } } elsif ($hash =~ m/^[0-9a-fA-F]{40}$/) { # blobs defined by non-textual hash id's can be cached @@ -4362,7 +4775,7 @@ sub git_blob_plain { } open my $fd, "-|", git_cmd(), "cat-file", "blob", $hash - or die_error(undef, "Open git-cat-file blob '$hash' failed"); + or die_error(500, "Open git-cat-file blob '$hash' failed"); # content-type (can include charset) $type = blob_contenttype($fd, $file_name, $type); @@ -4375,10 +4788,21 @@ sub git_blob_plain { $save_as .= '.txt'; } + # With XSS prevention on, blobs of all types except a few known safe + # ones are served with "Content-Disposition: attachment" to make sure + # they don't run in our security domain. For certain image types, + # blob view writes an tag referring to blob_plain view, and we + # want to be sure not to break that by serving the image as an + # attachment (though Firefox 3 doesn't seem to care). + my $sandbox = $prevent_xss && + $type !~ m!^(?:text/plain|image/(?:gif|png|jpeg))$!; + print $cgi->header( -type => $type, -expires => $expires, - -content_disposition => 'inline; filename="' . $save_as . '"'); + -content_disposition => + ($sandbox ? 'attachment' : 'inline') + . '; filename="' . $save_as . '"'); undef $/; binmode STDOUT, ':raw'; print <$fd>; @@ -4394,18 +4818,18 @@ sub git_blob { if (defined $file_name) { my $base = $hash_base || git_get_head_hash($project); $hash = git_get_hash_by_path($base, $file_name, "blob") - or die_error(undef, "Error lookup file"); + or die_error(404, "Cannot find file"); } else { - die_error(undef, "No file name defined"); + die_error(400, "No file name defined"); } } elsif ($hash =~ m/^[0-9a-fA-F]{40}$/) { # blobs defined by non-textual hash id's can be cached $expires = "+1d"; } - my ($have_blame) = gitweb_check_feature('blame'); + my $have_blame = gitweb_check_feature('blame'); open my $fd, "-|", git_cmd(), "cat-file", "blob", $hash - or die_error(undef, "Couldn't cat $file_name, $hash"); + or die_error(500, "Couldn't cat $file_name, $hash"); my $mimetype = blob_mimetype($fd, $file_name); if ($mimetype !~ m!^(?:text/|image/(?:gif|png|jpeg)$)! && -B $fd) { close $fd; @@ -4484,18 +4908,19 @@ sub git_tree { $hash = $hash_base; } } + die_error(404, "No such tree") unless defined($hash); $/ = "\0"; open my $fd, "-|", git_cmd(), "ls-tree", '-z', $hash - or die_error(undef, "Open git-ls-tree failed"); + or die_error(500, "Open git-ls-tree failed"); my @entries = map { chomp; $_ } <$fd>; - close $fd or die_error(undef, "Reading tree failed"); + close $fd or die_error(404, "Reading tree failed"); $/ = "\n"; my $refs = git_get_references(); my $ref = format_ref_marker($refs, $hash_base); git_header_html(); my $basedir = ''; - my ($have_blame) = gitweb_check_feature('blame'); + my $have_blame = gitweb_check_feature('blame'); if (defined $hash_base && (my %co = parse_commit($hash_base))) { my @views_nav = (); if (defined $file_name) { @@ -4524,8 +4949,8 @@ sub git_tree { if ($basedir ne '' && substr($basedir, -1) ne '/') { $basedir .= '/'; } + git_print_page_path($file_name, 'tree', $hash_base); } - git_print_page_path($file_name, 'tree', $hash_base); print "
    \n"; print "\n"; my $alternate = 1; @@ -4573,21 +4998,18 @@ sub git_tree { } sub git_snapshot { - my @supported_fmts = gitweb_check_feature('snapshot'); - @supported_fmts = filter_snapshot_fmts(@supported_fmts); - - my $format = $cgi->param('sf'); - if (!@supported_fmts) { - die_error('403 Permission denied', "Permission denied"); + my $format = $input_params{'snapshot_format'}; + if (!@snapshot_fmts) { + die_error(403, "Snapshots not allowed"); } # default to first supported snapshot format - $format ||= $supported_fmts[0]; + $format ||= $snapshot_fmts[0]; if ($format !~ m/^[a-z0-9]+$/) { - die_error(undef, "Invalid snapshot format parameter"); + die_error(400, "Invalid snapshot format parameter"); } elsif (!exists($known_snapshot_formats{$format})) { - die_error(undef, "Unknown snapshot format"); - } elsif (!grep($_ eq $format, @supported_fmts)) { - die_error(undef, "Unsupported snapshot format"); + die_error(400, "Unknown snapshot format"); + } elsif (!grep($_ eq $format, @snapshot_fmts)) { + die_error(403, "Unsupported snapshot format"); } if (!defined $hash) { @@ -4615,7 +5037,7 @@ sub git_snapshot { -status => '200 OK'); open my $fd, "-|", $cmd - or die_error(undef, "Execute git-archive failed"); + or die_error(500, "Execute git-archive failed"); binmode STDOUT, ':raw'; print <$fd>; binmode STDOUT, ':utf8'; # as set at the beginning of gitweb.cgi @@ -4636,6 +5058,15 @@ sub git_log { my $paging_nav = format_paging_nav('log', $hash, $head, $page, $#commitlist >= 100); + my ($patch_max) = gitweb_get_feature('patches'); + if ($patch_max) { + if ($patch_max < 0 || @commitlist <= $patch_max) { + $paging_nav .= " ⋅ " . + $cgi->a({-href => href(action=>"patches", -replay=>1)}, + "patches"); + } + } + git_header_html(); git_print_page_nav('log','', $hash,undef,undef, $paging_nav); @@ -4683,10 +5114,8 @@ sub git_log { sub git_commit { $hash ||= $hash_base || "HEAD"; - my %co = parse_commit($hash); - if (!%co) { - die_error(undef, "Unknown commit object"); - } + my %co = parse_commit($hash) + or die_error(404, "Unknown commit object"); my %ad = parse_date($co{'author_epoch'}, $co{'author_tz'}); my %cd = parse_date($co{'committer_epoch'}, $co{'committer_tz'}); @@ -4717,6 +5146,11 @@ sub git_commit { } @$parents ) . ')'; } + if (gitweb_check_feature('patches')) { + $formats_nav .= " | " . + $cgi->a({-href => href(action=>"patch", -replay=>1)}, + "patch"); + } if (!defined $parent) { $parent = "--root"; @@ -4726,9 +5160,9 @@ sub git_commit { @diff_opts, (@$parents <= 1 ? $parent : '-c'), $hash, "--" - or die_error(undef, "Open git-diff-tree failed"); + or die_error(500, "Open git-diff-tree failed"); @difftree = map { chomp; $_ } <$fd>; - close $fd or die_error(undef, "Reading git-diff-tree failed"); + close $fd or die_error(404, "Reading git-diff-tree failed"); # non-textual hash id's can be cached my $expires; @@ -4821,33 +5255,33 @@ sub git_object { open my $fd, "-|", quote_command( git_cmd(), 'cat-file', '-t', $object_id) . 
' 2> /dev/null' - or die_error('404 Not Found', "Object does not exist"); + or die_error(404, "Object does not exist"); $type = <$fd>; chomp $type; close $fd - or die_error('404 Not Found', "Object does not exist"); + or die_error(404, "Object does not exist"); # - hash_base and file_name } elsif ($hash_base && defined $file_name) { $file_name =~ s,/+$,,; system(git_cmd(), "cat-file", '-e', $hash_base) == 0 - or die_error('404 Not Found', "Base object does not exist"); + or die_error(404, "Base object does not exist"); # here errors should not hapen open my $fd, "-|", git_cmd(), "ls-tree", $hash_base, "--", $file_name - or die_error(undef, "Open git-ls-tree failed"); + or die_error(500, "Open git-ls-tree failed"); my $line = <$fd>; close $fd; #'100644 blob 0fa3f3a66fb6a137f6ec2c19351ed4d807070ffa panic.c' unless ($line && $line =~ m/^([0-9]+) (.+) ([0-9a-fA-F]{40})\t/) { - die_error('404 Not Found', "File or directory for given base does not exist"); + die_error(404, "File or directory for given base does not exist"); } $type = $2; $hash = $3; } else { - die_error('404 Not Found', "Not enough information to find object"); + die_error(400, "Not enough information to find object"); } print $cgi->redirect(-uri => href(action=>$type, -full=>1, @@ -4872,12 +5306,12 @@ sub git_blobdiff { open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, $hash_parent_base, $hash_base, "--", (defined $file_parent ? $file_parent : ()), $file_name - or die_error(undef, "Open git-diff-tree failed"); + or die_error(500, "Open git-diff-tree failed"); @difftree = map { chomp; $_ } <$fd>; close $fd - or die_error(undef, "Reading git-diff-tree failed"); + or die_error(404, "Reading git-diff-tree failed"); @difftree - or die_error('404 Not Found', "Blob diff not found"); + or die_error(404, "Blob diff not found"); } elsif (defined $hash && $hash =~ /[0-9a-fA-F]{40}/) { @@ -4886,23 +5320,23 @@ sub git_blobdiff { # read filtered raw output open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, $hash_parent_base, $hash_base, "--" - or die_error(undef, "Open git-diff-tree failed"); + or die_error(500, "Open git-diff-tree failed"); @difftree = # ':100644 100644 03b21826... 3b93d5e7... M ls-files.c' # $hash == to_id grep { /^:[0-7]{6} [0-7]{6} [0-9a-fA-F]{40} $hash/ } map { chomp; $_ } <$fd>; close $fd - or die_error(undef, "Reading git-diff-tree failed"); + or die_error(404, "Reading git-diff-tree failed"); @difftree - or die_error('404 Not Found', "Blob diff not found"); + or die_error(404, "Blob diff not found"); } else { - die_error('404 Not Found', "Missing one of the blob diff parameters"); + die_error(400, "Missing one of the blob diff parameters"); } if (@difftree > 1) { - die_error('404 Not Found', "Ambiguous blob diff specification"); + die_error(400, "Ambiguous blob diff specification"); } %diffinfo = parse_difftree_raw_line($difftree[0]); @@ -4923,46 +5357,12 @@ sub git_blobdiff { '-p', ($format eq 'html' ? "--full-index" : ()), $hash_parent_base, $hash_base, "--", (defined $file_parent ? 
$file_parent : ()), $file_name - or die_error(undef, "Open git-diff-tree failed"); + or die_error(500, "Open git-diff-tree failed"); } - # old/legacy style URI - if (!%diffinfo && # if new style URI failed - defined $hash && defined $hash_parent) { - # fake git-diff-tree raw output - $diffinfo{'from_mode'} = $diffinfo{'to_mode'} = "blob"; - $diffinfo{'from_id'} = $hash_parent; - $diffinfo{'to_id'} = $hash; - if (defined $file_name) { - if (defined $file_parent) { - $diffinfo{'status'} = '2'; - $diffinfo{'from_file'} = $file_parent; - $diffinfo{'to_file'} = $file_name; - } else { # assume not renamed - $diffinfo{'status'} = '1'; - $diffinfo{'from_file'} = $file_name; - $diffinfo{'to_file'} = $file_name; - } - } else { # no filename given - $diffinfo{'status'} = '2'; - $diffinfo{'from_file'} = $hash_parent; - $diffinfo{'to_file'} = $hash; - } - - # non-textual hash id's can be cached - if ($hash =~ m/^[0-9a-fA-F]{40}$/ && - $hash_parent =~ m/^[0-9a-fA-F]{40}$/) { - $expires = '+1d'; - } - - # open patch output - open $fd, "-|", git_cmd(), "diff", @diff_opts, - '-p', ($format eq 'html' ? "--full-index" : ()), - $hash_parent, $hash, "--" - or die_error(undef, "Open git-diff failed"); - } else { + # old/legacy style URI -- not generated anymore since 1.4.3. + if (!%diffinfo) { die_error('404 Not Found', "Missing one of the blob diff parameters") - unless %diffinfo; } # header @@ -4994,7 +5394,7 @@ sub git_blobdiff { print "X-Git-Url: " . $cgi->self_url() . "\n\n"; } else { - die_error(undef, "Unknown blobdiff format"); + die_error(400, "Unknown blobdiff format"); } # patch @@ -5027,13 +5427,18 @@ sub git_blobdiff_plain { } sub git_commitdiff { - my $format = shift || 'html'; - $hash ||= $hash_base || "HEAD"; - my %co = parse_commit($hash); - if (!%co) { - die_error(undef, "Unknown commit object"); + my %params = @_; + my $format = $params{-format} || 'html'; + + my ($patch_max) = gitweb_get_feature('patches'); + if ($format eq 'patch') { + die_error(403, "Patch view not allowed") unless $patch_max; } + $hash ||= $hash_base || "HEAD"; + my %co = parse_commit($hash) + or die_error(404, "Unknown commit object"); + # choose format for commitdiff for merge if (! defined $hash_parent && @{$co{'parents'}} > 1) { $hash_parent = '--cc'; @@ -5044,6 +5449,11 @@ sub git_commitdiff { $formats_nav = $cgi->a({-href => href(action=>"commitdiff_plain", -replay=>1)}, "raw"); + if ($patch_max) { + $formats_nav .= " | " . + $cgi->a({-href => href(action=>"patch", -replay=>1)}, + "patch"); + } if (defined $hash_parent && $hash_parent ne '-c' && $hash_parent ne '--cc') { @@ -5114,7 +5524,7 @@ sub git_commitdiff { open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, "--no-commit-id", "--patch-with-raw", "--full-index", $hash_parent_param, $hash, "--" - or die_error(undef, "Open git-diff-tree failed"); + or die_error(500, "Open git-diff-tree failed"); while (my $line = <$fd>) { chomp $line; @@ -5126,10 +5536,34 @@ sub git_commitdiff { } elsif ($format eq 'plain') { open $fd, "-|", git_cmd(), "diff-tree", '-r', @diff_opts, '-p', $hash_parent_param, $hash, "--" - or die_error(undef, "Open git-diff-tree failed"); - + or die_error(500, "Open git-diff-tree failed"); + } elsif ($format eq 'patch') { + # For commit ranges, we limit the output to the number of + # patches specified in the 'patches' feature. + # For single commits, we limit the output to a single patch, + # diverging from the git-format-patch default. 
+ my @commit_spec = (); + if ($hash_parent) { + if ($patch_max > 0) { + push @commit_spec, "-$patch_max"; + } + push @commit_spec, '-n', "$hash_parent..$hash"; + } else { + if ($params{-single}) { + push @commit_spec, '-1'; + } else { + if ($patch_max > 0) { + push @commit_spec, "-$patch_max"; + } + push @commit_spec, "-n"; + } + push @commit_spec, '--root', $hash; + } + open $fd, "-|", git_cmd(), "format-patch", '--encoding=utf8', + '--stdout', @commit_spec + or die_error(500, "Open git-format-patch failed"); } else { - die_error(undef, "Unknown commitdiff format"); + die_error(400, "Unknown commitdiff format"); } # non-textual hash id's can be cached @@ -5176,6 +5610,14 @@ sub git_commitdiff { print to_utf8($line) . "\n"; } print "---\n\n"; + } elsif ($format eq 'patch') { + my $filename = basename($project) . "-$hash.patch"; + + print $cgi->header( + -type => 'text/plain', + -charset => 'utf-8', + -expires => $expires, + -content_disposition => 'inline; filename="' . "$filename" . '"'); } # write patch @@ -5197,11 +5639,25 @@ sub git_commitdiff { print <$fd>; close $fd or print "Reading git-diff-tree failed\n"; + } elsif ($format eq 'patch') { + local $/ = undef; + print <$fd>; + close $fd + or print "Reading git-format-patch failed\n"; } } sub git_commitdiff_plain { - git_commitdiff('plain'); + git_commitdiff(-format => 'plain'); +} + +# format-patch-style patches +sub git_patch { + git_commitdiff(-format => 'patch', -single=> 1); +} + +sub git_patches { + git_commitdiff(-format => 'patch'); } sub git_history { @@ -5212,19 +5668,15 @@ sub git_history { $page = 0; } my $ftype; - my %co = parse_commit($hash_base); - if (!%co) { - die_error(undef, "Unknown commit object"); - } + my %co = parse_commit($hash_base) + or die_error(404, "Unknown commit object"); my $refs = git_get_references(); my $limit = sprintf("--max-count=%i", (100 * ($page+1))); my @commitlist = parse_commits($hash_base, 101, (100 * $page), - $file_name, "--full-history"); - if (!@commitlist) { - die_error('404 Not Found', "No such file or directory on given branch"); - } + $file_name, "--full-history") + or die_error(404, "No such file or directory on given branch"); if (!defined $hash && defined $file_name) { # some commits could have deleted file in question, @@ -5238,7 +5690,7 @@ sub git_history { $ftype = git_get_type($hash); } if (!defined $ftype) { - die_error(undef, "Unknown type of object"); + die_error(500, "Unknown type of object"); } my $paging_nav = ''; @@ -5276,19 +5728,16 @@ sub git_history { } sub git_search { - my ($have_search) = gitweb_check_feature('search'); - if (!$have_search) { - die_error('403 Permission denied', "Permission denied"); - } + gitweb_check_feature('search') or die_error(403, "Search is disabled"); if (!defined $searchtext) { - die_error(undef, "Text field empty"); + die_error(400, "Text field is empty"); } if (!defined $hash) { $hash = git_get_head_hash($project); } my %co = parse_commit($hash); if (!%co) { - die_error(undef, "Unknown commit object"); + die_error(404, "Unknown commit object"); } if (!defined $page) { $page = 0; @@ -5298,16 +5747,12 @@ sub git_search { if ($searchtype eq 'pickaxe') { # pickaxe may take all resources of your box and run for several minutes # with every query - so decide by yourself how public you make this feature - my ($have_pickaxe) = gitweb_check_feature('pickaxe'); - if (!$have_pickaxe) { - die_error('403 Permission denied', "Permission denied"); - } + gitweb_check_feature('pickaxe') + or die_error(403, "Pickaxe is disabled"); } if ($searchtype eq 
'grep') { - my ($have_grep) = gitweb_check_feature('grep'); - if (!$have_grep) { - die_error('403 Permission denied', "Permission denied"); - } + gitweb_check_feature('grep') + or die_error(403, "Grep is disabled"); } git_header_html(); @@ -5511,7 +5956,7 @@ insensitive).
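The new 'patch' and 'patches' actions a few hunks above build an argument list for git-format-patch: commit ranges get "-<max> -n <parent>..<hash>", while a single commit gets "-1 --root <hash>". A sketch of that assembly; format_patch_args is an illustrative helper, and the command line is only printed rather than opened as a pipe:

#!/usr/bin/perl
# Assemble git-format-patch arguments for the 'patch'/'patches' views.
use strict;
use warnings;

sub format_patch_args {
	my ($hash, $hash_parent, $patch_max, $single) = @_;
	my @spec;
	if ($hash_parent) {
		push @spec, "-$patch_max" if $patch_max > 0;
		push @spec, '-n', "$hash_parent..$hash";
	} elsif ($single) {
		push @spec, '-1', '--root', $hash;
	} else {
		push @spec, "-$patch_max" if $patch_max > 0;
		push @spec, '-n', '--root', $hash;
	}
	return ('git', 'format-patch', '--encoding=utf8', '--stdout', @spec);
}

print join(' ', format_patch_args('HEAD', 'HEAD~3', 16, 0)), "\n";
print join(' ', format_patch_args('HEAD', undef,    16, 1)), "\n";
# prints:
#   git format-patch --encoding=utf8 --stdout -16 -n HEAD~3..HEAD
#   git format-patch --encoding=utf8 --stdout -1 --root HEAD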

    commit
    The commit messages and authorship information will be scanned for the given pattern.
    EOT - my ($have_grep) = gitweb_check_feature('grep'); + my $have_grep = gitweb_check_feature('grep'); if ($have_grep) { print <grep @@ -5528,7 +5973,7 @@ EOT
    committer
    Name and e-mail of the committer and date of commit will be scanned for the given pattern.
    EOT - my ($have_pickaxe) = gitweb_check_feature('pickaxe'); + my $have_pickaxe = gitweb_check_feature('pickaxe'); if ($have_pickaxe) { print <pickaxe @@ -5552,7 +5997,11 @@ sub git_shortlog { } my $refs = git_get_references(); - my @commitlist = parse_commits($hash, 101, (100 * $page)); + my $commit_hash = $hash; + if (defined $hash_parent) { + $commit_hash = "$hash_parent..$hash"; + } + my @commitlist = parse_commits($commit_hash, 101, (100 * $page)); my $paging_nav = format_paging_nav('shortlog', $hash, $head, $page, $#commitlist >= 100); my $next_link = ''; @@ -5561,6 +6010,14 @@ sub git_shortlog { $cgi->a({-href => href(-replay=>1, page=>$page+1), -accesskey => "n", -title => "Alt-n"}, "next"); } + my $patch_max = gitweb_check_feature('patches'); + if ($patch_max) { + if ($patch_max < 0 || @commitlist <= $patch_max) { + $paging_nav .= " ⋅ " . + $cgi->a({-href => href(action=>"patches", -replay=>1)}, + "patches"); + } + } git_header_html(); git_print_page_nav('shortlog','', $hash,$hash,$hash, $paging_nav); @@ -5576,12 +6033,12 @@ sub git_shortlog { sub git_feed { my $format = shift || 'atom'; - my ($have_blame) = gitweb_check_feature('blame'); + my $have_blame = gitweb_check_feature('blame'); # Atom: http://www.atomenabled.org/developers/syndication/ # RSS: http://www.notestips.com/80256B3A007F2692/1/NAMO5P9UPQ if ($format ne 'rss' && $format ne 'atom') { - die_error(undef, "Unknown web feed format"); + die_error(400, "Unknown web feed format"); } # log/feed of current (HEAD) branch, log of given branch, history of file/directory @@ -5598,7 +6055,25 @@ sub git_feed { } if (defined($commitlist[0])) { %latest_commit = %{$commitlist[0]}; - %latest_date = parse_date($latest_commit{'author_epoch'}); + my $latest_epoch = $latest_commit{'committer_epoch'}; + %latest_date = parse_date($latest_epoch); + my $if_modified = $cgi->http('IF_MODIFIED_SINCE'); + if (defined $if_modified) { + my $since; + if (eval { require HTTP::Date; 1; }) { + $since = HTTP::Date::str2time($if_modified); + } elsif (eval { require Time::ParseDate; 1; }) { + $since = Time::ParseDate::parsedate($if_modified, GMT => 1); + } + if (defined $since && $latest_epoch <= $since) { + print $cgi->header( + -type => $content_type, + -charset => 'utf-8', + -last_modified => $latest_date{'rfc2822'}, + -status => '304 Not Modified'); + return; + } + } print $cgi->header( -type => $content_type, -charset => 'utf-8', @@ -5657,7 +6132,24 @@ XML print "$title\n" . "$alt_url\n" . "$descr\n" . - "en\n"; + "en\n" . + # project owner is responsible for 'editorial' content + "$owner\n"; + if (defined $logo || defined $favicon) { + # prefer the logo to the favicon, since RSS + # doesn't allow both + my $img = esc_url($logo || $favicon); + print "\n" . + "$img\n" . + "$title\n" . + "$alt_url\n" . 
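git_feed above now honours If-Modified-Since and answers 304 Not Modified when the newest commit is older than the client's cached copy. A condensed sketch of that exchange, assuming HTTP::Date is available and using a hard-coded epoch in place of the latest committer date:

#!/usr/bin/perl
# Conditional GET for a feed: compare the client's If-Modified-Since
# header with the newest commit and short-circuit with 304 when possible.
use strict;
use warnings;
use CGI;
use HTTP::Date qw(str2time time2str);

my $cgi = CGI->new('');
my $latest_epoch = 1239457763;    # placeholder for the committer epoch

my $if_modified = $cgi->http('IF_MODIFIED_SINCE');
if (defined $if_modified) {
	my $since = str2time($if_modified);
	if (defined $since && $latest_epoch <= $since) {
		print $cgi->header(-status        => '304 Not Modified',
		                   -last_modified => time2str($latest_epoch));
		exit;
	}
}
print $cgi->header(-type          => 'text/xml',
                   -charset       => 'utf-8',
                   -last_modified => time2str($latest_epoch));
# ... feed body would follow here ...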
+ "\n"; + } + if (%latest_date) { + print "$latest_date{'rfc2822'}\n"; + print "$latest_date{'rfc2822'}\n"; + } + print "gitweb v.$version/$git_version\n"; } elsif ($format eq 'atom') { print < @@ -5684,6 +6176,7 @@ XML } else { print "$latest_date{'iso-8601'}\n"; } + print "gitweb\n"; } # contents @@ -5805,7 +6298,11 @@ sub git_atom { sub git_opml { my @list = git_get_projects_list(); - print $cgi->header(-type => 'text/xml', -charset => 'utf-8'); + print $cgi->header( + -type => 'text/xml', + -charset => 'utf-8', + -content_disposition => 'inline; filename="opml.xml"'); + print < @@ -5829,8 +6326,8 @@ XML } my $path = esc_html(chop_str($proj{'path'}, 25, 5)); - my $rss = "$my_url?p=$proj{'path'};a=rss"; - my $html = "$my_url?p=$proj{'path'};a=summary"; + my $rss = href('project' => $proj{'path'}, 'action' => 'rss', -full => 1); + my $html = href('project' => $proj{'path'}, 'action' => 'summary', -full => 1); print "\n"; } print < +// Copyright 2008, 2009 Bernie Innocenti +// +// FIXME: The following JS snippet is harder to pass than ACID 3 + +var guru = document.getElementById('guru'); + +// Firefox 2.0 doesn't take border-color, but returns border-top-color fine +var color = document.defaultView.getComputedStyle(guru,null).getPropertyValue('border-top-color'); + +function guruOn() { + guru.style.setProperty('border-color', color, ''); + setTimeout('guruOff()', 1000); +} +function guruOff() { + guru.style.setProperty('border-color', '#000000', ''); + setTimeout('guruOn()', 1000); +} +// Safari 2.0 returns this rgba crap +// Konqueror 3.5.6 doesn't seem to support computed properties +if (color && color != 'rgba(0, 0, 0, 0)') { + //window.alert("enabled! color='" + color + "'"); + guruOn(); +} + diff --git a/macros/CgiVar.py b/macros/CgiVar.py new file mode 100755 index 0000000..9199514 --- /dev/null +++ b/macros/CgiVar.py @@ -0,0 +1,7 @@ +def _macro_CgiVar(argv): + return { + 'REMOTE_USER': remote_user(), + 'REMOTE_HOST': get_hostname(remote_host()), + 'REMOTE_ADDR': remote_host(), + 'QUERY_STRING': relative_url(query_string()), + }[argv[1]] diff --git a/macros/HttpPost.py b/macros/HttpPost.py new file mode 100755 index 0000000..ce61f79 --- /dev/null +++ b/macros/HttpPost.py @@ -0,0 +1,6 @@ +def _macro_HttpPost(argv): + if argv[1] in form: + return form[argv[1]].value + elif len(argv) > 2: + return argv[2] # default value + return '' diff --git a/macros/Include.py b/macros/Include.py deleted file mode 100755 index d46faf2..0000000 --- a/macros/Include.py +++ /dev/null @@ -1,3 +0,0 @@ -def _macro_Include(argv): - Page(argv[1]).send_naked() - return '' diff --git a/macros/WordIndex.py b/macros/WordIndex.py index 613efb5..6438d0b 100644 --- a/macros/WordIndex.py +++ b/macros/WordIndex.py @@ -17,15 +17,15 @@ def _macro_WordIndex(argv): for word in all_words: letter = word[0].lower() if letter != last_letter: - s = s + ';

    %s

    ' % (letter, letter) + s += '

    %s

    ' % (letter, letter) last_letter = letter - s = s + '%s
      ' % word + s += '%s
        ' % word links = map[word] links.sort() last_page = None for name in links: if name == last_page: continue - s = s + '
      • ' + Page(name).link_to() - s = s + '
      ' + s += '
    • ' + link_tag(name) + '
    • ' + s += '
    ' return s diff --git a/wiki_apache.conf b/wiki_apache.conf index a4725ef..ba7b90e 100644 --- a/wiki_apache.conf +++ b/wiki_apache.conf @@ -3,6 +3,3 @@ ScriptAlias /wiki /home/bernie/public_html/wiki/geekigeeki.py # Path to gitweb script to browse the wiki ScriptAlias /wikigit /home/bernie/public_html/wiki/gitweb.cgi - -# Data files (optional) -Alias /wikidata /home/bernie/public_html/wiki