X-Git-Url: http://git.tremily.us/?p=ikiwiki.git;a=blobdiff_plain;f=ikiwiki;h=e590518607927001c9ccd2a541372890a867dbd5;hp=9aaeed86ca64ae51ae535ab0928c563c3e28eaa1;hb=965afd875cd168713e9351d3c4c992c31f0bea0a;hpb=035a4f31b11a7aad1074ee650cf5ff3b0d15691a

diff --git a/ikiwiki b/ikiwiki
index 9aaeed86c..e59051860 100755
--- a/ikiwiki
+++ b/ikiwiki
@@ -6,42 +6,39 @@ use File::Find;
 use Memoize;
 use File::Spec;
 
+$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
+
 BEGIN {
 	$blosxom::version="is a proper perl module too much to ask?";
 	do "/usr/bin/markdown";
 }
 
-memoize('pagename');
-memoize('bestlink');
+my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
+    %pagesources);
+my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
+my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;
+my $wiki_file_prune_regexp=qr!((^|/).svn/|\.\.|^\.|\/\.|\.html?$)!;
+my $verbose=0;
+my $wikiname="wiki";
+my $default_pagetype=".mdwn";
+my $cgi=0;
+my $url="";
+my $cgiurl="";
+my $svn=1;
 
 sub usage {
 	die "usage: ikiwiki [options] source dest\n";
 }
 
-my $link=qr/\[\[([^\s]+)\]\]/;
-my $verbose=0;
-my $rebuild=0;
-my $wikiname="wiki";
-if (grep /^-/, @ARGV) {
-	eval {use Getopt::Long};
-	GetOptions(
-		"wikiname=s" => \$wikiname,
-		"verbose|v" => \$verbose,
-		"rebuild" => \$rebuild,
-	) || usage();
-}
-usage() unless @ARGV == 2;
-my ($srcdir) = shift =~ /(.*)/; # untaint
-my ($destdir) = shift =~ /(.*)/; # untaint
-
-my %links;
-my %oldlinks;
-my %oldpagemtime;
-my %renderedfiles;
-my %pagesources;
-
 sub error ($) {
-	die @_;
+	if ($cgi) {
+		print "Content-type: text/html\n\n";
+		print "Error: @_\n";
+		exit 1;
+	}
+	else {
+		die @_;
+	}
 }
 
 sub debug ($) {
@@ -54,6 +51,12 @@ sub mtime ($) {
 	return (stat($page))[9];
 }
 
+sub possibly_foolish_untaint ($) {
+	my $tainted=shift;
+	my ($untainted)=$tainted=~/(.*)/;
+	return $untainted;
+}
+
 sub basename {
 	my $file=shift;
 
@@ -94,21 +97,21 @@ sub htmlpage ($) {
 	return $page.".html";
 }
 
-sub readpage ($) {
-	my $page=shift;
+sub readfile ($) {
+	my $file=shift;
 
 	local $/=undef;
-	open (PAGE, "$srcdir/$page") || error("failed to read $page: $!");
-	my $ret=<PAGE>;
-	close PAGE;
+	open (IN, "$file") || error("failed to read $file: $!");
+	my $ret=<IN>;
+	close IN;
 	return $ret;
 }
 
-sub writepage ($$) {
-	my $page=shift;
+sub writefile ($$) {
+	my $file=shift;
 	my $content=shift;
 
-	my $dir=dirname("$destdir/$page");
+	my $dir=dirname($file);
 	if (! -d $dir) {
 		my $d="";
 		foreach my $s (split(m!/+!, $dir)) {
@@ -119,16 +122,16 @@ sub writepage ($$) {
 		}
 	}
 
-	open (PAGE, ">$destdir/$page") || error("failed to write $page: $!");
-	print PAGE $content;
-	close PAGE;
+	open (OUT, ">$file") || error("failed to write $file: $!");
+	print OUT $content;
+	close OUT;
 }
 
 sub findlinks {
 	my $content=shift;
 
 	my @links;
-	while ($content =~ /$link/g) {
+	while ($content =~ /$wiki_link_regexp/g) {
 		push @links, lc($1);
 	}
 	return @links;
@@ -154,7 +157,7 @@ sub bestlink ($$) {
 		}
 	} while $cwd=~s!/?[^/]+$!!;
 
-	print STDERR "warning: page $page, broken link: $link\n";
+	#print STDERR "warning: page $page, broken link: $link\n";
 	return "";
 }
 
@@ -164,24 +167,29 @@ sub isinlinableimage ($) {
 	$file=~/\.(png|gif|jpg|jpeg)$/;
 }
 
-sub htmllink ($$) {
+sub htmllink {
 	my $page=shift;
 	my $link=shift;
+	my $noimagelink=shift;
 
 	my $bestlink=bestlink($page, $link);
 
 	return $link if $page eq $bestlink;
 
+	# TODO BUG: %renderedfiles may not have it, if the linked to page
+	# was also added and isn't yet rendered! Note that this bug is
+	# masked by the bug mentioned below that makes all new files
+	# be rendered twice.
 	if (! grep { $_ eq $bestlink } values %renderedfiles) {
 		$bestlink=htmlpage($bestlink);
 	}
 	if (! grep { $_ eq $bestlink } values %renderedfiles) {
-		return "?$link"
+		return "<a href=\"$cgiurl?do=create&page=$link&from=$page\">?</a>$link"
 	}
 
 	$bestlink=File::Spec->abs2rel($bestlink, dirname($page));
 
-	if (isinlinableimage($bestlink)) {
+	if (! $noimagelink && isinlinableimage($bestlink)) {
 		return "<img src=\"$bestlink\">";
 	}
 	return "<a href=\"$bestlink\">$link</a>";
@@ -191,7 +199,7 @@ sub linkify ($$) {
 	my $content=shift;
 	my $file=shift;
 
-	$content =~ s/$link/htmllink(pagename($file), $1)/eg;
+	$content =~ s/$wiki_link_regexp/htmllink(pagename($file), $1)/eg;
 
 	return $content;
 }
@@ -214,13 +222,25 @@ sub linkbacks ($$) {
 
 	my @links;
 	foreach my $p (keys %links) {
-		if (grep { $_ eq $page } @{$links{$p}}) {
+		next if bestlink($page, $p) eq $page;
+		if (grep { length $_ && bestlink($p, $_) eq $page } @{$links{$p}}) {
 			my $href=File::Spec->abs2rel(htmlpage($p), dirname($page));
-			push @links, "<a href=\"$href\">$p</a>";
+
+			# Trim common dir prefixes from both pages.
+			my $p_trimmed=$p;
+			my $page_trimmed=$page;
+			my $dir;
+			1 while (($dir)=$page_trimmed=~m!^([^/]+/)!) &&
+			        defined $dir &&
+			        $p_trimmed=~s/^\Q$dir\E// &&
+			        $page_trimmed=~s/^\Q$dir\E//;
+
+			push @links, "<a href=\"$href\">$p_trimmed</a>";
 		}
 	}
 
-	$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n";
+	$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
+	return $content;
 }
 
 sub finalize ($$) {
@@ -244,8 +264,15 @@ sub finalize ($$) {
 	$path=~s/\.\.\/$/index.html/;
 	$pagelink="$wikiname/ $pagelink";
 
+	my @actions;
+	if (length $cgiurl) {
+		push @actions, "<a href=\"$cgiurl?do=edit&page=$page\">Edit</a>";
+		push @actions, "<a href=\"$cgiurl?do=recentchanges\">RecentChanges</a>";
+	}
+
 	$content="<html>\n<head><title>$title</title></head>\n<body>\n".
 		"<p>$pagelink</p>\n".
+		"@actions\n<hr>\n".
 		$content.
 		"</body>\n</html>\n";
 
@@ -256,7 +283,7 @@ sub render ($) {
 	my $file=shift;
 
 	my $type=pagetype($file);
-	my $content=readpage($file);
+	my $content=readfile("$srcdir/$file");
 
 	if ($type ne 'unknown') {
 		my $page=pagename($file);
@@ -267,13 +294,13 @@ sub render ($) {
 		$content=linkbacks($content, $page);
 		$content=finalize($content, $page);
 
-		writepage(htmlpage($page), $content);
+		writefile("$destdir/".htmlpage($page), $content);
 		$oldpagemtime{$page}=time;
 		$renderedfiles{$page}=htmlpage($page);
 	}
 	else {
 		$links{$file}=[];
-		writepage($file, $content);
+		writefile("$destdir/$file", $content);
 		$oldpagemtime{$file}=time;
 		$renderedfiles{$file}=$file;
 	}
@@ -282,14 +309,14 @@ sub render ($) {
 sub loadindex () {
 	open (IN, "$srcdir/.index") || return;
 	while (<IN>) {
-		($_)=/(.*)/; # untaint
+		$_=possibly_foolish_untaint($_);
 		chomp;
 		my ($mtime, $file, $rendered, @links)=split(' ', $_);
 		my $page=pagename($file);
 		$pagesources{$page}=$file;
 		$oldpagemtime{$page}=$mtime;
-		$links{$page}=[@links];
 		$oldlinks{$page}=[@links];
+		$links{$page}=[@links];
 		$renderedfiles{$page}=$rendered;
 	}
 	close IN;
@@ -298,13 +325,102 @@ sub loadindex () {
 sub saveindex () {
 	open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
 	foreach my $page (keys %oldpagemtime) {
-	print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
-		join(" ", @{$links{$page}})."\n"
-		if $oldpagemtime{$page};
+		print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
+			join(" ", @{$links{$page}})."\n"
+			if $oldpagemtime{$page};
 	}
 	close OUT;
 }
 
+sub rcs_update () {
+	if (-d "$srcdir/.svn") {
+		if (system("svn", "update", "--quiet", $srcdir) != 0) {
+			warn("svn update failed\n");
+		}
+	}
+}
+
+sub rcs_commit ($) {
+	my $message=shift;
+
+	if (-d "$srcdir/.svn") {
+		if (system("svn", "commit", "--quiet", "-m",
+		           possibly_foolish_untaint($message), $srcdir) != 0) {
+			warn("svn commit failed\n");
+		}
+	}
+}
+
+sub rcs_add ($) {
+	my $file=shift;
+
+	if (-d "$srcdir/.svn") {
+		my $parent=dirname($file);
+		while (! -d "$srcdir/$parent/.svn") {
+			$file=$parent;
+			$parent=dirname($file);
+		}
+
+		if (system("svn", "add", "--quiet", "$srcdir/$file") != 0) {
+			warn("svn add failed\n");
+		}
+	}
+}
+
+sub rcs_recentchanges ($) {
+	my $num=shift;
+	my @ret;
+
+	eval q{use Date::Parse};
+	eval q{use Time::Duration};
+
+	if (-d "$srcdir/.svn") {
+		my $info=`LANG=C svn info $srcdir`;
+		my ($svn_url)=$info=~/^URL: (.*)$/m;
+
+		# FIXME: currently assumes that the wiki is somewhere
+		# under trunk in svn, doesn't support other layouts.
+		my ($svn_base)=$svn_url=~m!(/trunk(?:/.*)?)$!;
+
+		my $div=qr/^--------------------+$/;
+		my $infoline=qr/^r(\d+)\s+\|\s+([^\s]+)\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/;
+		my $state='start';
+		my ($rev, $user, $when, @pages, $message);
+		foreach (`LANG=C svn log -v '$svn_url'`) {
+			chomp;
+			if ($state eq 'start' && /$div/) {
+				$state='header';
+			}
+			elsif ($state eq 'header' && /$infoline/) {
+				$rev=$1;
+				$user=$2;
+				$when=concise(ago(time - str2time($3)));
+			}
+			elsif ($state eq 'header' && /^\s+[A-Z]\s+\Q$svn_base\E\/(.+)$/) {
+				push @pages, pagename($1) if length $1;
+			}
+			elsif ($state eq 'header' && /^$/) {
+				$state='body';
+			}
+			elsif ($state eq 'body' && /$div/) {
+				push @ret, { rev => $rev, user => $user,
+					when => $when, message => $message,
+					pages => [@pages] } if @pages;
+				return @ret if @ret >= $num;
+
+				$state='header';
+				$message=$rev=$user=$when=undef;
+				@pages=();
+			}
+			elsif ($state eq 'body') {
+				$message.="$_<br>\n";
+			}
+		}
+	}
+
+	return @ret;
+}
+
 sub prune ($) {
 	my $file=shift;
 
@@ -322,11 +438,11 @@ sub refresh () {
 	find({
 		no_chdir => 1,
 		wanted => sub {
-			if (/\/\.svn\//) {
+			if (/$wiki_file_prune_regexp/) {
 				$File::Find::prune=1;
 			}
-			elsif (! -d $_ && ! /\.html$/ && ! /\/\./) {
-				my ($f)=/(^[-A-Za-z0-9_.:\/+]+$)/; # untaint
+			elsif (! -d $_) {
+				my ($f)=/$wiki_file_regexp/; # untaint
 				if (! defined $f) {
 					warn("skipping bad filename $_\n");
 				}
@@ -360,6 +476,7 @@ sub refresh () {
 				prune($destdir."/".$renderedfiles{$page});
 				delete $renderedfiles{$page};
 				$oldpagemtime{$page}=0;
+				delete $pagesources{$page};
 			}
 		}
 	}
@@ -373,18 +490,15 @@ sub refresh () {
 			render($file);
 			$rendered{$file}=1;
 		}
-		elsif ($rebuild) {
-			debug("rebuilding unchanged file $file");
-			render($file);
-			$rendered{$file}=1;
-		}
 	}
 
 	# if any files were added or removed, check to see if each page
 	# needs an update due to linking to them
+	# TODO: inefficient; pages may get rendered above and again here;
+	# problem is the bestlink may have changed and we won't know until
+	# now
 	if (@add || @del) {
 FILE:		foreach my $file (@files) {
-			next if $rendered{$file};
 			my $page=pagename($file);
 			foreach my $f (@add, @del) {
 				my $p=pagename($f);
@@ -392,6 +506,7 @@ FILE: foreach my $file (@files) {
 				if (bestlink($page, $link) eq $p) {
 					debug("rendering $file, which links to $p");
 					render($file);
+					$rendered{$file}=1;
 					next FILE;
 				}
 			}
@@ -403,23 +518,27 @@ FILE: foreach my $file (@files) {
 	# pages it links to
 	# TODO: inefficient; pages may get rendered above and again here;
 	# problem is the linkbacks could be wrong in the first pass render
-	# above.
+	# above
 	if (%rendered) {
 		my %linkchanged;
 		foreach my $file (keys %rendered, @del) {
-			my $pagename=pagename($file);
-			if (exists $links{$pagename}) {
-				foreach my $link (@{$links{$pagename}}) {
-					if (! exists $oldlinks{$pagename} ||
-					    ! grep { $_ eq $link } @{$oldlinks{$pagename}}) {
+			my $page=pagename($file);
+			if (exists $links{$page}) {
+				foreach my $link (@{$links{$page}}) {
+					$link=bestlink($page, $link);
+					if (length $link &&
+					    ! exists $oldlinks{$page} ||
+					    ! grep { $_ eq $link } @{$oldlinks{$page}}) {
 						$linkchanged{$link}=1;
 					}
 				}
 			}
-			if (exists $oldlinks{$pagename}) {
-				foreach my $link (@{$oldlinks{$pagename}}) {
-					if (! exists $links{$pagename} ||
-					    ! grep { $_ eq $link } @{$links{$pagename}}) {
+			if (exists $oldlinks{$page}) {
+				foreach my $link (@{$oldlinks{$page}}) {
+					$link=bestlink($page, $link);
+					if (length $link &&
+					    ! exists $links{$page} ||
+					    ! grep { $_ eq $link } @{$links{$page}}) {
 						$linkchanged{$link}=1;
 					}
 				}
 			}
@@ -435,6 +554,267 @@ FILE: foreach my $file (@files) {
 	}
 }
 
-loadindex();
-refresh();
-saveindex();
+# Generates a C wrapper program for running ikiwiki in a specific way.
+# The wrapper may be safely made suid.
+sub gen_wrapper ($$) {
+	my ($svn, $rebuild)=@_;
+
+	eval q{use Cwd 'abs_path'};
+	$srcdir=abs_path($srcdir);
+	$destdir=abs_path($destdir);
+	my $this=abs_path($0);
+	if (! -x $this) {
+		error("$this doesn't seem to be executable");
+	}
+
+	my @params=($srcdir, $destdir, "--wikiname=$wikiname");
+	push @params, "--verbose" if $verbose;
+	push @params, "--rebuild" if $rebuild;
+	push @params, "--nosvn" if !$svn;
+	push @params, "--cgi" if $cgi;
+	push @params, "--url=$url" if $url;
+	push @params, "--cgiurl=$cgiurl" if $cgiurl;
+	my $params=join(" ", @params);
+	my $call='';
+	foreach my $p ($this, $this, @params) {
+		$call.=qq{"$p", };
+	}
+	$call.="NULL";
+
+	my @envsave;
+	push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI
+	               CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE} if $cgi;
+	my $envsave="";
+	foreach my $var (@envsave) {
+		$envsave.=<<"EOF"
+	if ((s=getenv("$var")))
+		asprintf(&newenviron[i++], "%s=%s", "$var", s);
EOF
+	}
+
+	open(OUT, ">ikiwiki-wrap.c") || error("failed to write ikiwiki-wrap.c: $!");;
+	print OUT <<"EOF";
+/* A wrapper for ikiwiki, can be safely made suid. */
+#define _GNU_SOURCE
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+extern char **environ;
+
+int main (int argc, char **argv) {
+	/* Sanitize environment. */
+	char *s;
+	char *newenviron[$#envsave+3];
+	int i=0;
+$envsave
+	newenviron[i++]="HOME=$ENV{HOME}";
+	newenviron[i]=NULL;
+	environ=newenviron;
+
+	if (argc == 2 && strcmp(argv[1], "--params") == 0) {
+		printf("$params\\n");
+		exit(0);
+	}
+
+	execl($call);
+	perror("failed to run $this");
+	exit(1);
+}
+EOF
+	close OUT;
+	if (system("gcc", "ikiwiki-wrap.c", "-o", "ikiwiki-wrap") != 0) {
+		error("failed to compile ikiwiki-wrap.c");
+	}
+	unlink("ikiwiki-wrap.c");
+	print "successfully generated ikiwiki-wrap\n";
+	exit 0;
+}
+
+sub cgi () {
+	eval q{use CGI};
+	my $q=CGI->new;
+
+	my $do=$q->param('do');
+	if (! defined $do || ! length $do) {
+		error("\"do\" parameter missing");
+	}
+
+	if ($do eq 'recentchanges') {
+		my $list="<ul>\n";
+		foreach my $change (rcs_recentchanges(100)) {
+			$list.="<li>";
+			$list.=join(", ", map { htmllink("", $_, 1) } @{$change->{pages}});
+			$list.="<br>\n";
+			$list.="changed ".$change->{when}." by ".
+			       htmllink("", $change->{user}, 1).
+			       ": ".$change->{message}."\n";
+			$list.="</li>\n";
+		}
+		$list.="</ul>\n";
+
+		print $q->header,
+		      $q->start_html("RecentChanges"),
+		      $q->h1("$wikiname/ RecentChanges"),
+		      $list,
+		      $q->end_form,
+		      $q->end_html;
+		return;
+	}
+
+	my ($page)=$q->param('page')=~/$wiki_file_regexp/;
+	if (! defined $page || ! length $page || $page ne $q->param('page') ||
+	    $page=~/$wiki_file_prune_regexp/ || $page=~/^\//) {
+		error("bad page name");
+	}
+	$page=lc($page);
+
+	my $action=$q->request_uri;
+	$action=~s/\?.*//;
+
+	if ($do eq 'create') {
+		if (exists $pagesources{lc($page)}) {
+			# hmm, someone else made the page in the meantime?
+			print $q->redirect("$url/".htmlpage($page));
+		}
+
+		my @page_locs;
+		my ($from)=$q->param('from')=~/$wiki_file_regexp/;
+		if (! defined $from || ! length $from ||
+		    $from ne $q->param('from') ||
+		    $from=~/$wiki_file_prune_regexp/ || $from=~/^\//) {
+			@page_locs=$page;
+		}
+		else {
+			my $dir=$from."/";
+			$dir=~s![^/]+/$!!;
+			push @page_locs, $dir.$page;
+			push @page_locs, "$from/$page";
+			while (length $dir) {
+				$dir=~s![^/]+/$!!;
+				push @page_locs, $dir.$page;
+			}
+		}
+
+		$q->param("do", "save");
+		print $q->header,
+		      $q->start_html("Creating $page"),
+		      $q->start_h1("$wikiname/ Creating $page"),
+		      $q->end_hi,
+		      $q->start_form(-action => $action),
+		      $q->hidden('do'),
+		      "Select page location:",
+		      $q->popup_menu('page', \@page_locs),
+		      $q->textarea(-name => 'content',
+		               -default => "",
+		               -rows => 20,
+		               -columns => 80),
+		      $q->br,
+		      "Optional comment about this change:",
+		      $q->br,
+		      $q->textfield(-name => "comments", -size => 80),
+		      $q->br,
+		      $q->submit("Save Page"),
+		      $q->end_form,
+		      $q->end_html;
+	}
+	elsif ($do eq 'edit') {
+		my $content="";
+		if (exists $pagesources{lc($page)}) {
+			$content=readfile("$srcdir/$pagesources{lc($page)}");
+			$content=~s/\n/\r\n/g;
+		}
+		$q->param("do", "save");
+		print $q->header,
+		      $q->start_html("Editing $page"),
+		      $q->h1("$wikiname/ Editing $page"),
+		      $q->end_hi,
+		      $q->start_form(-action => $action),
+		      $q->hidden('do'),
+		      $q->hidden('page'),
+		      $q->textarea(-name => 'content',
+		               -default => $content,
+		               -rows => 20,
+		               -columns => 80),
+		      $q->br,
+		      "Optional comment about this change:",
+		      $q->br,
+		      $q->textfield(-name => "comments", -size => 80),
+		      $q->br,
+		      $q->submit("Save Page"),
+		      $q->end_form,
+		      $q->end_html;
+	}
+	elsif ($do eq 'save') {
+		my $file=$page.$default_pagetype;
+		my $newfile=1;
+		if (exists $pagesources{lc($page)}) {
+			$file=$pagesources{lc($page)};
+			$newfile=0;
+		}
+
+		my $content=$q->param('content');
+		$content=~s/\r\n/\n/g;
+		$content=~s/\r/\n/g;
+		writefile("$srcdir/$file", $content);
+
+		my $message="web commit from $ENV{REMOTE_ADDR}";
+		if (defined $q->param('comments')) {
+			$message.=": ".$q->param('comments');
+		}
+
+		if ($svn) {
+			if ($newfile) {
+				rcs_add($file);
+			}
+			# presumably the commit will trigger an update
+			# of the wiki
+			rcs_commit($message);
+		}
+		else {
+			refresh();
+		}
+
+		print $q->redirect("$url/".htmlpage($page));
+	}
+	else {
+		error("unknown do parameter");
+	}
+}
+
+my $rebuild=0;
+my $wrapper=0;
+if (grep /^-/, @ARGV) {
+	eval {use Getopt::Long};
+	GetOptions(
+		"wikiname=s" => \$wikiname,
+		"verbose|v" => \$verbose,
+		"rebuild" => \$rebuild,
+		"wrapper" => \$wrapper,
+		"svn!" => \$svn,
+		"cgi" => \$cgi,
+		"url=s" => \$url,
+		"cgiurl=s" => \$cgiurl,
+	) || usage();
+}
+usage() unless @ARGV == 2;
+($srcdir) = possibly_foolish_untaint(shift);
+($destdir) = possibly_foolish_untaint(shift);
+
+if ($cgi && ! length $url) {
+	error("Must specify url to wiki with --url when using --cgi");
+}
+
+gen_wrapper($svn, $rebuild) if $wrapper;
+memoize('pagename');
+memoize('bestlink');
+loadindex() unless $rebuild;
+if ($cgi) {
+	cgi();
+}
+else {
+	rcs_update() if $svn;
+	refresh();
+	saveindex();
+}
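
Possible invocations using the options this patch adds (a sketch only; the wiki name, directories, and URLs below are placeholders, not values taken from the patch):

    ikiwiki --verbose --wikiname=MyWiki ~/wikisrc ~/public_html/wiki
    ikiwiki --cgi --wrapper --url=http://example.com/wiki \
            --cgiurl=http://example.com/cgi-bin/ikiwiki-wrap ~/wikisrc ~/public_html/wiki

The first command rebuilds the wiki directly; the second runs gen_wrapper() to compile the ikiwiki-wrap CGI wrapper (which may then be made suid), and per the new check above, --url must be supplied whenever --cgi is used.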