X-Git-Url: http://git.tremily.us/?p=ikiwiki.git;a=blobdiff_plain;f=ikiwiki;h=4057564833f9e6479bc34d8dc13cd510126c08b3;hp=8ba3249616a556e652f57a4ef86607926f4609af;hb=dba92e88fb00f29dc5d158358ad3d1613ed3c0cc;hpb=942d5896cdf8467de52a4db8f4c959e6bb4a602b

diff --git a/ikiwiki b/ikiwiki
index 8ba324961..405756483 100755
--- a/ikiwiki
+++ b/ikiwiki
@@ -17,12 +17,14 @@ my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime,
     %renderedfiles, %pagesources);
 my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
 my $wiki_file_regexp=qr/(^[-A-Za-z0-9_.:\/+]+$)/;
-my $wiki_file_prune_regexp=qr!((^|/).svn/|\.\.)!;
+my $wiki_file_prune_regexp=qr!((^|/).svn/|\.\.|^\.|\/\.|\.html?$)!;
 my $verbose=0;
 my $wikiname="wiki";
 my $default_pagetype=".mdwn";
 my $cgi=0;
 my $url="";
+my $cgiurl="";
+my $svn=1;
 sub usage {
 	die "usage: ikiwiki [options] source dest\n";
 }
@@ -181,7 +183,7 @@ sub htmllink ($$) {
 		$bestlink=htmlpage($bestlink);
 	}
 	if (! grep { $_ eq $bestlink } values %renderedfiles) {
-		return "?$link"
+		return "<a href=\"$cgiurl?do=create&page=$link&from=$page\">?</a>$link"
 	}
 	
 	$bestlink=File::Spec->abs2rel($bestlink, dirname($page));
@@ -261,8 +263,15 @@ sub finalize ($$) {
 	$path=~s/\.\.\/$/index.html/;
 	$pagelink="<a href=\"$path\">$wikiname</a>/ $pagelink";
 	
+	my @actions;
+	if (length $cgiurl) {
+		push @actions, "<a href=\"$cgiurl?do=edit&page=$page\">Edit</a>";
+		push @actions, "<a href=\"$cgiurl?do=recentchanges\">RecentChanges</a>";
+	}
+	
 	$content="<html>\n<head><title>$title</title></head>\n<body>\n".
 	          "<h1>$pagelink</h1>\n".
+	          "@actions\n<hr>\n".
\n". $content. "\n\n"; @@ -322,7 +331,7 @@ sub saveindex () { close OUT; } -sub update () { +sub rcs_update () { if (-d "$srcdir/.svn") { if (system("svn", "update", "--quiet", $srcdir) != 0) { warn("svn update failed\n"); @@ -330,6 +339,84 @@ sub update () { } } +sub rcs_commit ($) { + my $message=shift; + + if (-d "$srcdir/.svn") { + if (system("svn", "commit", "--quiet", "-m", + possibly_foolish_untaint($message), $srcdir) != 0) { + warn("svn commit failed\n"); + } + } +} + +sub rcs_add ($) { + my $file=shift; + + if (-d "$srcdir/.svn") { + my $parent=dirname($file); + while (! -d "$srcdir/$parent/.svn") { + $file=$parent; + $parent=dirname($file); + } + + if (system("svn", "add", "--quiet", "$srcdir/$file") != 0) { + warn("svn add failed\n"); + } + } +} + +sub rcs_recentchanges ($) { + my $num=shift; + my @ret; + + eval {use Date::Parse}; + eval {use Time::Duration}; + + if (-d "$srcdir/.svn") { + my $info=`LANG=C svn info $srcdir`; + my ($svn_url)=$info=~/^URL: (.*)$/m; + + my $div=qr/^--------------------+$/; + my $infoline=qr/^r(\d+)\s+\|\s+([^\s]+)\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/; + my $state='start'; + my ($rev, $user, $when, @pages, $message); + foreach (`LANG=C svn log -v '$svn_url'`) { + print STDERR "state: $state\n"; + chomp; + if ($state eq 'start' && /$div/) { + $state='header'; + } + elsif ($state eq 'header' && /$infoline/) { + $rev=$1; + $user=$2; + $when=ago(time - str2time($3)); + } + elsif ($state eq 'header' && /^\s+(.*)/) { + push @pages, $1; + } + elsif ($state eq 'header' && /^$/) { + $state='body'; + } + elsif ($state eq 'body' && /$div/) { + push @ret, {rev => $rev, user => $user, + date => $when, message => $message, + pages => [@pages]}, + last if @ret >= $num; + + $state='header'; + $message=$rev=$user=$when=undef; + @pages=(); + } + elsif ($state eq 'body') { + $message.="$_
\n"; + } + } + } + + return @ret; +} + sub prune ($) { my $file=shift; @@ -350,7 +437,7 @@ sub refresh () { if (/$wiki_file_prune_regexp/) { $File::Find::prune=1; } - elsif (! -d $_ && ! /\.html$/ && ! /\/\./) { + elsif (! -d $_) { my ($f)=/$wiki_file_regexp/; # untaint if (! defined $f) { warn("skipping bad filename $_\n"); @@ -466,7 +553,7 @@ FILE: foreach my $file (@files) { # Generates a C wrapper program for running ikiwiki in a specific way. # The wrapper may be safely made suid. sub gen_wrapper ($$) { - my ($offline, $rebuild)=@_; + my ($svn, $rebuild)=@_; eval {use Cwd 'abs_path'}; $srcdir=abs_path($srcdir); @@ -475,22 +562,29 @@ sub gen_wrapper ($$) { if (! -x $this) { error("$this doesn't seem to be executable"); } + + my @params=($srcdir, $destdir, "--wikiname=$wikiname"); + push @params, "--verbose" if $verbose; + push @params, "--rebuild" if $rebuild; + push @params, "--nosvn" if !$svn; + push @params, "--cgi" if $cgi; + push @params, "--url=$url" if $url; + push @params, "--cgiurl=$cgiurl" if $cgiurl; + my $params=join(" ", @params); + my $call=''; + foreach my $p ($this, $this, @params) { + $call.=qq{"$p", }; + } + $call.="NULL"; - my $call=qq{"$this", "$this", "$srcdir", "$destdir", "--wikiname=$wikiname"}; - $call.=', "--verbose"' if $verbose; - $call.=', "--rebuild"' if $rebuild; - $call.=', "--offline"' if $offline; - $call.=', "--cgi"' if $cgi; - $call.=', "--url='.$url.'"' if $url; - - # For CGI we need all these environment variables. - my @envsave=qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI - CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE}; + my @envsave; + push @envsave, qw{REMOTE_ADDR QUERY_STRING REQUEST_METHOD REQUEST_URI + CONTENT_TYPE CONTENT_LENGTH GATEWAY_INTERFACE} if $cgi; my $envsave=""; foreach my $var (@envsave) { $envsave.=<<"EOF" - if ((s=getenv("$var"))) - asprintf(&newenviron[i++], "%s=%s", "$var", s); + if ((s=getenv("$var"))) + asprintf(&newenviron[i++], "%s=%s", "$var", s); EOF } @@ -505,21 +599,22 @@ EOF extern char **environ; -int main (void) { +int main (int argc, char **argv) { /* Sanitize environment. */ - if ($cgi) { - char *s; - char *newenviron[$#envsave+2]; - int i=0; - $envsave; - newenviron[i]=NULL; - environ=newenviron; - } - else { - clearenv(); + char *s; + char *newenviron[$#envsave+3]; + int i=0; +$envsave + newenviron[i++]="HOME=$ENV{HOME}"; + newenviron[i]=NULL; + environ=newenviron; + + if (argc == 2 && strcmp(argv[1], "--params") == 0) { + printf("$params\\n"); + exit(0); } - - execl($call, NULL); + + execl($call); perror("failed to run $this"); exit(1); } @@ -542,16 +637,86 @@ sub cgi () { error("\"do\" parameter missing"); } - my ($page)=$q->param('page')=~/$wiki_file_regexp/; # untaint + if ($do eq 'recentchanges') { + my $list="\n"; + + print $q->header, + $q->start_html("RecentChanges"), + $q->h1("$wikiname/ RecentChanges"), + $list, + $q->end_form, + $q->end_html; + return; + } + + my ($page)=$q->param('page')=~/$wiki_file_regexp/; if (! defined $page || ! length $page || $page ne $q->param('page') || $page=~/$wiki_file_prune_regexp/ || $page=~/^\//) { error("bad page name"); } + $page=lc($page); my $action=$q->request_uri; $action=~s/\?.*//; - if ($do eq 'edit') { + if ($do eq 'create') { + if (exists $pagesources{lc($page)}) { + # hmm, someone else made the page in the meantime? + print $q->redirect("$url/".htmlpage($page)); + } + + my @page_locs; + my ($from)=$q->param('from')=~/$wiki_file_regexp/; + if (! defined $from || ! 
length $from || + $from ne $q->param('from') || + $from=~/$wiki_file_prune_regexp/ || $from=~/^\//) { + @page_locs=$page; + } + else { + my $dir=$from."/"; + $dir=~s![^/]+/$!!; + push @page_locs, $dir.$page; + push @page_locs, "$from/$page"; + while (length $dir) { + $dir=~s![^/]+/$!!; + push @page_locs, $dir.$page; + } + } + + $q->param("do", "save"); + print $q->header, + $q->start_html("Creating $page"), + $q->start_h1("$wikiname/ Creating $page"), + $q->start_form(-action => $action), + $q->hidden('do'), + "Select page location:", + $q->popup_menu('page', \@page_locs), + $q->textarea(-name => 'content', + -default => "", + -rows => 20, + -columns => 80), + $q->br, + "Optional comment about this change:", + $q->br, + $q->textfield(-name => "comments", -size => 80), + $q->br, + $q->submit("Save Page"), + $q->end_form, + $q->end_html; + } + elsif ($do eq 'edit') { my $content=""; if (exists $pagesources{lc($page)}) { $content=readfile("$srcdir/$pagesources{lc($page)}"); @@ -559,8 +724,8 @@ sub cgi () { } $q->param("do", "save"); print $q->header, - $q->start_html("$wikiname: Editing $page"), - $q->h1("$wikiname: Editing $page"), + $q->start_html("Editing $page"), + $q->h1("$wikiname/ Editing $page"), $q->start_form(-action => $action), $q->hidden('do'), $q->hidden('page'), @@ -568,19 +733,21 @@ sub cgi () { -default => $content, -rows => 20, -columns => 80), - $q->p, - $q->submit("Save Changes"), - # TODO: Cancel button returns to page. - # TODO: Preview button. - # TODO: Commit message field. - # TODO: Conflict prevention. + $q->br, + "Optional comment about this change:", + $q->br, + $q->textfield(-name => "comments", -size => 80), + $q->br, + $q->submit("Save Page"), $q->end_form, $q->end_html; } elsif ($do eq 'save') { my $file=$page.$default_pagetype; + my $newfile=1; if (exists $pagesources{lc($page)}) { $file=$pagesources{lc($page)}; + $newfile=0; } my $content=$q->param('content'); @@ -588,6 +755,23 @@ sub cgi () { $content=~s/\r/\n/g; writefile("$srcdir/$file", $content); + my $message="web commit from $ENV{REMOTE_ADDR}"; + if (defined $q->param('comments')) { + $message.=": ".$q->param('comments'); + } + + if ($svn) { + if ($newfile) { + rcs_add($file); + } + # presumably the commit will trigger an update + # of the wiki + rcs_commit($message); + } + else { + refresh(); + } + print $q->redirect("$url/".htmlpage($page)); } else { @@ -596,7 +780,6 @@ sub cgi () { } my $rebuild=0; -my $offline=0; my $wrapper=0; if (grep /^-/, @ARGV) { eval {use Getopt::Long}; @@ -605,9 +788,10 @@ if (grep /^-/, @ARGV) { "verbose|v" => \$verbose, "rebuild" => \$rebuild, "wrapper" => \$wrapper, - "offline" => \$offline, + "svn!" => \$svn, "cgi" => \$cgi, "url=s" => \$url, + "cgiurl=s" => \$cgiurl, ) || usage(); } usage() unless @ARGV == 2; @@ -618,7 +802,7 @@ if ($cgi && ! length $url) { error("Must specify url to wiki with --url when using --cgi"); } -gen_wrapper($offline, $rebuild) if $wrapper; +gen_wrapper($svn, $rebuild) if $wrapper; memoize('pagename'); memoize('bestlink'); loadindex() unless $rebuild; @@ -626,7 +810,7 @@ if ($cgi) { cgi(); } else { - update() unless $offline; + rcs_update() if $svn; refresh(); saveindex(); }
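
For reference, the rcs_recentchanges() routine added by this patch is a small state machine written against the shape of `svn log -v` output: the divider line matches $div, the revision line matches $infoline (yielding $rev, $user and the timestamp), the indented lines under "Changed paths:" are collected into @pages, and everything after the blank line is accumulated into $message. The short standalone Perl sketch below runs one hypothetical log entry through those same two regexps; the sample data is invented for illustration, and the Date::Parse/Time::Duration date formatting used in the patch is omitted.

#!/usr/bin/perl
# Standalone sketch (not part of ikiwiki): drive the same regexps that
# rcs_recentchanges() uses over one hypothetical `svn log -v` entry.
use strict;
use warnings;

my $div=qr/^--------------------+$/;
my $infoline=qr/^r(\d+)\s+\|\s+([^\s]+)\s+\|\s+(\d+-\d+-\d+\s+\d+:\d+:\d+\s+[-+]?\d+).*/;

# Hypothetical sample entry, shaped like real `svn log -v` output.
my @log=split /\n/, <<'EOF';
------------------------------------------------------------------------
r42 | joey | 2006-03-12 20:19:05 -0500 (Sun, 12 Mar 2006) | 1 line
Changed paths:
   M /trunk/index.mdwn

web commit from 127.0.0.1: fix a typo
------------------------------------------------------------------------
EOF

my ($rev, $user, $when, @pages);
my $message='';
my $state='start';
foreach (@log) {
	if ($state eq 'start' && /$div/)           { $state='header'; }
	elsif ($state eq 'header' && /$infoline/)  { ($rev, $user, $when)=($1, $2, $3); }
	elsif ($state eq 'header' && /^\s+(.*)/)   { push @pages, $1; }     # "Changed paths:" lines
	elsif ($state eq 'header' && /^$/)         { $state='body'; }
	elsif ($state eq 'body' && /$div/)         { $state='header'; }     # end of this entry
	elsif ($state eq 'body')                   { $message.="$_\n"; }
}
print "rev=$rev user=$user when=$when\n";  # rev=42 user=joey when=2006-03-12 20:19:05 -0500
print "pages=@pages\n";                    # pages=M /trunk/index.mdwn
print "message=$message";                  # message=web commit from 127.0.0.1: fix a typo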