use Memoize;
use File::Spec;
-$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
-
BEGIN {
$blosxom::version="is a proper perl module too much to ask?";
do "/usr/bin/markdown";
}
+$ENV{PATH}="/usr/local/bin:/usr/bin:/bin";
my ($srcdir, $destdir, %links, %oldlinks, %oldpagemtime, %renderedfiles,
%pagesources);
my $wiki_link_regexp=qr/\[\[([^\s]+)\]\]/;
my $cgi=0;
my $url="";
my $cgiurl="";
+my $historyurl="";
my $svn=1;
sub usage { #{{{
	# Print the command-line synopsis and abort.
	my $msg="usage: ikiwiki [options] source dest\n";
	die $msg;
} #}}}
-sub error ($) {
+sub error ($) { #{{{
if ($cgi) {
print "Content-type: text/html\n\n";
print "Error: @_\n";
else {
die @_;
}
-}
+} #}}}
sub debug ($) { #{{{
	# Emit a debug message to stdout, but only in verbose mode.
	return unless $verbose;
	print "@_\n";
} #}}}
sub mtime ($) { #{{{
	# Return the last-modification time (epoch seconds) of a file.
	my $file=shift;

	my @st=stat($file);
	return $st[9];	# field 9 of stat() is mtime
} #}}}
sub possibly_foolish_untaint ($) { #{{{
	# Launder a tainted value by passing it through a regex capture.
	# NOTE: without /s the capture stops at the first newline, so any
	# later lines (and a trailing newline) are dropped — callers rely
	# on single-line input.
	my $tainted=shift;
	$tainted=~/(.*)/;
	return $1;
} #}}}
sub basename ($) { #{{{
	# Strip any leading directory components, leaving the file name.
	my $path=shift;
	$path=~s!^.*/!!;
	return $path;
} #}}}
sub dirname ($) { #{{{
	# Strip the final path component (and its preceding slash, if any),
	# leaving the directory part.
	my $path=shift;
	$path=~s{/?[^/]+$}{};
	return $path;
} #}}}
-sub pagetype ($) {
+sub pagetype ($) { #{{{
my $page=shift;
if ($page =~ /\.mdwn$/) {
else {
return "unknown";
}
-}
+} #}}}
sub pagename ($) { #{{{
	# Map a source file name to its wiki page name by removing the
	# type suffix; unknown types are returned unchanged.
	my $file=shift;

	my $type=pagetype($file);
	return $file if $type eq 'unknown';
	my $page=$file;
	# Same quirky pattern as always: \Q$type\E*$ quotes the suffix
	# with a trailing * on its last character.
	$page=~s/\Q$type\E*$//;
	return $page;
} #}}}
sub htmlpage ($) { #{{{
	# Map a page name to the html file it renders to.
	my $page=shift;
	return "$page.html";
} #}}}
-sub readfile ($) {
+sub readfile ($) { #{{{
my $file=shift;
local $/=undef;
my $ret=<IN>;
close IN;
return $ret;
-}
+} #}}}
sub writefile ($$) { #{{{
	# Write $content to $file, replacing any existing content.
	# Calls error() (which dies) if the file cannot be written.
	my $file=shift;
	my $content=shift;

	# 3-arg open with a lexical handle replaces the old 2-arg bareword
	# form: with 2-arg open, a $file starting with ">", "<" or "|"
	# would be interpreted as an open mode, and the global OUT handle
	# could clobber other code's handle.
	open (my $out, '>', $file) || error("failed to write $file: $!");
	print $out $content;
	# close flushes buffers; write errors (e.g. disk full) can first
	# surface here, so it must be checked too.
	close ($out) || error("failed to write $file: $!");
} #}}}
-sub findlinks {
+sub findlinks { #{{{
my $content=shift;
my @links;
push @links, lc($1);
}
return @links;
-}
+} #}}}
# Given a page and the text of a link on the page, determine which existing
# page that link best points to. Prefers pages under a subdirectory with
# the same name as the source page, failing that goes down the directory tree
# to the base looking for matching pages.
-sub bestlink ($$) {
+sub bestlink ($$) { #{{{
my $page=shift;
my $link=lc(shift);
#print STDERR "warning: page $page, broken link: $link\n";
return "";
-}
+} #}}}
sub isinlinableimage ($) { #{{{
	# True if the file looks like an image that can be inlined in a
	# page. The bare match is the sub's return value; the capturing
	# group is kept so list-context callers see the same result.
	my $name=shift;

	$name =~ m{\.(png|gif|jpg|jpeg)$};
} #}}}
-sub htmllink {
+sub htmllink { #{{{
my $page=shift;
my $link=shift;
my $noimagelink=shift;
return "<img src=\"$bestlink\">";
}
return "<a href=\"$bestlink\">$link</a>";
-}
+} #}}}
sub linkify ($$) { #{{{
	# Replace every [[WikiLink]] in $content with an html link,
	# resolved relative to the page that $file renders.
	my ($content, $file)=@_;

	my $page=pagename($file);	# constant across matches, hoisted
	$content=~s/$wiki_link_regexp/htmllink($page, $1)/eg;

	return $content;
} #}}}
-sub htmlize ($$) {
+sub htmlize ($$) { #{{{
my $type=shift;
my $content=shift;
else {
error("htmlization of $type not supported");
}
-}
+} #}}}
-sub linkbacks ($$) {
+sub linkbacks ($$) { #{{{
my $content=shift;
my $page=shift;
$content.="<hr><p>Links: ".join(" ", sort @links)."</p>\n" if @links;
return $content;
-}
+} #}}}
-sub finalize ($$) {
+sub finalize ($$) { #{{{
my $content=shift;
my $page=shift;
push @actions, "<a href=\"$cgiurl?do=edit&page=$page\">Edit</a>";
push @actions, "<a href=\"$cgiurl?do=recentchanges\">RecentChanges</a>";
}
+ if (length $historyurl) {
+ my $url=$historyurl;
+ $url=~s/\[\[\]\]/$page/g;
+ push @actions, "<a href=\"$url\">History</a>";
+ }
$content="<html>\n<head><title>$title</title></head>\n<body>\n".
"<h1>$pagelink</h1>\n".
"</body>\n</html>\n";
return $content;
-}
+} #}}}
-sub render ($) {
+sub render ($) { #{{{
my $file=shift;
my $type=pagetype($file);
$oldpagemtime{$file}=time;
$renderedfiles{$file}=$file;
}
-}
+} #}}}
-sub loadindex () {
+sub loadindex () { #{{{
open (IN, "$srcdir/.index") || return;
while (<IN>) {
$_=possibly_foolish_untaint($_);
$renderedfiles{$page}=$rendered;
}
close IN;
-}
+} #}}}
-sub saveindex () {
+sub saveindex () { #{{{
open (OUT, ">$srcdir/.index") || error("cannot write to .index: $!");
foreach my $page (keys %oldpagemtime) {
print OUT "$oldpagemtime{$page} $pagesources{$page} $renderedfiles{$page} ".
if $oldpagemtime{$page};
}
close OUT;
-}
+} #}}}
sub rcs_update () { #{{{
	# Pull in any changes committed to the svn working copy; a no-op
	# when the source dir is not under svn.
	return unless -d "$srcdir/.svn";

	# List-form system() bypasses the shell, so $srcdir is never
	# shell-interpolated.
	if (system("svn", "update", "--quiet", $srcdir) != 0) {
		warn("svn update failed\n");
	}
} #}}}
-sub rcs_commit ($) {
+sub rcs_commit ($) { #{{{
my $message=shift;
if (-d "$srcdir/.svn") {
warn("svn commit failed\n");
}
}
-}
+} #}}}
-sub rcs_add ($) {
+sub rcs_add ($) { #{{{
my $file=shift;
if (-d "$srcdir/.svn") {
warn("svn add failed\n");
}
}
-}
+} #}}}
-sub rcs_recentchanges ($) {
+sub rcs_recentchanges ($) { #{{{
my $num=shift;
my @ret;
}
return @ret;
-}
+} #}}}
-sub prune ($) {
+sub prune ($) { #{{{
my $file=shift;
unlink($file);
while (rmdir($dir)) {
$dir=dirname($dir);
}
-}
+} #}}}
-sub refresh () {
+sub refresh () { #{{{
# Find existing pages.
my %exists;
my @files;
}
}
}
-}
+} #}}}
# Generates a C wrapper program for running ikiwiki in a specific way.
# The wrapper may be safely made suid.
-sub gen_wrapper ($$) {
+sub gen_wrapper ($$) { #{{{
my ($svn, $rebuild)=@_;
eval q{use Cwd 'abs_path'};
push @params, "--cgi" if $cgi;
push @params, "--url=$url" if $url;
push @params, "--cgiurl=$cgiurl" if $cgiurl;
+ push @params, "--historyurl=$historyurl" if $historyurl;
my $params=join(" ", @params);
my $call='';
foreach my $p ($this, $this, @params) {
unlink("ikiwiki-wrap.c");
print "successfully generated ikiwiki-wrap\n";
exit 0;
-}
+} #}}}
-sub cgi () {
+sub cgi () { #{{{
eval q{use CGI};
my $q=CGI->new;
else {
error("unknown do parameter");
}
-}
+} #}}}
+# main {{{
my $rebuild=0;
my $wrapper=0;
if (grep /^-/, @ARGV) {
"cgi" => \$cgi,
"url=s" => \$url,
"cgiurl=s" => \$cgiurl,
+ "historyurl=s" => \$historyurl,
) || usage();
}
usage() unless @ARGV == 2;
refresh();
saveindex();
}
+#}}}