* Added a pagecount plugin, enabled by default.
* Support PreProcessorDirectives with no parameters, ie "[[pagecount ]]".
* Fixed/optimised backlinks code, to avoid rebuilding pages to update
backlinks when the backlinks hadn't really changed.
* Moved inline page support, rss generation etc into the inline plugin,
enabled by default.
* Added brokenlinks plugin, not enabled by default, but rather handy.
* Fix several broken links in the doc wiki.
--- /dev/null
+#!/usr/bin/perl
+# Provides a list of broken links.
+package IkiWiki::Plugin::brokenlinks;
+
+use warnings;
+use strict;
+
+sub import { #{{{
+ IkiWiki::register_plugin("preprocess", "brokenlinks", \&preprocess);
+} # }}}
+
+sub preprocess (@) { #{{{
+ my %params=@_;
+ $params{pages}="*" unless defined $params{pages};
+
+ # Needs to update whenever a page is added or removed, so
+ # register a dependency.
+ IkiWiki::add_depends($params{page}, $params{pages});
+
+ my @broken;
+ foreach my $page (%IkiWiki::links) {
+ if (IkiWiki::globlist_match($page, $params{pages})) {
+ foreach my $link (@{$IkiWiki::links{$page}}) {
+ next if $link =~ /.*\/discussion/i;
+ my $bestlink=IkiWiki::bestlink($page, $link);
+ next if length $bestlink;
+ push @broken,
+ IkiWiki::htmllink($page, $link, 1).
+ " in ".
+ IkiWiki::htmllink($params{page}, $page, 1);
+ }
+ }
+ }
+
+ return "There are no broken links!" unless @broken;
+ return "<ul>\n".join("\n", map { "<li>$_</li>" } sort @broken)."</ul>\n";
+} # }}}
+
+1
--- /dev/null
+#!/usr/bin/perl
+# Page inlining and blogging.
+package IkiWiki::Plugin::inline;
+
+use warnings;
+use strict;
+
+sub import { #{{{
+ IkiWiki::register_plugin("preprocess", "inline", \&IkiWiki::preprocess_inline);
+} # }}}
+
+# Back to ikiwiki namespace for the rest, this code is very much
+# internal to ikiwiki even though it's separated into a plugin.
+package IkiWiki;
+
+sub preprocess_inline (@) { #{{{
+ my %params=@_;
+
+ if (! exists $params{pages}) {
+ return "";
+ }
+ if (! exists $params{archive}) {
+ $params{archive}="no";
+ }
+ if (! exists $params{show} && $params{archive} eq "no") {
+ $params{show}=10;
+ }
+ add_depends($params{page}, $params{pages});
+
+ my $ret="";
+
+ if (exists $params{rootpage}) {
+ # Add a blog post form, with a rss link button.
+ my $formtemplate=HTML::Template->new(blind_cache => 1,
+ filename => "$config{templatedir}/blogpost.tmpl");
+ $formtemplate->param(cgiurl => $config{cgiurl});
+ $formtemplate->param(rootpage => $params{rootpage});
+ if ($config{rss}) {
+ $formtemplate->param(rssurl => rsspage(basename($params{page})));
+ }
+ $ret.=$formtemplate->output;
+ }
+ elsif ($config{rss}) {
+ # Add a rss link button.
+ my $linktemplate=HTML::Template->new(blind_cache => 1,
+ filename => "$config{templatedir}/rsslink.tmpl");
+ $linktemplate->param(rssurl => rsspage(basename($params{page})));
+ $ret.=$linktemplate->output;
+ }
+
+ my $template=HTML::Template->new(blind_cache => 1,
+ filename => (($params{archive} eq "no")
+ ? "$config{templatedir}/inlinepage.tmpl"
+ : "$config{templatedir}/inlinepagetitle.tmpl"));
+
+ my @pages;
+ foreach my $page (blog_list($params{pages}, $params{show})) {
+ next if $page eq $params{page};
+ push @pages, $page;
+ $template->param(pagelink => htmllink($params{page}, $page));
+ $template->param(content => get_inline_content($params{page}, $page))
+ if $params{archive} eq "no";
+ $template->param(ctime => scalar(gmtime($pagectime{$page})));
+ $ret.=$template->output;
+ }
+
+ # TODO: should really add this to renderedfiles and call
+ # check_overwrite, but currently renderedfiles
+ # only supports listing one file per page.
+ if ($config{rss}) {
+ writefile(rsspage($params{page}), $config{destdir},
+ genrss($params{page}, @pages));
+ }
+
+ return $ret;
+} #}}}
+
+sub blog_list ($$) { #{{{
+ my $globlist=shift;
+ my $maxitems=shift;
+
+ my @list;
+ foreach my $page (keys %pagesources) {
+ if (globlist_match($page, $globlist)) {
+ push @list, $page;
+ }
+ }
+
+ @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
+ return @list if ! $maxitems || @list <= $maxitems;
+ return @list[0..$maxitems - 1];
+} #}}}
+
+sub get_inline_content ($$) { #{{{
+ my $parentpage=shift;
+ my $page=shift;
+
+ my $file=$pagesources{$page};
+ my $type=pagetype($file);
+ if ($type ne 'unknown') {
+ return htmlize($type, linkify(readfile(srcfile($file)), $parentpage));
+ }
+ else {
+ return "";
+ }
+} #}}}
+
+sub date_822 ($) { #{{{
+ my $time=shift;
+
+ eval q{use POSIX};
+ return POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
+} #}}}
+
+sub absolute_urls ($$) { #{{{
+ # sucky sub because rss sucks
+ my $content=shift;
+ my $url=shift;
+
+ $url=~s/[^\/]+$//;
+
+ $content=~s/<a\s+href="(?!http:\/\/)([^"]+)"/<a href="$url$1"/ig;
+ $content=~s/<img\s+src="(?!http:\/\/)([^"]+)"/<img src="$url$1"/ig;
+ return $content;
+} #}}}
+
+sub rsspage ($) { #{{{
+ my $page=shift;
+
+ return $page.".rss";
+} #}}}
+
+sub genrss ($@) { #{{{
+ my $page=shift;
+ my @pages=@_;
+
+ my $url="$config{url}/".htmlpage($page);
+
+ my $template=HTML::Template->new(blind_cache => 1,
+ filename => "$config{templatedir}/rsspage.tmpl");
+
+ my @items;
+ foreach my $p (@pages) {
+ push @items, {
+ itemtitle => pagetitle(basename($p)),
+ itemurl => "$config{url}/$renderedfiles{$p}",
+ itempubdate => date_822($pagectime{$p}),
+ itemcontent => absolute_urls(get_inline_content($page, $p), $url),
+ } if exists $renderedfiles{$p};
+ }
+
+ $template->param(
+ title => $config{wikiname},
+ pageurl => $url,
+ items => \@items,
+ );
+
+ return $template->output;
+} #}}}
+
+1
--- /dev/null
+#!/usr/bin/perl
+# Provides [[pagecount ]] to count the number of pages.
+package IkiWiki::Plugin::pagecount;
+
+use warnings;
+use strict;
+
+sub import { #{{{
+ IkiWiki::register_plugin("preprocess", "pagecount", \&preprocess);
+} # }}}
+
+sub preprocess (@) { #{{{
+ my %params=@_;
+ $params{pages}="*" unless defined $params{pages};
+
+ # Needs to update count whenever a page is added or removed, so
+ # register a dependency.
+ IkiWiki::add_depends($params{page}, $params{pages});
+
+ my @pages=keys %IkiWiki::pagesources;
+ return $#pages+1 if $params{pages} eq "*"; # optimisation
+ my $count=0;
+ foreach my $page (@pages) {
+ $count++ if IkiWiki::globlist_match($page, $params{pages});
+ }
+ return $count;
+} # }}}
+
+1
--- /dev/null
+#!/usr/bin/perl
+# Ikiwiki skeleton plugin. Replace "skeleton" with the name of your plugin
+# in the lines below, and flesh out the methods to make it do something.
+package IkiWiki::Plugin::skeleton;
+
+use warnings;
+use strict;
+
+sub import { #{{{
+ IkiWiki::register_plugin("preprocess", "skeleton", \&preprocess);
+} # }}}
+
+sub preprocess (@) { #{{{
+ my %params=@_;
+
+ return "skeleton plugin result";
+} # }}}
+
+1
return @ret;
} #}}}
-sub rsspage ($) { #{{{
- my $page=shift;
-
- return $page.".rss";
-} #}}}
-
sub preprocess ($$) { #{{{
my $page=shift;
my $content=shift;
- my %commands=(inline => \&preprocess_inline);
-
my $handle=sub {
my $escape=shift;
my $command=shift;
if (length $escape) {
return "[[$command $params]]";
}
- elsif (exists $commands{$command}) {
+ elsif (exists $plugins{preprocess}{$command}) {
my %params;
while ($params =~ /(\w+)=\"([^"]+)"(\s+|$)/g) {
$params{$1}=$2;
}
- return $commands{$command}->(page => $page, %params);
+ return $plugins{preprocess}{$command}->(page => $page, %params);
}
else {
return "[[bad directive $command]]";
return $content;
} #}}}
-sub blog_list ($$) { #{{{
- my $globlist=shift;
- my $maxitems=shift;
-
- my @list;
- foreach my $page (keys %pagesources) {
- if (globlist_match($page, $globlist)) {
- push @list, $page;
- }
- }
-
- @list=sort { $pagectime{$b} <=> $pagectime{$a} } @list;
- return @list if ! $maxitems || @list <= $maxitems;
- return @list[0..$maxitems - 1];
-} #}}}
-
-sub get_inline_content ($$) { #{{{
- my $parentpage=shift;
+sub add_depends ($$) { #{{{
my $page=shift;
+ my $globlist=shift;
- my $file=$pagesources{$page};
- my $type=pagetype($file);
- if ($type ne 'unknown') {
- return htmlize($type, linkify(readfile(srcfile($file)), $parentpage));
- }
- else {
- return "";
- }
-} #}}}
-
-sub preprocess_inline ($@) { #{{{
- my %params=@_;
-
- if (! exists $params{pages}) {
- return "";
- }
- if (! exists $params{archive}) {
- $params{archive}="no";
- }
- if (! exists $params{show} && $params{archive} eq "no") {
- $params{show}=10;
- }
- if (! exists $depends{$params{page}}) {
- $depends{$params{page}}=$params{pages};
+ if (! exists $depends{$page}) {
+ $depends{$page}=$globlist;
}
else {
- $depends{$params{page}}.=" ".$params{pages};
- }
-
- my $ret="";
-
- if (exists $params{rootpage}) {
- # Add a blog post form, with a rss link button.
- my $formtemplate=HTML::Template->new(blind_cache => 1,
- filename => "$config{templatedir}/blogpost.tmpl");
- $formtemplate->param(cgiurl => $config{cgiurl});
- $formtemplate->param(rootpage => $params{rootpage});
- if ($config{rss}) {
- $formtemplate->param(rssurl => rsspage(basename($params{page})));
- }
- $ret.=$formtemplate->output;
- }
- elsif ($config{rss}) {
- # Add a rss link button.
- my $linktemplate=HTML::Template->new(blind_cache => 1,
- filename => "$config{templatedir}/rsslink.tmpl");
- $linktemplate->param(rssurl => rsspage(basename($params{page})));
- $ret.=$linktemplate->output;
- }
-
- my $template=HTML::Template->new(blind_cache => 1,
- filename => (($params{archive} eq "no")
- ? "$config{templatedir}/inlinepage.tmpl"
- : "$config{templatedir}/inlinepagetitle.tmpl"));
-
- my @pages;
- foreach my $page (blog_list($params{pages}, $params{show})) {
- next if $page eq $params{page};
- push @pages, $page;
- $template->param(pagelink => htmllink($params{page}, $page));
- $template->param(content => get_inline_content($params{page}, $page))
- if $params{archive} eq "no";
- $template->param(ctime => scalar(gmtime($pagectime{$page})));
- $ret.=$template->output;
- }
-
- # TODO: should really add this to renderedfiles and call
- # check_overwrite, but currently renderedfiles
- # only supports listing one file per page.
- if ($config{rss}) {
- writefile(rsspage($params{page}), $config{destdir},
- genrss($params{page}, @pages));
+ $depends{$page}.=" ".$globlist;
}
-
- return $ret;
-} #}}}
+} # }}}
sub genpage ($$$) { #{{{
my $content=shift;
return $template->output;
} #}}}
-sub date_822 ($) { #{{{
- my $time=shift;
-
- eval q{use POSIX};
- return POSIX::strftime("%a, %d %b %Y %H:%M:%S %z", localtime($time));
-} #}}}
-
-sub absolute_urls ($$) { #{{{
- # sucky sub because rss sucks
- my $content=shift;
- my $url=shift;
-
- $url=~s/[^\/]+$//;
-
- $content=~s/<a\s+href="(?!http:\/\/)([^"]+)"/<a href="$url$1"/ig;
- $content=~s/<img\s+src="(?!http:\/\/)([^"]+)"/<img src="$url$1"/ig;
- return $content;
-} #}}}
-
-sub genrss ($@) { #{{{
- my $page=shift;
- my @pages=@_;
-
- my $url="$config{url}/".htmlpage($page);
-
- my $template=HTML::Template->new(blind_cache => 1,
- filename => "$config{templatedir}/rsspage.tmpl");
-
- my @items;
- foreach my $p (@pages) {
- push @items, {
- itemtitle => pagetitle(basename($p)),
- itemurl => "$config{url}/$renderedfiles{$p}",
- itempubdate => date_822($pagectime{$p}),
- itemcontent => absolute_urls(get_inline_content($page, $p), $url),
- } if exists $renderedfiles{$p};
- }
-
- $template->param(
- title => $config{wikiname},
- pageurl => $url,
- items => \@items,
- );
-
- return $template->output;
-} #}}}
-
sub check_overwrite ($$) { #{{{
# Important security check. Make sure to call this before saving
# any files to the source directory.
else {
my $content=readfile($srcfile, 1);
$links{$file}=[];
+ delete $depends{$file};
check_overwrite("$config{destdir}/$file", $file);
writefile($file, $config{destdir}, $content, 1);
$oldpagemtime{$file}=time;
my $p=pagename($f);
if (exists $depends{$p}) {
foreach my $file (keys %rendered, @del) {
+ next if $f eq $file;
my $page=pagename($file);
if (globlist_match($page, $depends{$p})) {
debug("rendering $f, which depends on $page");
if (exists $links{$page}) {
foreach my $link (map { bestlink($page, $_) } @{$links{$page}}) {
if (length $link &&
- ! exists $oldlinks{$page} ||
- ! grep { $_ eq $link } @{$oldlinks{$page}}) {
+ (! exists $oldlinks{$page} ||
+ ! grep { bestlink($page, $_) eq $link } @{$oldlinks{$page}})) {
$linkchanged{$link}=1;
}
}
if (exists $oldlinks{$page}) {
foreach my $link (map { bestlink($page, $_) } @{$oldlinks{$page}}) {
if (length $link &&
- ! exists $links{$page} ||
- ! grep { $_ eq $link } @{$links{$page}}) {
+ (! exists $links{$page} ||
+ ! grep { bestlink($page, $_) eq $link } @{$links{$page}})) {
$linkchanged{$link}=1;
}
}
extra_build:
./ikiwiki doc html --templatedir=templates --underlaydir=basewiki \
- --wikiname="ikiwiki" --verbose --nosvn --exclude=/discussion
+ --wikiname="ikiwiki" --verbose --nosvn \
+ --exclude=/discussion --plugin=brokenlinks \
+ --plugin=pagecount
./mdwn2man doc/usage.mdwn > ikiwiki.man
extra_clean:
This gets expanded before the rest of the page is processed, and can be used
to transform the page in various ways.
-Currently, these preprocessor directives are available:
+Note that if a preprocessor directive has no parameters, a space still must
+be put after its name, to avoid confusion with a [[WikiLink]]. For example:
-* "inline" to make a [[blog]]
+\\[[pagecount ]]
* Which means that more than one blog is now supported to appear on a
single page. (With some limitations, like only the last one getting an
rss file.)
+ * Added plugin system, currently only supporting for PreProcessorDirectives.
+ * Added a pagecount plugin, enabled by default.
+ * Support PreProcessorDirectives with no parameters, ie "[[pagecount ]]".
+ * Fixed/optimised backlinks code, to avoid rebuilding pages to update
+ backlinks when the backlinks hadn't really changed.
+ * Moved inline page support, rss generation etc into the inline plugin,
+ enabled by default.
+ * Added brokenlinks plugin, not enabled by default, but rather handy.
+ * Fix several broken links in the doc wiki.
- -- Joey Hess <joeyh@debian.org> Mon, 1 May 2006 18:21:16 -0400
+ -- Joey Hess <joeyh@debian.org> Mon, 1 May 2006 21:01:12 -0400
ikiwiki (1.0) unstable; urgency=low
adding/removing a page. For example, if Foo/Bar links to "Baz", which is
Foo/Baz, and Foo/Bar/Baz gets added, it will update the links in Foo/Bar
to point to it, but will forget to update the linkbacks in Foo/Baz.
- And if Foo/Bar/Baz is then removed, it forgets to update Foo/Bar to link
+
+* And if Foo/Bar/Baz is then removed, it forgets to update Foo/Bar to link
back to Foo/Baz.
- -- is this still true?
+ -- is this still true? (Yes (as of 1.0))
* If I try to do a web commit, to a svn+ssh repo, it fails with
"Host key verification failed."
line if --cgi is set, even if it's not yet running as a cgi
* if a page containing an rss feed happens to show up in an rss feed,
the preprocessor directives won't be expanded (good) but are left in
- raw rather than removed (bad)
+ raw rather than removed (bad).
* [[BackLinks]]
- Automatically included on pages. Rather faster than eg [[MoinMoin]] and always there to help with navigation.
+ Automatically included on pages. Rather faster than eg MoinMoin and
+ always there to help with navigation.
* [[PageHistory]]
ikiwiki can be configured to send you commit mails with diffs of changes
to selected pages.
+* [[Plugins]]
+
+ A plugin system allows extending ikiwiki in arbitrary ways.
+
----
It also has some [[TODO]] items and [[Bugs]].
#hyperestraier => 1,
# Sanitize html?
sanitize => 1,
+ # To change the enabled plugins, edit this list
+ #plugins => [qw{pagecount inline brokenlinks}],
}
--- /dev/null
+Plugins can be used to add additional features to ikiwiki. It's possible to
+[[write]] your own plugins, or you can install and use plugins contributed by
+others.
+
+The ikiwiki package includes some standard plugins that are installed
+by default. These include [[inline]], [[pagecount]], and [[brokenlinks]].
+Of these, [[inline]] is enabled by default.
+
+## Third party plugins
+
+Plugins are perl modules and should be installed somewhere in the perl
+module search path. See the @INC list at the end of the output of `perl -V`
+for a list of the directories in that path. All plugins are in the
+IkiWiki::Plugin namespace, so they go in a IkiWiki/Plugin subdirectory
+inside the perl search path. For example, if your perl looks in
+`/usr/local/lib/site_perl` for modules, you can locally install ikiwiki
+plugins to `/usr/local/lib/site_perl/IkiWiki/Plugin`
+
+Once a plugin is installed, you need to configure ikiwiki to load it using
+the `--plugin` switch described in [[usage]], or the equivalent line in
+[[ikiwiki.setup]].
+
+## Plugin directory
+
+Add your contributed plugins using this form:
+
+[[inline pages="plugins/* !plugins/write !*/Discussion" archive="yes" rootpage="plugins/contrib" show="30"]]
--- /dev/null
+This plugin generates a list of broken links on pages in the wiki. This is
+a useful way to find pages that still need to be written, or links that
+are written wrong.
+
+The optional parameter "pages" can be a [[GlobList]] specifying the pages
+to search for broken links; default is to search them all.
+
+This plugin is included in ikiwiki, but is not enabled by default.
+If it is turned on, here's a list of broken links on this wiki:
+
+[[brokenlinks ]]
--- /dev/null
+Allows including one wiki page inside another, generating blogs and RSS
+feeds. See [[blog]] for details.
+
+This plugin is enabled by default.
--- /dev/null
+Provides a \\[[pagecount ]] [[PreProcessorDirective]] that is replaced with
+the total number of pages currently in the wiki.
+
+The optional parameter "pages" can be a [[GlobList]] specifying the pages
+to count, default is to count them all.
+
+This plugin is included in ikiwiki, but is not enabled by default.
+
+If it is turned on it can tell us that this wiki includes
+[[pagecount ]] pages, of which [[pagecount pages="*/Discussion"]] are
+discussion pages.
--- /dev/null
+ikiwiki [[plugins]] are written in perl. Each plugin is a perl module, in
+the `IkiWiki::Plugin` namespace. The name of the plugin is typically in
+lowercase, such as `IkiWiki::Plugin::inline`. Ikiwiki includes a
+`IkiWiki::Plugin::skeleton` that can be fleshed out to make a useful
+plugin. `IkiWiki::Plugin::pagecount` is another simple example.
+
+## Note
+
+One thing to keep in mind when writing a plugin is that ikiwiki is a wiki
+*compiler*. So plugins influence pages when they are built, not when they
+are loaded. A plugin that inserts the current time into a page, for
+example, will insert the build time. Also, as a compiler, ikiwiki avoids
+rebuilding pages unless they have changed, so a plugin that prints some
+random or changing thing on a page will generate a static page that won't
+change until ikiwiki rebuilds the page for some other reason, like the page
+being edited.
+
+## Registering plugins
+
+Plugins should, when imported, call IkiWiki::register_plugin to hook into
+ikiwiki. The function takes three parameters:
+
+1. A method type. Use "preprocess" to register a [[PreProcessorDirective]]
+2. A command name. This is the bit that will appear inside brackets in a
+ page.
+3. A reference to a subroutine that is run when the plugin is used.
+
+## Writing a [[PreProcessorDirective]]
+
+For preprocessor commands, the subroutine is passed named parameters. A
+"page" parameter gives the name of the page that embedded the preprocessor
+command. All parameters included in the preprocessor command are included
+as named parameters as well. Whatever the subroutine returns goes onto the
+page in place of the command.
+
+## Error handing in plugins
+
+While a plugin can call ikiwiki's error routine for a fatal error, for
+errors that aren't intended to halt the entire wiki build, including bad
+parameters passed to a [[PreProcessorDirective]], etc, it's better to just
+return the error message as the output of the plugin.
+
+## Html issues
+
+Note that if [[HTMLSanitization]] is enabled, html in
+[[PreProcessorDirective]] output is sanitised, which may limit what your
+plugin can do. Also, the rest of the page content is not in html format at
+preprocessor time.
+
+## Wiki configuration
+
+A plugin can access the wiki's configuration via the `%IkiWiki::config` hash.
+The best way to understand the contents of the hash is to look at
+[[ikiwiki.setup]], which sets the hash content to configure the wiki.
+
+## Wiki data
+
+If your plugin needs to access data about other pages in the wiki, it can
+use the following hashes, using a page name as the key:
+
+* `%IkiWiki::links` lists the names of each page
+ that is linked to from that page in an array reference.
+* `%IkiWiki::pagemtime` contains the last modification time of each page
+* `%IkiWiki::pagectime` contains the creation time of each page
+* `%IkiWiki::renderedfiles` contains the name of the file rendered by a
+ page
+* `%IkiWiki::pagesources` contains the name of the source file for a page.
+* `%IkiWiki::depends` contains a [[GlobList]] that is used to specify other
+ pages that a page depends on. If one of its dependencies is updated, the
+ page will also get rebuilt.
+
+ Many plugins will need to add dependencies to this hash; the best way to do
+ it is by using the IkiWiki::add_depends function, which takes as its
+ parameters the page name and a [[GlobList]] of dependencies to add.
A post-commit hook is run every time you commit a change to your subversion repository. To make the wiki be updated each time a commit is made, it can be run from (or as) a post-commit hook.
The best way to run ikiwiki in a [[Subversion]] post-commit hook is using
-a [[wrapper]], which can be generated using `ikiwiki --wrapper`.
+a wrapper, which can be generated using `ikiwiki --wrapper`.
First, set up the subversion checkout that ikiwiki will update and compile
into your wiki at each subversion commit. Run ikiwiki a few times by hand
## page locking can be bypassed via direct svn commits
-A [[lock]]ed page can only be edited on the web by an admin, but
+A locked page can only be edited on the web by an admin, but
anyone who is allowed to commit direct to svn can bypass this. This is by
design, although a subversion pre-commit hook could be used to prevent
editing of locked pages when using subversion, if you really need to.
# Full list of open items:
[[inline pages="todo/* !todo/done* !*/Discussion" archive="yes"]]
-
-----
-
-Test:
-
-[[inline pages="news/* !*/Discussion" rootpage="news" show="30"]]
* list of all missing pages
- Could be a [[plugin]].
+ done
* list of registered users, with the names being links to any userpages.
- Could be implemented with a [[preprocessordirective]], which suggests that there needs to be some sort of plugin interface for new preprocessordirectives. Although, how to let the wiki know that the page needs an update whever a new user is added?
\ No newline at end of file
+ Might be a plugin, but how to let the wiki know that the page
+ needs an update whenever a new user is added?
-For one type of plugin, see [[todo/PluggableRenderers]].
-
A plugin system should ideally support things like:
* [[todo/lists]] of pages, of mising pages / broken links, of registered users, etc
* would it be useful to reimplement the hyperestradier search integration as a plugin?
* Support [[RecentChanges]] as a regular page containing a plugin that updates each time there is a change, and statically builds the recent changes list. (Would this be too expensive/inflexible? There might be other ways to do it as a plugin, like making all links to RecentChanges link to the cgi and have the cgi render it on demand.)
* etc
+* For another type of plugin, see [[todo/PluggableRenderers]].
-Another, separate plugin system that already (mostly) exists in ikiwiki is the RCS backend, which allows writing modules to drive other RCS systems than subversion.
+Another, separate plugin system that already (mostly) exists in ikiwiki is
+the RCS backend, which allows writing modules to drive other RCS systems
+than subversion.
## preprocessor plugins
-Considering ikiwiki plugins, one idea I have is to make the [[PreProcessorDirective]]s be a plugin. A setting in the config file would enable various plusins, which are perl modules, that each provide one or more preprocessor directives.
-
-Since preprocessing happens before htmlization but after a page is loaded and linkified, it should be possible to use it to create something like a link map or lists, or a page index. Page inlining and rss generation is already done via preprocessor directives and seems a natureal as a plugin too.
-
-Note that things like a link map or a broken link list page would need to
-be updated whenever a set (or all) pages change; the %depends hash
-already allows for pages to register this, although there could be some
-strange behavior if mixing multiple directives some of which exclude pages
-that others might want to include.
-
-I need to look at the full range of things that other wikis use their plugin systems for, but preprocessor directives as plugins certianly seems useful, even if it's not a complete solution.
+done
## case study: Moin Moin plugins
* --wrapper [file]
- Generate a [[wrapper]] binary that is hardcoded to do action specified by
+ Generate a wrapper binary that is hardcoded to do action specified by
the other options, using the specified input files and `destination`
directory. The filename to use for the wrapper is optional.
The name of the wiki, default is "wiki".
-* --fixctime
-
- Pull last changed time for all pages out of the revision control system.
- This rarely used option provides a way to get the real creation times of
- items in weblogs, for example when building a wiki from a new subversion
- checkout. It is unoptimised and quite slow.
-
* --templatedir
Specify the directory that the page [[templates]] are stored in.
* --rss, --norss
If rss is set, ikiwiki will generate rss feeds for pages that inline
- a blog.
+ a [[blog]].
* --url http://url/
* --cgiurl http://url/ikiwiki.cgi
- Specifies the url to the ikiwiki [[CGI]] script [[wrapper]]. Required when building the wiki for links to the cgi script to be generated.
+ Specifies the url to the ikiwiki [[CGI]] script wrapper. Required when
+ building the wiki for links to the cgi script to be generated.
* --historyurl http://url/trunk/\[[file]]?root=wiki
Enable [[HtmlSanitization]] of wiki content. On by default, disable with
--no-sanitize.
+* --plugin name
+
+ Enables the use of the specified plugin in the wiki. See [[plugins]] for
+ details. Note that plugin names are case sensitive.
+
* --hyperestraier
Enables use of the [[HyperEstraier]] search engine for full test page
* --verbose
- Be vebose about what it's doing.
+ Be verbose about what is being done.
+
+* --fixctime
+
+ Pull last changed time for all pages out of the revision control system.
+ This rarely used option provides a way to get the real creation times of
+ items in weblogs, for example when building a wiki from a new subversion
+ checkout. It is unoptimised and quite slow.
# AUTHOR
use lib '.'; # For use without installation, removed by Makefile.
use vars qw{%config %links %oldlinks %oldpagemtime %pagectime
- %renderedfiles %pagesources %depends};
+ %renderedfiles %pagesources %depends %plugins};
sub usage () { #{{{
die "usage: ikiwiki [options] source dest\n";
%config=(
wiki_file_prune_regexp => qr{((^|/).svn/|\.\.|^\.|\/\.|\.html?$|\.rss$)},
wiki_link_regexp => qr/\[\[(?:([^\s\]\|]+)\|)?([^\s\]]+)\]\]/,
- wiki_processor_regexp => qr/\[\[(\w+)\s+([^\]]+)\]\]/,
+ wiki_processor_regexp => qr/\[\[(\w+)\s+([^\]]*)\]\]/,
wiki_file_regexp => qr/(^[-[:alnum:]_.:\/+]+$)/,
verbose => 0,
wikiname => "wiki",
setup => undef,
adminuser => undef,
adminemail => undef,
+ plugin => [qw{inline}],
);
eval q{use Getopt::Long};
"wrapper:s" => sub {
$config{wrapper}=$_[1] ? $_[1] : "ikiwiki-wrap"
},
+ "plugin=s@" => sub {
+ push @{$config{plugin}}, $_[1];
+ }
) || usage();
if (! $config{setup}) {
require IkiWiki::Rcs::Stub;
$config{rcs}=0;
}
+
+ foreach my $plugin (@{$config{plugin}}) {
+ $plugin="IkiWiki::Plugin::".possibly_foolish_untaint($plugin);
+ eval qq{use $plugin};
+ if ($@) {
+ error("Failed to load plugin $plugin: $@");
+ }
+ }
} #}}}
sub error ($) { #{{{
return 0;
} #}}}
+sub register_plugin ($$$) { # {{{
+ my $type=shift;
+ my $command=shift;
+ my $function=shift;
+
+ $plugins{$type}{$command}=$function;
+} # }}}
+
sub main () { #{{{
getconfig();