meta: Pass info to htmlscrubber so htmlscrubber_skip can take effect.
author Joey Hess <joey@kodama.kitenet.net>
Fri, 12 Dec 2008 19:06:45 +0000 (14:06 -0500)
committer Joey Hess <joey@kodama.kitenet.net>
Fri, 12 Dec 2008 19:06:45 +0000 (14:06 -0500)
IkiWiki/Plugin/meta.pm
debian/changelog
doc/bugs/Meta_plugin_does_not_respect_htmlscrubber__95__skip_setting.___40__patch__41__.mdwn

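The htmlscrubber plugin's sanitize hook can only honour the htmlscrubber_skip pagespec when it knows which page the output is destined for. Previously meta.pm's scrub() passed only the markup itself, so the skip setting never applied to headers generated by meta directives; the change below threads $destpage through to sanitize. For orientation, here is a rough sketch of the kind of check sanitize is assumed to perform (the real code in htmlscrubber.pm may differ; scrubber() and the exact use of the destpage field are assumptions inferred from this commit):

    package IkiWiki::Plugin::htmlscrubber;
    use warnings;
    use strict;
    use IkiWiki 2.00;   # provides %config and pagespec_match

    # Sketch only: approximate shape of the skip check in sanitize,
    # assuming the pagespec is matched against the destpage parameter.
    sub sanitize (@) {
        my %params=@_;

        if (exists $config{htmlscrubber_skip} &&
            length $config{htmlscrubber_skip} &&
            exists $params{destpage} &&
            pagespec_match($params{destpage}, $config{htmlscrubber_skip})) {
                # Destination page matches the skip pagespec:
                # return the HTML unscrubbed.
                return $params{content};
        }

        # Otherwise strip disallowed tags and attributes as usual
        # (scrubber() is assumed to return the configured HTML::Scrubber object).
        return scrubber()->scrub($params{content});
    }

Without a destpage in the call, that pagespec match can never succeed, which is why meta headers were scrubbed even on pages listed in htmlscrubber_skip.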
diff --git a/IkiWiki/Plugin/meta.pm b/IkiWiki/Plugin/meta.pm
index 8d444109f68dc304d6c055df1191b3efbc2b9b60..3991797c0fe6f7be305e9b3e799f5aaa7bdc7ae2 100644
@@ -38,9 +38,10 @@ sub needsbuild (@) { #{{{
        }
 }
 
-sub scrub ($) { #{{{
+sub scrub ($$) { #{{{
        if (IkiWiki::Plugin::htmlscrubber->can("sanitize")) {
-               return IkiWiki::Plugin::htmlscrubber::sanitize(content => shift);
+               return IkiWiki::Plugin::htmlscrubber::sanitize(
+                       content => shift, destpage => shift);
        }
        else {
                return shift;
@@ -137,7 +138,7 @@ sub preprocess (@) { #{{{
        elsif ($key eq 'permalink') {
                if (safeurl($value)) {
                        $pagestate{$page}{meta}{permalink}=$value;
-                       push @{$metaheaders{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />');
+                       push @{$metaheaders{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />', $destpage);
                }
        }
        elsif ($key eq 'stylesheet') {
@@ -206,7 +207,7 @@ sub preprocess (@) { #{{{
                my $delay=int(exists $params{delay} ? $params{delay} : 0);
                my $redir="<meta http-equiv=\"refresh\" content=\"$delay; URL=$value\" />";
                if (! $safe) {
-                       $redir=scrub($redir);
+                       $redir=scrub($redir, $destpage);
                }
                push @{$metaheaders{$page}}, $redir;
        }
@@ -216,7 +217,7 @@ sub preprocess (@) { #{{{
                                join(" ", map {
                                        encode_entities($_)."=\"".encode_entities(decode_entities($params{$_}))."\""
                                } keys %params).
-                               " />\n");
+                               " />\n", $destpage);
                }
        }
        elsif ($key eq 'robots') {
@@ -225,7 +226,7 @@ sub preprocess (@) { #{{{
        }
        else {
                push @{$metaheaders{$page}}, scrub('<meta name="'.encode_entities($key).
-                       '" content="'.encode_entities($value).'" />');
+                       '" content="'.encode_entities($value).'" />', $destpage);
        }
 
        return "";
diff --git a/debian/changelog b/debian/changelog
index 1ff78d7493af49d7d9d8580adb37ac92f94cfe5f..bf14860dd1251e99ef0e1f865e3a2f01c09814d7 100644
@@ -13,6 +13,7 @@ ikiwiki (2.71) UNRELEASED; urgency=low
   * inline: Support emptyfeeds=no option to skip generating empty feeds.
   * inline: Support feedfile option to change the filename of the feed
     generated.
+  * meta: Pass info to htmlscrubber so htmlscrubber_skip can take effect.
 
  -- Joey Hess <joeyh@debian.org>  Mon, 17 Nov 2008 14:02:10 -0500
 
diff --git a/doc/bugs/Meta_plugin_does_not_respect_htmlscrubber__95__skip_setting.___40__patch__41__.mdwn b/doc/bugs/Meta_plugin_does_not_respect_htmlscrubber__95__skip_setting.___40__patch__41__.mdwn
index 9d67d6662a23bc281f283ffb57f8035f7087f18f..0e40da551df68435eea96978ef21de0df1aff075 100644
@@ -8,55 +8,4 @@ Setting htmlscrubber_skip to the pagespec should stop this getting scrubbed but
 
 Below is a patch to fix that. It seems to work, but I am not sure if it is the correct thing to do.
 
---- meta.pm    2008-12-11 17:50:33.000000000 +0000
-+++ meta.pm.orig       2008-12-10 17:41:23.000000000 +0000
-@@ -38,9 +38,10 @@
-       }
- }
--sub scrub (@) { #{{{
-+sub scrub ($) { #{{{
-       if (IkiWiki::Plugin::htmlscrubber->can("sanitize")) {
--              return IkiWiki::Plugin::htmlscrubber::sanitize(content => shift, destpage => shift);
-+              #return IkiWiki::Plugin::htmlscrubber::sanitize(content => shift);
-+              return shift;
-       }
-       else {
-               return shift;
-@@ -137,7 +138,7 @@
-       elsif ($key eq 'permalink') {
-               if (safeurl($value)) {
-                       $pagestate{$page}{meta}{permalink}=$value;
--                      push @{$metaheaders{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />', $page);
-+                      push @{$metaheaders{$page}}, scrub('<link rel="bookmark" href="'.encode_entities($value).'" />');
-               }
-       }
-       elsif ($key eq 'stylesheet') {
-@@ -206,7 +207,7 @@
-               my $delay=int(exists $params{delay} ? $params{delay} : 0);
-               my $redir="<meta http-equiv=\"refresh\" content=\"$delay; URL=$value\" />";
-               if (! $safe) {
--                      $redir=scrub($redir, $page);
-+                      $redir=scrub($redir);
-               }
-               push @{$metaheaders{$page}}, $redir;
-       }
-@@ -216,7 +217,7 @@
-                               join(" ", map {
-                                       encode_entities($_)."=\"".encode_entities(decode_entities($params{$_}))."\""
-                               } keys %params).
--                              " />\n", $page);
-+                              " />\n");
-               }
-       }
-       elsif ($key eq 'robots') {
-@@ -225,7 +226,7 @@
-       }
-       else {
-               push @{$metaheaders{$page}}, scrub('<meta name="'.encode_entities($key).
--                      '" content="'.encode_entities($value).'" />', $page);
-+                      '" content="'.encode_entities($value).'" />');
-       }
-       return "";
-
+> [[done]], thanks for the patch --[[Joey]]
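
With this change in place, htmlscrubber_skip covers headers emitted by meta directives as well as ordinary page content. A minimal illustration of the setting in an ikiwiki setup file (the pagespec below is only an example):

    # ikiwiki.setup excerpt: skip HTML scrubbing for pages matching this pagespec
    htmlscrubber_skip => 'posts/* or sandbox',

On a page matched by that pagespec, a directive such as [[!meta permalink="http://example.com/posts/hello/"]] now has its generated <link rel="bookmark" /> header passed through to the output unscrubbed.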