Implement PORTAGE_CHECKSUM_FILTER for bug #432170
author     Zac Medico <zmedico@gentoo.org>
           Tue, 21 Aug 2012 20:22:19 +0000 (13:22 -0700)
committer  Zac Medico <zmedico@gentoo.org>
           Tue, 21 Aug 2012 20:22:19 +0000 (13:22 -0700)
man/make.conf.5
pym/_emerge/EbuildFetcher.py
pym/portage/checksum.py
pym/portage/dbapi/bintree.py
pym/portage/manifest.py
pym/portage/package/ebuild/_config/special_env_vars.py
pym/portage/package/ebuild/digestcheck.py
pym/portage/package/ebuild/fetch.py

diff --git a/man/make.conf.5 b/man/make.conf.5
index c617fbcfe36fb704a111abddaeb0099790496957..59f32b87caf4ed1f55e2d4821dbd22e1e11a4bfc 100644
@@ -1,4 +1,4 @@
-.TH "MAKE.CONF" "5" "Jul 2012" "Portage VERSION" "Portage"
+.TH "MAKE.CONF" "5" "Aug 2012" "Portage VERSION" "Portage"
 .SH "NAME"
 make.conf \- custom settings for Portage
 .SH "SYNOPSIS"
@@ -697,6 +697,23 @@ for bzip2 compression operations. \fBPORTAGE_BZIP2_COMMAND\fR will also be
 called for extraction operation, with -d appended, unless the
 \fBPORTAGE_BUNZIP2_COMMAND\fR variable is set.
 .TP
+\fBPORTAGE_CHECKSUM_FILTER\fR = \fI[space delimited list of hash names]\fR
+This variable may be used to filter the hash functions that are used to
+verify the integrity of files. Hash function names are case\-insensitive, and
+the \fI*\fR and \fI\-*\fR wildcard tokens are supported.
+.br
+Defaults to the value of *.
+.br
+.I Examples:
+.nf
+# Use all available hash functions
+PORTAGE_CHECKSUM_FILTER="*"
+# Use any function except whirlpool
+PORTAGE_CHECKSUM_FILTER="* \-whirlpool"
+# Only use sha256
+PORTAGE_CHECKSUM_FILTER="\-* sha256"
+.fi
+.TP
 \fBPORTAGE_COMPRESS\fR = \fI"bzip2"\fR
 This variable contains the command used to compress documentation during the
 install phase.
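
The semantics documented above are implemented by the _hash_filter class added to pym/portage/checksum.py further down in this commit: tokens are matched case-insensitively against each hash name, and the rightmost matching token decides whether that hash is used. The sketch below is illustrative only; it assumes a Python environment with this patch applied, and the uppercase hash names (SHA256, RMD160, WHIRLPOOL) are examples of Portage's digest keys rather than part of the patch.

from portage.checksum import _hash_filter

# PORTAGE_CHECKSUM_FILTER="*" is transparent: every hash is accepted.
assert _hash_filter("*")("WHIRLPOOL")

# "* -whirlpool": accept everything except WHIRLPOOL.
f = _hash_filter("* -whirlpool")
assert f("SHA256")
assert not f("WHIRLPOOL")

# "-* sha256": reject everything except SHA256.
f = _hash_filter("-* sha256")
assert f("SHA256")
assert not f("RMD160")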
diff --git a/pym/_emerge/EbuildFetcher.py b/pym/_emerge/EbuildFetcher.py
index c0a7fddaa635dd842067d7289509c3f200bd5612..bbcb6a9d28e98d86025710fa17120bf612aa9be3 100644
@@ -13,6 +13,7 @@ from portage import os
 from portage import _encodings
 from portage import _unicode_encode
 from portage import _unicode_decode
+from portage.checksum import _hash_filter
 from portage.elog.messages import eerror
 from portage.package.ebuild.fetch import _check_distfile, fetch
 from portage.util._pty import _create_pty_or_pipe
@@ -57,6 +58,7 @@ class EbuildFetcher(SpawnProcess):
                        if st.st_size != expected_size:
                                return False
 
+               hash_filter = _hash_filter(settings.get("PORTAGE_CHECKSUM_FILTER", ""))
                stdout_orig = sys.stdout
                stderr_orig = sys.stderr
                global_havecolor = portage.output.havecolor
@@ -78,7 +80,7 @@ class EbuildFetcher(SpawnProcess):
                                                break
                                        continue
                                ok, st = _check_distfile(os.path.join(distdir, filename),
-                                       mydigests, eout, show_errors=False)
+                                       mydigests, eout, show_errors=False, hash_filter=hash_filter)
                                if not ok:
                                        success = False
                                        break
diff --git a/pym/portage/checksum.py b/pym/portage/checksum.py
index daf4a0cbf037c510205d7099c8a7817313dc0fdf..de4cc668a4d84ceeda0112347de3a6d557e96cd8 100644
@@ -217,6 +217,60 @@ def _filter_unaccelarated_hashes(digests):
 
        return digests
 
+class _hash_filter(object):
+       """
+       Implements filtering for PORTAGE_CHECKSUM_FILTER.
+       """
+
+       __slots__ = ('transparent', '_tokens',)
+
+       def __init__(self, filter_str):
+               tokens = filter_str.upper().split()
+               if not tokens or tokens[-1] == "*":
+                       del tokens[:]
+               self.transparent = not tokens
+               tokens.reverse()
+               self._tokens = tuple(tokens)
+
+       def __call__(self, hash_name):
+               if self.transparent:
+                       return True
+               matches = ("*", hash_name)
+               for token in self._tokens:
+                       if token in matches:
+                               return True
+                       elif token[:1] == "-":
+                               if token[1:] in matches:
+                                       return False
+               return False
+
+def _apply_hash_filter(digests, hash_filter):
+       """
+       Return a new dict containing the filtered digests, or the same
+       dict if no changes are necessary. This will always preserve
+       at least one digest, in order to ensure that they are not all
+       discarded.
+       """
+       if hash_filter.transparent:
+               return digests
+
+       verifiable_hash_types = set(digests).intersection(hashfunc_map)
+       verifiable_hash_types.discard("size")
+       modified = False
+       if len(verifiable_hash_types) > 1:
+               for k in list(verifiable_hash_types):
+                       if not hash_filter(k):
+                               modified = True
+                               verifiable_hash_types.remove(k)
+                               if len(verifiable_hash_types) == 1:
+                                       break
+
+       if modified:
+               digests = dict((k, v) for (k, v) in digests.items()
+                       if k == "size" or k in verifiable_hash_types)
+
+       return digests
+
 def verify_all(filename, mydict, calc_prelink=0, strict=0):
        """
        Verify all checksums against a file.
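
As a rough illustration of _apply_hash_filter() above, here is a hypothetical digest dict of the kind produced from a Manifest. The values are made up, and the example assumes SHA256, RMD160 and WHIRLPOOL are all present in hashfunc_map on the running system, which depends on the installed hash providers.

from portage.checksum import _apply_hash_filter, _hash_filter

digests = {
    "size": 1024,               # made-up values, for illustration only
    "SHA256": "0" * 64,
    "RMD160": "0" * 40,
    "WHIRLPOOL": "0" * 128,
}

# Keep only SHA256; the "size" entry is always carried through.
filtered = _apply_hash_filter(digests, _hash_filter("-* sha256"))
# filtered == {"size": 1024, "SHA256": "0" * 64}

# Even a filter that rejects every hash leaves one digest behind, so the
# filter can never disable verification entirely.
filtered = _apply_hash_filter(digests, _hash_filter("-*"))
# filtered contains "size" plus exactly one of the original hashes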
diff --git a/pym/portage/dbapi/bintree.py b/pym/portage/dbapi/bintree.py
index 6c01867b6b7093043c511626f39d0dbb365a552c..a2fd5eabe08237f0e982a84bcd279501998706d9 100644
@@ -5,7 +5,8 @@ __all__ = ["bindbapi", "binarytree"]
 
 import portage
 portage.proxy.lazyimport.lazyimport(globals(),
-       'portage.checksum:hashfunc_map,perform_multiple_checksums,verify_all',
+       'portage.checksum:hashfunc_map,perform_multiple_checksums,' + \
+               'verify_all,_apply_hash_filter,_hash_filter',
        'portage.dbapi.dep_expand:dep_expand',
        'portage.dep:dep_getkey,isjustname,isvalidatom,match_from_list',
        'portage.output:EOutput,colorize',
@@ -1462,6 +1463,9 @@ class binarytree(object):
                if not digests:
                        return False
 
+               hash_filter = _hash_filter(
+                       self.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
+               digests = _apply_hash_filter(digests, hash_filter)
                eout = EOutput()
                eout.quiet = self.settings.get("PORTAGE_QUIET") == "1"
                ok, st = _check_distfile(pkg_path, digests, eout, show_errors=0)
diff --git a/pym/portage/manifest.py b/pym/portage/manifest.py
index a04b71780f4522bfb6094b4a82b2872b21a13f7c..b2f1ff2dcc976a65f975986c19386cd73663487e 100644
@@ -9,7 +9,7 @@ import warnings
 import portage
 portage.proxy.lazyimport.lazyimport(globals(),
        'portage.checksum:hashfunc_map,perform_multiple_checksums,' + \
-               'verify_all,_filter_unaccelarated_hashes',
+               'verify_all,_apply_hash_filter,_filter_unaccelarated_hashes',
        'portage.util:write_atomic',
 )
 
@@ -502,14 +502,17 @@ class Manifest(object):
                for t in MANIFEST2_IDENTIFIERS:
                        self.checkTypeHashes(t, ignoreMissingFiles=ignoreMissingFiles)
        
-       def checkTypeHashes(self, idtype, ignoreMissingFiles=False):
+       def checkTypeHashes(self, idtype, ignoreMissingFiles=False, hash_filter=None):
                for f in self.fhashdict[idtype]:
-                       self.checkFileHashes(idtype, f, ignoreMissing=ignoreMissingFiles)
+                       self.checkFileHashes(idtype, f, ignoreMissing=ignoreMissingFiles,
+                               hash_filter=hash_filter)
        
-       def checkFileHashes(self, ftype, fname, ignoreMissing=False):
+       def checkFileHashes(self, ftype, fname, ignoreMissing=False, hash_filter=None):
+               digests = _filter_unaccelarated_hashes(self.fhashdict[ftype][fname])
+               if hash_filter is not None:
+                       digests = _apply_hash_filter(digests, hash_filter)
                try:
-                       ok, reason = verify_all(self._getAbsname(ftype, fname),
-                               _filter_unaccelarated_hashes(self.fhashdict[ftype][fname]))
+                       ok, reason = verify_all(self._getAbsname(ftype, fname), digests)
                        if not ok:
                                raise DigestException(tuple([self._getAbsname(ftype, fname)]+list(reason)))
                        return ok, reason
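
With the new keyword in place, a caller can thread a filter through the Manifest checks, as digestcheck() does below. A hypothetical call sequence, assuming Manifest's usual (pkgdir, distdir) constructor; the package directory, DISTDIR and filter string are made up for illustration.

from portage.checksum import _hash_filter
from portage.manifest import Manifest

# Hypothetical locations: substitute a real ebuild directory and DISTDIR.
mf = Manifest("/usr/portage/app-misc/foo", "/usr/portage/distfiles")
hash_filter = _hash_filter("-* sha256")

# Verify every DIST entry of the package using only SHA256; a mismatch
# raises portage.exception.DigestException.
mf.checkTypeHashes("DIST", hash_filter=hash_filter)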
diff --git a/pym/portage/package/ebuild/_config/special_env_vars.py b/pym/portage/package/ebuild/_config/special_env_vars.py
index 6ed6d05423ef5172336bb0763210bde28dfbc0ce..c974eb92a819aa9e691ffea8691561e06e17c4ee 100644
@@ -150,6 +150,7 @@ environ_filter += [
        "PORTAGE_BACKGROUND", "PORTAGE_BACKGROUND_UNMERGE",
        "PORTAGE_BINHOST",
        "PORTAGE_BINHOST_CHUNKSIZE", "PORTAGE_BUILDIR_LOCKED",
+       "PORTAGE_CHECKSUM_FILTER",
        "PORTAGE_ELOG_CLASSES",
        "PORTAGE_ELOG_MAILFROM", "PORTAGE_ELOG_MAILSUBJECT",
        "PORTAGE_ELOG_MAILURI", "PORTAGE_ELOG_SYSTEM",
diff --git a/pym/portage/package/ebuild/digestcheck.py b/pym/portage/package/ebuild/digestcheck.py
index 8705639d19bf2293bdd117dcff7a62224cf47325..1d59948d13d8d97b88c1a4f48240a0b1692110ab 100644
@@ -1,4 +1,4 @@
-# Copyright 2010-2011 Gentoo Foundation
+# Copyright 2010-2012 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2
 
 __all__ = ['digestcheck']
@@ -6,6 +6,7 @@ __all__ = ['digestcheck']
 import warnings
 
 from portage import os, _encodings, _unicode_decode
+from portage.checksum import _hash_filter
 from portage.exception import DigestException, FileNotFound
 from portage.localization import _
 from portage.output import EOutput
@@ -28,6 +29,7 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
        if mysettings.get("EBUILD_SKIP_MANIFEST") == "1":
                return 1
        pkgdir = mysettings["O"]
+       hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
        if mf is None:
                mf = mysettings.repositories.get_repo_for_location(
                        os.path.dirname(os.path.dirname(pkgdir)))
@@ -38,15 +40,16 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
                if not mf.thin and strict and "PORTAGE_PARALLEL_FETCHONLY" not in mysettings:
                        if mf.fhashdict.get("EBUILD"):
                                eout.ebegin(_("checking ebuild checksums ;-)"))
-                               mf.checkTypeHashes("EBUILD")
+                               mf.checkTypeHashes("EBUILD", hash_filter=hash_filter)
                                eout.eend(0)
                        if mf.fhashdict.get("AUX"):
                                eout.ebegin(_("checking auxfile checksums ;-)"))
-                               mf.checkTypeHashes("AUX")
+                               mf.checkTypeHashes("AUX", hash_filter=hash_filter)
                                eout.eend(0)
                        if mf.fhashdict.get("MISC"):
                                eout.ebegin(_("checking miscfile checksums ;-)"))
-                               mf.checkTypeHashes("MISC", ignoreMissingFiles=True)
+                               mf.checkTypeHashes("MISC", ignoreMissingFiles=True,
+                                       hash_filter=hash_filter)
                                eout.eend(0)
                for f in myfiles:
                        eout.ebegin(_("checking %s ;-)") % f)
@@ -58,7 +61,7 @@ def digestcheck(myfiles, mysettings, strict=False, justmanifest=None, mf=None):
                                writemsg(_("\n!!! Missing digest for '%s'\n") % (f,),
                                        noiselevel=-1)
                                return 0
-                       mf.checkFileHashes(ftype, f)
+                       mf.checkFileHashes(ftype, f, hash_filter=hash_filter)
                        eout.eend(0)
        except FileNotFound as e:
                eout.eend(1)
diff --git a/pym/portage/package/ebuild/fetch.py b/pym/portage/package/ebuild/fetch.py
index 60ed04da28c119b728ff4e92025f7a945695bf65..8365ad211b66da0d51d83ff8c7ef25e5a9fab215 100644
@@ -26,7 +26,7 @@ portage.proxy.lazyimport.lazyimport(globals(),
 from portage import OrderedDict, os, selinux, shutil, _encodings, \
        _shell_quote, _unicode_encode
 from portage.checksum import (hashfunc_map, perform_md5, verify_all,
-       _filter_unaccelarated_hashes)
+       _filter_unaccelarated_hashes, _hash_filter, _apply_hash_filter)
 from portage.const import BASH_BINARY, CUSTOM_MIRRORS_FILE, \
        GLOBAL_CONFIG_PATH
 from portage.data import portage_gid, portage_uid, secpass, userpriv_groups
@@ -185,7 +185,7 @@ def _check_digests(filename, digests, show_errors=1):
                return False
        return True
 
-def _check_distfile(filename, digests, eout, show_errors=1):
+def _check_distfile(filename, digests, eout, show_errors=1, hash_filter=None):
        """
        @return a tuple of (match, stat_obj) where match is True if filename
        matches all given digests (if any) and stat_obj is a stat result, or
@@ -212,6 +212,8 @@ def _check_distfile(filename, digests, eout, show_errors=1):
                        return (False, st)
        else:
                digests = _filter_unaccelarated_hashes(digests)
+               if hash_filter is not None:
+                       digests = _apply_hash_filter(digests, hash_filter)
                if _check_digests(filename, digests, show_errors=show_errors):
                        eout.ebegin("%s %s ;-)" % (os.path.basename(filename),
                                " ".join(sorted(digests))))
@@ -355,6 +357,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
                if try_mirrors:
                        mymirrors += [x.rstrip("/") for x in mysettings["GENTOO_MIRRORS"].split() if x]
 
+       hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
        skip_manifest = mysettings.get("EBUILD_SKIP_MANIFEST") == "1"
        if skip_manifest:
                allow_missing_digests = True
@@ -637,7 +640,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
                                eout = EOutput()
                                eout.quiet = mysettings.get("PORTAGE_QUIET") == "1"
                                match, mystat = _check_distfile(
-                                       myfile_path, pruned_digests, eout)
+                                       myfile_path, pruned_digests, eout, hash_filter=hash_filter)
                                if match:
                                        # Skip permission adjustment for symlinks, since we don't
                                        # want to modify anything outside of the primary DISTDIR,
@@ -709,7 +712,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
                                        for x in ro_distdirs:
                                                filename = os.path.join(x, myfile)
                                                match, mystat = _check_distfile(
-                                                       filename, pruned_digests, eout)
+                                                       filename, pruned_digests, eout, hash_filter=hash_filter)
                                                if match:
                                                        readonly_file = filename
                                                        break
@@ -796,6 +799,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
                                                        continue
                                                else:
                                                        digests = _filter_unaccelarated_hashes(mydigests[myfile])
+                                                       digests = _apply_hash_filter(digests, hash_filter)
                                                        verified_ok, reason = verify_all(myfile_path, digests)
                                                        if not verified_ok:
                                                                writemsg(_("!!! Previously fetched"
@@ -1053,6 +1057,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0,
                                                                # net connection. This way we have a chance to try to download
                                                                # from another mirror...
                                                                digests = _filter_unaccelarated_hashes(mydigests[myfile])
+                                                               digests = _apply_hash_filter(digests, hash_filter)
                                                                verified_ok, reason = verify_all(myfile_path, digests)
                                                                if not verified_ok:
                                                                        writemsg(_("!!! Fetched file: %s VERIFY FAILED!\n") % myfile,
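
The same filter object is reused for every distfile inside fetch(). Below is a standalone sketch of the _check_distfile() call pattern shown above; the path, the digest values and the plain dict standing in for the config object are all hypothetical.

from portage.checksum import _hash_filter
from portage.output import EOutput
from portage.package.ebuild.fetch import _check_distfile

settings = {"PORTAGE_CHECKSUM_FILTER": "* -whirlpool"}   # stand-in for a config object
hash_filter = _hash_filter(settings.get("PORTAGE_CHECKSUM_FILTER", ""))

mydigests = {"size": 1024, "SHA256": "0" * 64}           # made-up digests
eout = EOutput()
match, mystat = _check_distfile(
    "/usr/portage/distfiles/foo-1.0.tar.gz",             # hypothetical path
    mydigests, eout, show_errors=False, hash_filter=hash_filter)
# match is False here, since the file and digests are fictional; in fetch()
# a True result means the existing distfile passed all selected checksums.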