return False
hash_filter = _hash_filter(settings.get("PORTAGE_CHECKSUM_FILTER", ""))
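+ # A transparent filter matches every hash name; use None to indicate
+ # that no filtering is needed.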
+ if hash_filter.transparent:
+ hash_filter = None
stdout_orig = sys.stdout
stderr_orig = sys.stderr
global_havecolor = portage.output.havecolor
dict if no changes are necessary. This will always preserve at
least one digest, in order to ensure that they are not all
discarded.
+ @param digests: dictionary of digests
+ @type digests: dict
+ @param hash_filter: A callable that takes a single hash name
+ argument, and returns True if the hash is to be used or
+ False otherwise
+ @type hash_filter: callable
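+ @rtype: dict
+ @return: a new dict containing the filtered digests, or the same
+ dict if no changes are necessary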
"""
- if hash_filter.transparent:
- return digests
verifiable_hash_types = set(digests).intersection(hashfunc_map)
verifiable_hash_types.discard("size")
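A minimal sketch of the caller-side pattern these hunks converge on, assuming the portage.checksum names used above (_hash_filter, _apply_hash_filter, verify_all); the helper name _verify_filtered is hypothetical:

    from portage.checksum import _apply_hash_filter, _hash_filter, verify_all

    def _verify_filtered(path, digests, checksum_filter=""):
        # checksum_filter is the PORTAGE_CHECKSUM_FILTER setting string.
        hash_filter = _hash_filter(checksum_filter)
        if hash_filter.transparent:
            # A transparent filter accepts every hash, so skip filtering.
            hash_filter = None
        if hash_filter is not None:
            digests = _apply_hash_filter(digests, hash_filter)
        # verify_all returns an (ok, reason) tuple, as in the fetch hunks below.
        return verify_all(path, digests)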
hash_filter = _hash_filter(
self.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
- digests = _apply_hash_filter(digests, hash_filter)
+ if not hash_filter.transparent:
+ digests = _apply_hash_filter(digests, hash_filter)
eout = EOutput()
eout.quiet = self.settings.get("PORTAGE_QUIET") == "1"
ok, st = _check_distfile(pkg_path, digests, eout, show_errors=0)
return 1
pkgdir = mysettings["O"]
hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
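+ # No filtering is needed when the filter is transparent (it accepts all hashes).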
+ if hash_filter.transparent:
+ hash_filter = None
if mf is None:
mf = mysettings.repositories.get_repo_for_location(
os.path.dirname(os.path.dirname(pkgdir)))
mymirrors += [x.rstrip("/") for x in mysettings["GENTOO_MIRRORS"].split() if x]
hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
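+ # With a transparent (accept-all) filter, skip the _apply_hash_filter calls below.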
+ if hash_filter.transparent:
+ hash_filter = None
skip_manifest = mysettings.get("EBUILD_SKIP_MANIFEST") == "1"
if skip_manifest:
allow_missing_digests = True
continue
else:
digests = _filter_unaccelarated_hashes(mydigests[myfile])
- digests = _apply_hash_filter(digests, hash_filter)
+ if hash_filter is not None:
+ digests = _apply_hash_filter(digests, hash_filter)
verified_ok, reason = verify_all(myfile_path, digests)
if not verified_ok:
writemsg(_("!!! Previously fetched"
# net connection. This way we have a chance to try to download
# from another mirror...
digests = _filter_unaccelarated_hashes(mydigests[myfile])
- digests = _apply_hash_filter(digests, hash_filter)
+ if hash_filter is not None:
+ digests = _apply_hash_filter(digests, hash_filter)
verified_ok, reason = verify_all(myfile_path, digests)
if not verified_ok:
writemsg(_("!!! Fetched file: %s VERIFY FAILED!\n") % myfile,