* Optimize parallel-fetch to avoid redundant checksum verification.
* Add parallel-fetch to the default FEATURES since it is more efficient now.
  (trunk r9462)

author     Zac Medico <zmedico@gentoo.org>
           Fri, 28 Mar 2008 11:59:23 +0000
committer  Zac Medico <zmedico@gentoo.org>
           Fri, 28 Mar 2008 11:59:23 +0000

svn path=/main/branches/2.1.2/; revision=9553
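
Taken together, the hunks below set up a small parent/child contract: the
parent emerge marks its background fetch process through the
PORTAGE_PARALLEL_FETCHONLY environment variable, and fetch() only relaxes
verification when that marker is present. A rough sketch of the detection
side (hypothetical helper, not part of the commit; portage passes its own
config object rather than a plain dict):

    def is_parallel_fetchonly(settings, fetchonly):
        # Mirrors the condition added to fetch() below: only the
        # --fetchonly child spawned by parallel-fetch qualifies.
        return bool(fetchonly) and "PORTAGE_PARALLEL_FETCHONLY" in settings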

bin/emerge
cnf/make.globals
pym/portage.py

diff --git a/bin/emerge b/bin/emerge
index b72bfca68045a4fb804a66ddf77fa362df2fd04b..5928ad2bb2f9277291a9af96ec33930226fe11cd 100755
@@ -4159,6 +4159,7 @@ class MergeTask(object):
                                fetch_env = os.environ.copy()
                                fetch_env["FEATURES"] = fetch_env.get("FEATURES", "") + " -cvs"
                                fetch_env["PORTAGE_NICENESS"] = "0"
+                               fetch_env["PORTAGE_PARALLEL_FETCHONLY"] = "1"
                                fetch_args = [sys.argv[0], "--resume", "--fetchonly"]
                                resume_opts = self.myopts.copy()
                                # For automatic resume, we need to prevent
@@ -6514,7 +6515,7 @@ def action_build(settings, trees, mtimedb,
                if ("--resume" in myopts):
                        favorites=mtimedb["resume"]["favorites"]
                        mergetask = MergeTask(settings, trees, myopts)
-                       if "--fetchonly" in myopts:
+                       if "PORTAGE_PARALLEL_FETCHONLY" in settings:
                                """ parallel-fetch uses --resume --fetchonly and we don't want
                                it to write the mtimedb"""
                                mtimedb.filename = None
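
The hunk above builds a modified environment and argument list for the
background fetcher. A minimal sketch of that spawning pattern (simplified
and hypothetical: portage uses its own spawn machinery and sends output to
/var/log/emerge-fetch.log rather than calling subprocess directly):

    import os
    import subprocess
    import sys

    def spawn_parallel_fetch():
        fetch_env = os.environ.copy()
        # Mark the child so that fetch() in pym/portage.py can tell it is
        # running as the background parallel-fetch helper.
        fetch_env["PORTAGE_PARALLEL_FETCHONLY"] = "1"
        fetch_args = [sys.argv[0], "--resume", "--fetchonly"]
        # Re-run emerge in resumable fetch-only mode as a background process.
        return subprocess.Popen(fetch_args, env=fetch_env)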
diff --git a/cnf/make.globals b/cnf/make.globals
index 1e08b628f7570bdafd6483d7feab9842ae7e4871..331f62d635e53096a472e352da80deb26dbf7f55 100644
@@ -33,7 +33,7 @@ FETCHCOMMAND="/usr/bin/wget -t 5 -T 60 --passive-ftp -O \"\${DISTDIR}/\${FILE}\"
 RESUMECOMMAND="/usr/bin/wget -c -t 5 -T 60 --passive-ftp -O \"\${DISTDIR}/\${FILE}\" \"\${URI}\""
 
 # Default user options
-FEATURES="distlocks metadata-transfer sandbox sfperms strict unmerge-orphans userfetch"
+FEATURES="distlocks parallel-fetch metadata-transfer sandbox sfperms strict unmerge-orphans userfetch"
 
 # Default chunksize for binhost comms
 PORTAGE_BINHOST_CHUNKSIZE="3000"
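
With parallel-fetch now in the default FEATURES, users who prefer the old
behavior can still turn it off, since FEATURES is an incremental variable;
for example, in /etc/make.conf:

    FEATURES="-parallel-fetch"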
diff --git a/pym/portage.py b/pym/portage.py
index 25789db8073b24707c2f0af12cb45a3c238f57dc..eea0cb53e0088bb58b4bee2d1c1ecd470b20edd1 100644
@@ -3008,6 +3008,14 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
        checksum_failure_primaryuri = 2
        thirdpartymirrors = mysettings.thirdpartymirrors()
 
+       # In the background parallel-fetch process, it's safe to skip checksum
+       # verification of pre-existing files in $DISTDIR that have the correct
+       # file size. The parent process will verify their checksums prior to
+       # the unpack phase.
+
+       parallel_fetchonly = fetchonly and \
+               "PORTAGE_PARALLEL_FETCHONLY" in mysettings
+
        check_config_instance(mysettings)
 
        custommirrors = grabdict(os.path.join(mysettings["PORTAGE_CONFIGROOT"],
@@ -3203,7 +3211,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
 
                        if use_locks and can_fetch:
                                waiting_msg = None
-                               if "parallel-fetch" in features:
+                               if not parallel_fetchonly and "parallel-fetch" in features:
                                        waiting_msg = ("Downloading '%s'... " + \
                                                "see /var/log/emerge-fetch.log for details.") % myfile
                                if locks_in_subdir:
@@ -3262,6 +3270,15 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
                                                if mystat.st_size < mydigests[myfile]["size"] and \
                                                        not restrict_fetch:
                                                        fetched = 1 # Try to resume this download.
+                                               elif parallel_fetchonly and \
+                                                       mystat.st_size == mydigests[myfile]["size"]:
+                                                       eout = portage.output.EOutput()
+                                                       eout.quiet = \
+                                                               mysettings.get("PORTAGE_QUIET") == "1"
+                                                       eout.ebegin(
+                                                               "%s size ;-)" % (myfile, ))
+                                                       eout.eend(0)
+                                                       continue
                                                else:
                                                        verified_ok, reason = portage_checksum.verify_all(
                                                                myfile_path, mydigests[myfile])
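
The branch added above is the heart of the optimization: in the
PORTAGE_PARALLEL_FETCHONLY child, a pre-existing distfile whose size already
matches the digest is accepted without hashing, and the parent process
verifies the checksums before the unpack phase. A standalone sketch of that
size check (hypothetical helper, not the portage API):

    import os

    def size_only_ok(distfile_path, expected_size):
        # Accept the file if it exists and already has the expected size;
        # checksum verification is deliberately left to the parent emerge.
        try:
            return os.stat(distfile_path).st_size == expected_size
        except OSError:
            return False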
@@ -4720,6 +4737,11 @@ def doebuild(myebuild, mydo, myroot, mysettings, debug=0, listonly=0,
                        fetchme = newuris[:]
                        checkme = alist[:]
 
+               if mydo == "fetch":
+                       # Files are already checked inside fetch(),
+                       # so do not check them again.
+                       checkme = []
+
                # Only try and fetch the files if we are going to need them ...
                # otherwise, if user has FEATURES=noauto and they run `ebuild clean
                # unpack compile install`, we will try and fetch 4 times :/
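
The final hunk removes one more duplicate pass: when doebuild() runs the
fetch phase, the distfiles have just been validated (or size-checked) inside
fetch(), so nothing is left for a second digest check. A tiny sketch of that
rule (hypothetical helper):

    def files_to_recheck(phase, checkme):
        # fetch() has already validated its own downloads, so the fetch
        # phase re-checks nothing; other phases keep the full list.
        return [] if phase == "fetch" else list(checkme)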