Make fetch() behave like the ebuild has RESTRICT="primaryuri"
after 2 checksum failures for a fetched file. This should
increase the probability of fetching the correct file before
the maximum number of tries is reached. (trunk r8412)

author     Zac Medico <zmedico@gentoo.org>
           Sun, 4 Nov 2007 02:15:39 +0000 (02:15 -0000)
committer  Zac Medico <zmedico@gentoo.org>
           Sun, 4 Nov 2007 02:15:39 +0000 (02:15 -0000)

svn path=/main/branches/2.1.2/; revision=8413

pym/portage.py
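
The hunks below implement the policy described in the commit message. As a
rough standalone sketch of the resulting retry loop for a single distfile
(this is not the portage code itself; fetch_one, download and checksum_ok
are hypothetical stand-ins, and the constants mirror the values added in
the patch):

    CHECKSUM_FAILURE_MAX_TRIES = 5
    CHECKSUM_FAILURE_PRIMARYURI = 2

    def fetch_one(myfile, uri_list, primaryuris, download, checksum_ok):
        # Keep the queue reversed so list.pop() yields URIs in order.
        queue = list(reversed(uri_list))
        tried = set()
        failures = 0
        while queue:
            loc = queue.pop()
            if loc in tried:
                # Skip duplicates added by the primaryuri fallback below.
                continue
            tried.add(loc)
            download(loc, myfile)
            if checksum_ok(myfile):
                return True
            failures += 1
            if failures == CHECKSUM_FAILURE_PRIMARYURI:
                # From now on behave as if RESTRICT="primaryuri" were set:
                # the SRC_URI hosts land at the tail of the queue, so they
                # are popped before the remaining mirrors.
                queue.extend(reversed(primaryuris))
            if failures >= CHECKSUM_FAILURE_MAX_TRIES:
                break
        return False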

index d08350e7a442cd5d4e8ebeda29eb16787d3bbdf8..6c069d9e333c25d6adaaf3264a419a9cc43861af 100644
@@ -2729,6 +2729,10 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
        # and time, so there needs to be a cap.
        checksum_failure_max_tries = 5
        checksum_failure_counts = {}
+       # Behave like the package has RESTRICT="primaryuri" after a
+       # couple of checksum failures, to increase the probability
+       # of success before checksum_failure_max_tries is reached.
+       checksum_failure_primaryuri = 2
        thirdpartymirrors = mysettings.thirdpartymirrors()
 
        check_config_instance(mysettings)
@@ -2792,6 +2796,7 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
 
        filedict={}
        primaryuri_indexes={}
+       primaryuri_dict = {}
        for myuri in myuris:
                myfile=os.path.basename(myuri)
                if not filedict.has_key(myfile):
@@ -2835,6 +2840,11 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
                                filedict[myfile].insert(primaryuri_indexes[myfile], myuri)
                        else:
                                filedict[myfile].append(myuri)
+                       primaryuris = primaryuri_dict.get(myfile)
+                       if primaryuris is None:
+                               primaryuris = []
+                               primaryuri_dict[myfile] = primaryuris
+                       primaryuris.append(myuri)
 
        can_fetch=True
 
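The bookkeeping added above amounts to grouping each file's SRC_URI
locations by basename so they can be re-queued after checksum failures. A
minimal sketch, assuming a flat list of URIs (group_primary_uris is a
hypothetical helper and setdefault() is used for brevity; in the real hunk
this branch appears to run only for URIs named directly in SRC_URI, not
for expanded mirror:// entries):

    import os

    def group_primary_uris(myuris):
        # Map each distfile basename to the URIs named directly in SRC_URI.
        primaryuri_dict = {}
        for myuri in myuris:
            myfile = os.path.basename(myuri)
            primaryuri_dict.setdefault(myfile, []).append(myuri)
        return primaryuri_dict
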
@@ -2994,7 +3004,17 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
                                                                        eout.eend(0)
                                                                continue # fetch any remaining files
 
-                       for loc in filedict[myfile]:
+                       # Create a reversed list since that is optimal for list.pop().
+                       uri_list = filedict[myfile][:]
+                       uri_list.reverse()
+                       tried_locations = set()
+                       while uri_list:
+                               loc = uri_list.pop()
+                               # Eliminate duplicates here in case we've switched to
+                               # "primaryuri" mode on the fly due to a checksum failure.
+                               if loc in tried_locations:
+                                       continue
+                               tried_locations.add(loc)
                                if listonly:
                                        writemsg_stdout(loc+" ", noiselevel=-1)
                                        continue
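
The loop rewrite above has two properties worth noting: popping from the
tail of a reversed copy yields the URIs in their original order with O(1)
pops, and the tried_locations set keeps a URI from being fetched twice once
the primaryuri fallback re-queues it. A toy walk-through with made-up URIs:

    uri_list = list(reversed(["mirrorA/f", "mirrorB/f", "upstream/f"]))
    tried_locations = set()
    order = []
    while uri_list:
        loc = uri_list.pop()
        if loc in tried_locations:
            continue
        tried_locations.add(loc)
        order.append(loc)
        if len(order) == 1:
            # Simulate the fallback: the upstream URI jumps ahead of the
            # remaining mirror, and its original queue entry is skipped later.
            uri_list.extend(reversed(["upstream/f"]))
    assert order == ["mirrorA/f", "upstream/f", "mirrorB/f"]
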
@@ -3165,6 +3185,15 @@ def fetch(myuris, mysettings, listonly=0, fetchonly=0, locks_in_subdir=".locks",
                                                                        if count is None:
                                                                                count = 0
                                                                        count += 1
+                                                                       if count == checksum_failure_primaryuri:
+                                                                               # Switch to "primaryuri" mode in order
+                                                                               # to increase the probability
+                                                                               # of success.
+                                                                               primaryuris = \
+                                                                                       primaryuri_dict.get(myfile)
+                                                                               if primaryuris:
+                                                                                       uri_list.extend(
+                                                                                               reversed(primaryuris))
                                                                        if count >= checksum_failure_max_tries:
                                                                                break
                                                                        checksum_failure_counts[myfile] = count
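
Because the check above uses == rather than >= (and count grows by one per
failure), the primary URIs are appended to the queue only once per file. A
condensed sketch of the counter handling (on_checksum_failure is a
hypothetical helper, not a function in portage.py):

    checksum_failure_counts = {}
    checksum_failure_max_tries = 5
    checksum_failure_primaryuri = 2

    def on_checksum_failure(myfile, uri_list, primaryuri_dict):
        # Return False once the fetch for myfile should be given up.
        count = checksum_failure_counts.get(myfile, 0) + 1
        if count == checksum_failure_primaryuri:
            primaryuris = primaryuri_dict.get(myfile)
            if primaryuris:
                # Append once; the tried_locations set filters repeats.
                uri_list.extend(reversed(primaryuris))
        if count >= checksum_failure_max_tries:
            return False
        checksum_failure_counts[myfile] = count
        return True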