import tempfile
try:
- from urllib.parse import urlparse
+ from urllib.parse import urlparse, urlunparse
except ImportError:
- from urlparse import urlparse
+ from urlparse import urlparse, urlunparse
import portage
portage.proxy.lazyimport.lazyimport(globals(),
'Y' : 80,
}
+
+def _get_checksum_failure_max_tries(settings, default=5):
+	"""
+	Get the maximum number of failed download attempts.
+
+	Generally, downloading the same file repeatedly from
+	every single available mirror is a waste of bandwidth
+	and time, so there needs to be a cap.
+
+	@param settings: portage config-like mapping that may define
+		PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS
+	@param default: value used when the variable is unset,
+		non-integer, or less than 1
+	@rtype: int
+	@return: maximum number of download attempts (always >= 1)
+	"""
+	key = 'PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS'
+	v = default
+	try:
+		v = int(settings.get(key, default))
+	except (ValueError, OverflowError):
+		# settings[key] is safe here: int() can only raise when
+		# the key is actually present with a bad value.
+		writemsg(_("!!! Variable %s contains "
+			"non-integer value: '%s'\n")
+			% (key, settings[key]),
+			noiselevel=-1)
+		writemsg(_("!!! Using %s default value: %s\n")
+			% (key, default),
+			noiselevel=-1)
+		v = default
+	if v < 1:
+		# Zero or negative tries would disable fetching entirely,
+		# so clamp back to the default with a warning.
+		writemsg(_("!!! Variable %s contains "
+			"value less than 1: '%s'\n")
+			% (key, v),
+			noiselevel=-1)
+		writemsg(_("!!! Using %s default value: %s\n")
+			% (key, default),
+			noiselevel=-1)
+		v = default
+	return v
+
+
+def _get_fetch_resume_size(settings, default='350K'):
+	"""
+	Get the minimum file size for attempting download resumption.
+
+	The PORTAGE_FETCH_RESUME_MIN_SIZE value is parsed as an
+	integer with an optional size suffix, which is translated
+	into a power of two via _size_suffix_map.  Unrecognized
+	values fall back to the default with a warning.
+
+	@param settings: portage config-like mapping
+	@param default: fallback value, in the same "<int><suffix>"
+		string format
+	@rtype: int
+	@return: resume-size threshold in bytes
+	"""
+	key = 'PORTAGE_FETCH_RESUME_MIN_SIZE'
+	v = settings.get(key, default)
+	if v is not None:
+		# Strip all whitespace so e.g. '350 K' parses like '350K'.
+		v = "".join(v.split())
+	if not v:
+		# If it's empty, silently use the default.
+		v = default
+	match = _fetch_resume_size_re.match(v)
+	if (match is None or
+			match.group(2).upper() not in _size_suffix_map):
+		writemsg(_("!!! Variable %s contains "
+			"an unrecognized format: '%s'\n")
+			% (key, settings[key]),
+			noiselevel=-1)
+		writemsg(_("!!! Using %s default value: %s\n")
+			% (key, default),
+			noiselevel=-1)
+		v = default
+		# The default is assumed to always match the regex.
+		match = _fetch_resume_size_re.match(v)
+	# NOTE(review): assumes group(2) always participates in a
+	# successful match (possibly as '') — confirm against
+	# _fetch_resume_size_re and _size_suffix_map definitions.
+	v = int(match.group(1)) * \
+		2 ** _size_suffix_map[match.group(2).upper()]
+	return v
+
+
+def _get_file_uri_tuples(uris):
+	"""
+	Return a list of (filename, URI) tuples.
+
+	@param uris: either a dict-like mapping of filename -> iterable
+		of URIs, or a flat iterable mixing URIs and bare filenames
+	@rtype: list
+	@return: (filename, URI) pairs; URI is None for entries that
+		have no remote location of their own (mirror-only files)
+	"""
+	file_uri_tuples = []
+	# Check for 'items' attribute since OrderedDict is not a dict.
+	if hasattr(uris, 'items'):
+		for filename, uri_set in uris.items():
+			for uri in uri_set:
+				file_uri_tuples.append((filename, uri))
+			# An empty URI set still produces one entry, so the
+			# file is tracked even though it has no direct URI.
+			if not uri_set:
+				file_uri_tuples.append((filename, None))
+	else:
+		# Flat iterable: anything without a URL scheme is treated
+		# as a bare filename.
+		for uri in uris:
+			if urlparse(uri).scheme:
+				file_uri_tuples.append(
+					(os.path.basename(uri), uri))
+			else:
+				file_uri_tuples.append(
+					(os.path.basename(uri), None))
+	return file_uri_tuples
+
+
+def _expand_mirror(uri, custom_mirrors=(), third_party_mirrors=()):
+ """
+ Replace the 'mirror://' scheme and netloc in the URI.
+
+ Returns an iterable listing expanded (group, URI) tuples,
+ where the group is either 'custom' or 'third-party'.
+ """
+ parsed = urlparse(uri)
+ mirror = parsed.netloc
+ path = parsed.path
+ if path:
+ # Try user-defined mirrors first
+ if mirror in custom_mirrors:
+ for cmirr in custom_mirrors[mirror]:
+ m_uri = urlparse(cmirr)
+ yield ('custom', urlunparse((
+ m_uri.scheme, m_uri.netloc, path) +
+ parsed[3:]))
+
+ # now try the official mirrors
+ if mirror in third_party_mirrors:
+ uris = []
+ for locmirr in third_party_mirrors[mirror]:
+ m_uri = urlparse(locmirr)
+ uris.append(urlunparse((
+ m_uri.scheme, m_uri.netloc, path) +
+ parsed[3:]))
+ random.shuffle(uris)
+ for uri in uris:
+ yield ('third-party', uri)
+
+ if (not custom_mirrors.get(mirror, []) and
+ not third_party_mirrors.get(mirror, [])):
+ writemsg(
+ _("No known mirror by the name: %s\n")
+ % mirror)
+ else:
+ writemsg(_("Invalid mirror definition in SRC_URI:\n"),
+ noiselevel=-1)
+ writemsg(" %s\n" % uri, noiselevel=-1)
+
+
+def _get_uris(uris, settings, custom_mirrors=(), locations=()):
+ restrict = settings.get("PORTAGE_RESTRICT", "").split()
+ restrict_fetch = "fetch" in restrict
+ restrict_mirror = "mirror" in restrict or "nomirror" in restrict
+ force_mirror = (
+ "force-mirror" in settings.features and
+ not restrict_mirror)
+
+ third_party_mirrors = settings.thirdpartymirrors()
+ third_party_mirror_uris = {}
+ filedict = OrderedDict()
+ primaryuri_dict = {}
+ for filename, uri in _get_file_uri_tuples(uris=uris):
+ if filename not in filedict:
+ filedict[filename] = [
+ os.path.join(location, 'distfiles', filename)
+ for location in locations]
+ if uri is None:
+ continue
+ if uri.startswith('mirror://'):
+ expanded_uris = _expand_mirror(
+ uri=uri, custom_mirrors=custom_mirrors,
+ third_party_mirrors=third_party_mirrors)
+ filedict[filename].extend(
+ uri for _, uri in expanded_uris)
+ third_party_mirror_uris.setdefault(filename, []).extend(
+ uri for group, uri in expanded_uris
+ if group == 'third-party')
+ else:
+ if restrict_fetch or force_mirror:
+ # Only fetch from specific mirrors is allowed.
+ continue
+ primaryuris = primaryuri_dict.get(filename)
+ if primaryuris is None:
+ primaryuris = []
+ primaryuri_dict[filename] = primaryuris
+ primaryuris.append(uri)
+
+ # Order primaryuri_dict values to match that in SRC_URI.
+ for uris in primaryuri_dict.values():
+ uris.reverse()
+
+ # Prefer third_party_mirrors over normal mirrors in cases when
+ # the file does not yet exist on the normal mirrors.
+ for filename, uris in third_party_mirror_uris.items():
+ primaryuri_dict.setdefault(filename, []).extend(uris)
+
+ # Now merge primaryuri values into filedict (includes mirrors
+ # explicitly referenced in SRC_URI).
+ if "primaryuri" in restrict:
+ for filename, uris in filedict.items():
+ filedict[filename] = primaryuri_dict.get(filename, []) + uris
+ else:
+ for filename in filedict:
+ filedict[filename] += primaryuri_dict.get(filename, [])
+
+ return filedict, primaryuri_dict
+
+
def fetch(myuris, mysettings, listonly=0, fetchonly=0,
locks_in_subdir=".locks", use_locks=1, try_mirrors=1, digests=None,
allow_missing_digests=True):
print(_(">>> \"mirror\" mode desired and \"mirror\" restriction found; skipping fetch."))
return 1
- # Generally, downloading the same file repeatedly from
- # every single available mirror is a waste of bandwidth
- # and time, so there needs to be a cap.
- checksum_failure_max_tries = 5
- v = checksum_failure_max_tries
- try:
- v = int(mysettings.get("PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS",
- checksum_failure_max_tries))
- except (ValueError, OverflowError):
- writemsg(_("!!! Variable PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS"
- " contains non-integer value: '%s'\n") % \
- mysettings["PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS"], noiselevel=-1)
- writemsg(_("!!! Using PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS "
- "default value: %s\n") % checksum_failure_max_tries,
- noiselevel=-1)
- v = checksum_failure_max_tries
- if v < 1:
- writemsg(_("!!! Variable PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS"
- " contains value less than 1: '%s'\n") % v, noiselevel=-1)
- writemsg(_("!!! Using PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS "
- "default value: %s\n") % checksum_failure_max_tries,
- noiselevel=-1)
- v = checksum_failure_max_tries
- checksum_failure_max_tries = v
- del v
-
- fetch_resume_size_default = "350K"
- fetch_resume_size = mysettings.get("PORTAGE_FETCH_RESUME_MIN_SIZE")
- if fetch_resume_size is not None:
- fetch_resume_size = "".join(fetch_resume_size.split())
- if not fetch_resume_size:
- # If it's undefined or empty, silently use the default.
- fetch_resume_size = fetch_resume_size_default
- match = _fetch_resume_size_re.match(fetch_resume_size)
- if match is None or \
- (match.group(2).upper() not in _size_suffix_map):
- writemsg(_("!!! Variable PORTAGE_FETCH_RESUME_MIN_SIZE"
- " contains an unrecognized format: '%s'\n") % \
- mysettings["PORTAGE_FETCH_RESUME_MIN_SIZE"], noiselevel=-1)
- writemsg(_("!!! Using PORTAGE_FETCH_RESUME_MIN_SIZE "
- "default value: %s\n") % fetch_resume_size_default,
- noiselevel=-1)
- fetch_resume_size = None
- if fetch_resume_size is None:
- fetch_resume_size = fetch_resume_size_default
- match = _fetch_resume_size_re.match(fetch_resume_size)
- fetch_resume_size = int(match.group(1)) * \
- 2 ** _size_suffix_map[match.group(2).upper()]
+ checksum_failure_max_tries = _get_checksum_failure_max_tries(
+ settings=mysettings)
+ fetch_resume_size = _get_fetch_resume_size(settings=mysettings)
# Behave like the package has RESTRICT="primaryuri" after a
# couple of checksum failures, to increase the probablility
# of success before checksum_failure_max_tries is reached.
checksum_failure_primaryuri = 2
- thirdpartymirrors = mysettings.thirdpartymirrors()
# In the background parallel-fetch process, it's safe to skip checksum
# verification of pre-existing files in $DISTDIR that have the correct
_("!!! For fetching to a read-only filesystem, "
"locking should be turned off.\n")), noiselevel=-1)
writemsg(_("!!! This can be done by adding -distlocks to "
- "FEATURES in /etc/make.conf\n"), noiselevel=-1)
+ "FEATURES in /etc/portage/make.conf\n"), noiselevel=-1)
# use_locks = 0
# local mirrors are always added
del mymirrors[x]
restrict_fetch = "fetch" in restrict
- force_mirror = "force-mirror" in features and not restrict_mirror
custom_local_mirrors = custommirrors.get("local", [])
if restrict_fetch:
# With fetch restriction, a normal uri may only be fetched from
else:
locations = mymirrors
- file_uri_tuples = []
- # Check for 'items' attribute since OrderedDict is not a dict.
- if hasattr(myuris, 'items'):
- for myfile, uri_set in myuris.items():
- for myuri in uri_set:
- file_uri_tuples.append((myfile, myuri))
- if not uri_set:
- file_uri_tuples.append((myfile, None))
- else:
- for myuri in myuris:
- if urlparse(myuri).scheme:
- file_uri_tuples.append((os.path.basename(myuri), myuri))
- else:
- file_uri_tuples.append((os.path.basename(myuri), None))
-
- filedict = OrderedDict()
- primaryuri_dict = {}
- thirdpartymirror_uris = {}
- for myfile, myuri in file_uri_tuples:
- if myfile not in filedict:
- filedict[myfile]=[]
- for y in range(0,len(locations)):
- filedict[myfile].append(locations[y]+"/distfiles/"+myfile)
- if myuri is None:
- continue
- if myuri[:9]=="mirror://":
- eidx = myuri.find("/", 9)
- if eidx != -1:
- mirrorname = myuri[9:eidx]
- path = myuri[eidx+1:]
-
- # Try user-defined mirrors first
- if mirrorname in custommirrors:
- for cmirr in custommirrors[mirrorname]:
- filedict[myfile].append(
- cmirr.rstrip("/") + "/" + path)
-
- # now try the official mirrors
- if mirrorname in thirdpartymirrors:
- uris = [locmirr.rstrip("/") + "/" + path \
- for locmirr in thirdpartymirrors[mirrorname]]
- random.shuffle(uris)
- filedict[myfile].extend(uris)
- thirdpartymirror_uris.setdefault(myfile, []).extend(uris)
-
- if not filedict[myfile]:
- writemsg(_("No known mirror by the name: %s\n") % (mirrorname))
- else:
- writemsg(_("Invalid mirror definition in SRC_URI:\n"), noiselevel=-1)
- writemsg(" %s\n" % (myuri), noiselevel=-1)
- else:
- if restrict_fetch or force_mirror:
- # Only fetch from specific mirrors is allowed.
- continue
- primaryuris = primaryuri_dict.get(myfile)
- if primaryuris is None:
- primaryuris = []
- primaryuri_dict[myfile] = primaryuris
- primaryuris.append(myuri)
-
- # Order primaryuri_dict values to match that in SRC_URI.
- for uris in primaryuri_dict.values():
- uris.reverse()
-
- # Prefer thirdpartymirrors over normal mirrors in cases when
- # the file does not yet exist on the normal mirrors.
- for myfile, uris in thirdpartymirror_uris.items():
- primaryuri_dict.setdefault(myfile, []).extend(uris)
-
- # Now merge primaryuri values into filedict (includes mirrors
- # explicitly referenced in SRC_URI).
- if "primaryuri" in restrict:
- for myfile, uris in filedict.items():
- filedict[myfile] = primaryuri_dict.get(myfile, []) + uris
- else:
- for myfile in filedict:
- filedict[myfile] += primaryuri_dict.get(myfile, [])
+ filedict, primaryuri_dict = _get_uris(
+ uris=myuris, settings=mysettings,
+ custom_mirrors=custommirrors, locations=locations)
can_fetch=True
"remaining space.\n"), noiselevel=-1)
if userfetch:
writemsg(_("!!! You may set FEATURES=\"-userfetch\""
- " in /etc/make.conf in order to fetch with\n"
+ " in /etc/portage/make.conf in order to fetch with\n"
"!!! superuser privileges.\n"), noiselevel=-1)
if fsmirrors and not os.path.exists(myfile_path) and has_space: