0093a6e3b3f4a7168ce7eb44a6d2861383f4aad1
[portage.git] / pym / portage / package / ebuild / fetch.py
1 # Copyright 2010-2013 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 from __future__ import print_function
5
6 __all__ = ['fetch']
7
8 import errno
9 import io
10 import logging
11 import random
12 import re
13 import stat
14 import sys
15 import tempfile
16
17 try:
18         from urllib.parse import urlparse, urlunparse
19 except ImportError:
20         from urlparse import urlparse, urlunparse
21
22 import portage
23 portage.proxy.lazyimport.lazyimport(globals(),
24         'portage.package.ebuild.config:check_config_instance,config',
25         'portage.package.ebuild.doebuild:doebuild_environment,' + \
26                 '_doebuild_spawn',
27         'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
28 )
29
30 from portage import OrderedDict, os, selinux, shutil, _encodings, \
31         _shell_quote, _unicode_encode
32 from portage.checksum import (hashfunc_map, perform_md5, verify_all,
33         _filter_unaccelarated_hashes, _hash_filter, _apply_hash_filter)
34 from portage.const import BASH_BINARY, CUSTOM_MIRRORS_FILE, \
35         GLOBAL_CONFIG_PATH
36 from portage.data import portage_gid, portage_uid, secpass, userpriv_groups
37 from portage.exception import FileNotFound, OperationNotPermitted, \
38         PortageException, TryAgain
39 from portage.localization import _
40 from portage.locks import lockfile, unlockfile
41 from portage.output import colorize, EOutput
42 from portage.util import apply_recursive_permissions, \
43         apply_secpass_permissions, ensure_dirs, grabdict, shlex_split, \
44         varexpand, writemsg, writemsg_level, writemsg_stdout
45 from portage.process import spawn
46
47 _userpriv_spawn_kwargs = (
48         ("uid",    portage_uid),
49         ("gid",    portage_gid),
50         ("groups", userpriv_groups),
51         ("umask",  0o02),
52 )
53
54 def _hide_url_passwd(url):
55         return re.sub(r'//(.+):.+@(.+)', r'//\1:*password*@\2', url)
56
57 def _spawn_fetch(settings, args, **kwargs):
58         """
59         Spawn a process with appropriate settings for fetching, including
60         userfetch and selinux support.
61         """
62
63         global _userpriv_spawn_kwargs
64
65         # Redirect all output to stdout since some fetchers like
66         # wget pollute stderr (if portage detects a problem then it
67         # can send it's own message to stderr).
68         if "fd_pipes" not in kwargs:
69
70                 kwargs["fd_pipes"] = {
71                         0 : portage._get_stdin().fileno(),
72                         1 : sys.__stdout__.fileno(),
73                         2 : sys.__stdout__.fileno(),
74                 }
75
76         if "userfetch" in settings.features and \
77                 os.getuid() == 0 and portage_gid and portage_uid and \
78                 hasattr(os, "setgroups"):
79                 kwargs.update(_userpriv_spawn_kwargs)
80
81         spawn_func = spawn
82
83         if settings.selinux_enabled():
84                 spawn_func = selinux.spawn_wrapper(spawn_func,
85                         settings["PORTAGE_FETCH_T"])
86
87                 # bash is an allowed entrypoint, while most binaries are not
88                 if args[0] != BASH_BINARY:
89                         args = [BASH_BINARY, "-c", "exec \"$@\"", args[0]] + args
90
91         # Ensure that EBUILD_PHASE is set to fetch, so that config.environ()
92         # does not filter the calling environment (which may contain needed
93         # proxy variables, as in bug #315421).
94         phase_backup = settings.get('EBUILD_PHASE')
95         settings['EBUILD_PHASE'] = 'fetch'
96         try:
97                 rval = spawn_func(args, env=settings.environ(), **kwargs)
98         finally:
99                 if phase_backup is None:
100                         settings.pop('EBUILD_PHASE', None)
101                 else:
102                         settings['EBUILD_PHASE'] = phase_backup
103
104         return rval
105
106 _userpriv_test_write_file_cache = {}
107 _userpriv_test_write_cmd_script = ">> %(file_path)s 2>/dev/null ; rval=$? ; " + \
108         "rm -f  %(file_path)s ; exit $rval"
109
110 def _userpriv_test_write_file(settings, file_path):
111         """
112         Drop privileges and try to open a file for writing. The file may or
113         may not exist, and the parent directory is assumed to exist. The file
114         is removed before returning.
115
116         @param settings: A config instance which is passed to _spawn_fetch()
117         @param file_path: A file path to open and write.
118         @return: True if write succeeds, False otherwise.
119         """
120
121         global _userpriv_test_write_file_cache, _userpriv_test_write_cmd_script
122         rval = _userpriv_test_write_file_cache.get(file_path)
123         if rval is not None:
124                 return rval
125
126         args = [BASH_BINARY, "-c", _userpriv_test_write_cmd_script % \
127                 {"file_path" : _shell_quote(file_path)}]
128
129         returncode = _spawn_fetch(settings, args)
130
131         rval = returncode == os.EX_OK
132         _userpriv_test_write_file_cache[file_path] = rval
133         return rval
134
135 def _checksum_failure_temp_file(distdir, basename):
136         """
137         First try to find a duplicate temp file with the same checksum and return
138         that filename if available. Otherwise, use mkstemp to create a new unique
139         filename._checksum_failure_.$RANDOM, rename the given file, and return the
140         new filename. In any case, filename will be renamed or removed before this
141         function returns a temp filename.
142         """
143
144         filename = os.path.join(distdir, basename)
145         size = os.stat(filename).st_size
146         checksum = None
147         tempfile_re = re.compile(re.escape(basename) + r'\._checksum_failure_\..*')
148         for temp_filename in os.listdir(distdir):
149                 if not tempfile_re.match(temp_filename):
150                         continue
151                 temp_filename = os.path.join(distdir, temp_filename)
152                 try:
153                         if size != os.stat(temp_filename).st_size:
154                                 continue
155                 except OSError:
156                         continue
157                 try:
158                         temp_checksum = perform_md5(temp_filename)
159                 except FileNotFound:
160                         # Apparently the temp file disappeared. Let it go.
161                         continue
162                 if checksum is None:
163                         checksum = perform_md5(filename)
164                 if checksum == temp_checksum:
165                         os.unlink(filename)
166                         return temp_filename
167
168         fd, temp_filename = \
169                 tempfile.mkstemp("", basename + "._checksum_failure_.", distdir)
170         os.close(fd)
171         os.rename(filename, temp_filename)
172         return temp_filename
173
174 def _check_digests(filename, digests, show_errors=1):
175         """
176         Check digests and display a message if an error occurs.
177         @return True if all digests match, False otherwise.
178         """
179         verified_ok, reason = verify_all(filename, digests)
180         if not verified_ok:
181                 if show_errors:
182                         writemsg(_("!!! Previously fetched"
183                                 " file: '%s'\n") % filename, noiselevel=-1)
184                         writemsg(_("!!! Reason: %s\n") % reason[0],
185                                 noiselevel=-1)
186                         writemsg(_("!!! Got:      %s\n"
187                                 "!!! Expected: %s\n") % \
188                                 (reason[1], reason[2]), noiselevel=-1)
189                 return False
190         return True
191
192 def _check_distfile(filename, digests, eout, show_errors=1, hash_filter=None):
193         """
194         @return a tuple of (match, stat_obj) where match is True if filename
195         matches all given digests (if any) and stat_obj is a stat result, or
196         None if the file does not exist.
197         """
198         if digests is None:
199                 digests = {}
200         size = digests.get("size")
201         if size is not None and len(digests) == 1:
202                 digests = None
203
204         try:
205                 st = os.stat(filename)
206         except OSError:
207                 return (False, None)
208         if size is not None and size != st.st_size:
209                 return (False, st)
210         if not digests:
211                 if size is not None:
212                         eout.ebegin(_("%s size ;-)") % os.path.basename(filename))
213                         eout.eend(0)
214                 elif st.st_size == 0:
215                         # Zero-byte distfiles are always invalid.
216                         return (False, st)
217         else:
218                 digests = _filter_unaccelarated_hashes(digests)
219                 if hash_filter is not None:
220                         digests = _apply_hash_filter(digests, hash_filter)
221                 if _check_digests(filename, digests, show_errors=show_errors):
222                         eout.ebegin("%s %s ;-)" % (os.path.basename(filename),
223                                 " ".join(sorted(digests))))
224                         eout.eend(0)
225                 else:
226                         return (False, st)
227         return (True, st)
228
229 _fetch_resume_size_re = re.compile('(^[\d]+)([KMGTPEZY]?$)')
230
231 _size_suffix_map = {
232         ''  : 0,
233         'K' : 10,
234         'M' : 20,
235         'G' : 30,
236         'T' : 40,
237         'P' : 50,
238         'E' : 60,
239         'Z' : 70,
240         'Y' : 80,
241 }
242
243
244 def _get_checksum_failure_max_tries(settings, default=5):
245         """
246         Get the maximum number of failed download attempts.
247
248         Generally, downloading the same file repeatedly from
249         every single available mirror is a waste of bandwidth
250         and time, so there needs to be a cap.
251         """
252         key = 'PORTAGE_FETCH_CHECKSUM_TRY_MIRRORS'
253         v = default
254         try:
255                 v = int(settings.get(key, default))
256         except (ValueError, OverflowError):
257                 writemsg(_("!!! Variable %s contains "
258                         "non-integer value: '%s'\n")
259                         % (key, settings[key]),
260                         noiselevel=-1)
261                 writemsg(_("!!! Using %s default value: %s\n")
262                         % (key, default),
263                         noiselevel=-1)
264                 v = default
265         if v < 1:
266                 writemsg(_("!!! Variable %s contains "
267                         "value less than 1: '%s'\n")
268                         % (key, v),
269                         noiselevel=-1)
270                 writemsg(_("!!! Using %s default value: %s\n")
271                         % (key, default),
272                         noiselevel=-1)
273                 v = default
274         return v
275
276
277 def _get_fetch_resume_size(settings, default='350K'):
278         key = 'PORTAGE_FETCH_RESUME_MIN_SIZE'
279         v = settings.get(key)
280         if v is not None:
281                 v = "".join(v.split())
282                 if not v:
283                         # If it's empty, silently use the default.
284                         v = default
285                 match = _fetch_resume_size_re.match(v)
286                 if (match is None or
287                                 match.group(2).upper() not in _size_suffix_map):
288                         writemsg(_("!!! Variable %s contains an "
289                                 "unrecognized format: '%s'\n")
290                                 % (key, settings[key]),
291                                 noiselevel=-1)
292                         writemsg(_("!!! Using %s default value: %s\n")
293                                 % (key, default),
294                                 noiselevel=-1)
295                         v = None
296         if v is None:
297                 v = default
298                 match = _fetch_resume_size_re.match(v)
299         v = int(match.group(1)) * \
300                 2 ** _size_suffix_map[match.group(2).upper()]
301         return v
302
303
304 def _get_file_uri_tuples(uris):
305         """Return a list of (filename, URI) tuples."""
306         file_uri_tuples = []
307         # Check for 'items' attribute since OrderedDict is not a dict.
308         if hasattr(uris, 'items'):
309                 for filename, uri_set in uris.items():
310                         for uri in uri_set:
311                                 file_uri_tuples.append((filename, uri))
312                         if not uri_set:
313                                 file_uri_tuples.append((filename, None))
314         else:
315                 for uri in uris:
316                         if urlparse(uri).scheme:
317                                 file_uri_tuples.append(
318                                         (os.path.basename(uri), uri))
319                         else:
320                                 file_uri_tuples.append(
321                                         (os.path.basename(uri), None))
322         return file_uri_tuples
323
324
325 def _expand_mirror(uri, custom_mirrors=(), third_party_mirrors=()):
326         """
327         Replace the 'mirror://' scheme and netloc in the URI.
328
329         Returns an iterable listing expanded (group, URI) tuples,
330         where the group is either 'custom' or 'third-party'.
331         """
332         parsed = urlparse(uri)
333         mirror = parsed.netloc
334         path = parsed.path
335         if path:
336                 # Try user-defined mirrors first
337                 if mirror in custom_mirrors:
338                         for cmirr in custom_mirrors[mirror]:
339                                 m_uri = urlparse(cmirr)
340                                 yield ('custom', urlunparse((
341                                         m_uri.scheme, m_uri.netloc, path) +
342                                         parsed[3:]))
343
344                 # now try the official mirrors
345                 if mirror in third_party_mirrors:
346                         uris = []
347                         for locmirr in third_party_mirrors[mirror]:
348                                 m_uri = urlparse(locmirr)
349                                 uris.append(urlunparse((
350                                         m_uri.scheme, m_uri.netloc, path) +
351                                         parsed[3:]))
352                         random.shuffle(uris)
353                         for uri in uris:
354                                 yield ('third-party', uri)
355
356                 if (not custom_mirrors.get(mirror, []) and
357                                 not third_party_mirrors.get(mirror, [])):
358                         writemsg(
359                                 _("No known mirror by the name: %s\n")
360                                 % mirror)
361         else:
362                 writemsg(_("Invalid mirror definition in SRC_URI:\n"),
363                          noiselevel=-1)
364                 writemsg("  %s\n" % uri, noiselevel=-1)
365
366
367 def _get_uris(uris, settings, custom_mirrors=(), locations=()):
368         restrict = settings.get("PORTAGE_RESTRICT", "").split()
369         restrict_fetch = "fetch" in restrict
370         restrict_mirror = "mirror" in restrict or "nomirror" in restrict
371         force_mirror = (
372                 "force-mirror" in settings.features and
373                 not restrict_mirror)
374
375         third_party_mirrors = settings.thirdpartymirrors()
376         third_party_mirror_uris = {}
377         filedict = OrderedDict()
378         primaryuri_dict = {}
379         for filename, uri in _get_file_uri_tuples(uris=uris):
380                 if filename not in filedict:
381                         filedict[filename] = [
382                                 os.path.join(location, 'distfiles', filename)
383                                 for location in locations]
384                 if uri is None:
385                         continue
386                 if uri.startswith('mirror://'):
387                         expanded_uris = _expand_mirror(
388                                 uri=uri, custom_mirrors=custom_mirrors,
389                                 third_party_mirrors=third_party_mirrors)
390                         filedict[filename].extend(
391                                 uri for _, uri in expanded_uris)
392                         third_party_mirror_uris.setdefault(filename, []).extend(
393                                 uri for group, uri in expanded_uris
394                                 if group == 'third-party')
395                 else:
396                         if restrict_fetch or force_mirror:
397                                 # Only fetch from specific mirrors is allowed.
398                                 continue
399                         primaryuris = primaryuri_dict.get(filename)
400                         if primaryuris is None:
401                                 primaryuris = []
402                                 primaryuri_dict[filename] = primaryuris
403                         primaryuris.append(uri)
404
405         # Order primaryuri_dict values to match that in SRC_URI.
406         for uris in primaryuri_dict.values():
407                 uris.reverse()
408
409         # Prefer third_party_mirrors over normal mirrors in cases when
410         # the file does not yet exist on the normal mirrors.
411         for filename, uris in third_party_mirror_uris.items():
412                 primaryuri_dict.setdefault(filename, []).extend(uris)
413
414         # Now merge primaryuri values into filedict (includes mirrors
415         # explicitly referenced in SRC_URI).
416         if "primaryuri" in restrict:
417                 for filename, uris in filedict.items():
418                         filedict[filename] = primaryuri_dict.get(filename, []) + uris
419         else:
420                 for filename in filedict:
421                         filedict[filename] += primaryuri_dict.get(filename, [])
422
423         return filedict, primaryuri_dict
424
425
426 def fetch(myuris, mysettings, listonly=0, fetchonly=0,
427         locks_in_subdir=".locks", use_locks=1, try_mirrors=1, digests=None,
428         allow_missing_digests=True):
429         "fetch files.  Will use digest file if available."
430
431         if not myuris:
432                 return 1
433
434         features = mysettings.features
435         restrict = mysettings.get("PORTAGE_RESTRICT","").split()
436
437         userfetch = secpass >= 2 and "userfetch" in features
438         userpriv = secpass >= 2 and "userpriv" in features
439
440         # 'nomirror' is bad/negative logic. You Restrict mirroring, not no-mirroring.
441         restrict_mirror = "mirror" in restrict or "nomirror" in restrict
442         if restrict_mirror:
443                 if ("mirror" in features) and ("lmirror" not in features):
444                         # lmirror should allow you to bypass mirror restrictions.
445                         # XXX: This is not a good thing, and is temporary at best.
446                         print(_(">>> \"mirror\" mode desired and \"mirror\" restriction found; skipping fetch."))
447                         return 1
448
449         checksum_failure_max_tries = _get_checksum_failure_max_tries(
450                 settings=mysettings)
451         fetch_resume_size = _get_fetch_resume_size(settings=mysettings)
452
453         # Behave like the package has RESTRICT="primaryuri" after a
454         # couple of checksum failures, to increase the probablility
455         # of success before checksum_failure_max_tries is reached.
456         checksum_failure_primaryuri = 2
457
458         # In the background parallel-fetch process, it's safe to skip checksum
459         # verification of pre-existing files in $DISTDIR that have the correct
460         # file size. The parent process will verify their checksums prior to
461         # the unpack phase.
462
463         parallel_fetchonly = "PORTAGE_PARALLEL_FETCHONLY" in mysettings
464         if parallel_fetchonly:
465                 fetchonly = 1
466
467         check_config_instance(mysettings)
468
469         custommirrors = grabdict(os.path.join(mysettings["PORTAGE_CONFIGROOT"],
470                 CUSTOM_MIRRORS_FILE), recursive=1)
471
472         mymirrors=[]
473
474         if listonly or ("distlocks" not in features):
475                 use_locks = 0
476
477         fetch_to_ro = 0
478         if "skiprocheck" in features:
479                 fetch_to_ro = 1
480
481         if not os.access(mysettings["DISTDIR"],os.W_OK) and fetch_to_ro:
482                 if use_locks:
483                         writemsg(colorize("BAD",
484                                 _("!!! For fetching to a read-only filesystem, "
485                                 "locking should be turned off.\n")), noiselevel=-1)
486                         writemsg(_("!!! This can be done by adding -distlocks to "
487                                 "FEATURES in /etc/portage/make.conf\n"), noiselevel=-1)
488 #                       use_locks = 0
489
490         # local mirrors are always added
491         if "local" in custommirrors:
492                 mymirrors += custommirrors["local"]
493
494         if restrict_mirror:
495                 # We don't add any mirrors.
496                 pass
497         else:
498                 if try_mirrors:
499                         mymirrors += [x.rstrip("/") for x in mysettings["GENTOO_MIRRORS"].split() if x]
500
501         hash_filter = _hash_filter(mysettings.get("PORTAGE_CHECKSUM_FILTER", ""))
502         if hash_filter.transparent:
503                 hash_filter = None
504         skip_manifest = mysettings.get("EBUILD_SKIP_MANIFEST") == "1"
505         if skip_manifest:
506                 allow_missing_digests = True
507         pkgdir = mysettings.get("O")
508         if digests is None and not (pkgdir is None or skip_manifest):
509                 mydigests = mysettings.repositories.get_repo_for_location(
510                         os.path.dirname(os.path.dirname(pkgdir))).load_manifest(
511                         pkgdir, mysettings["DISTDIR"]).getTypeDigests("DIST")
512         elif digests is None or skip_manifest:
513                 # no digests because fetch was not called for a specific package
514                 mydigests = {}
515         else:
516                 mydigests = digests
517
518         ro_distdirs = [x for x in \
519                 shlex_split(mysettings.get("PORTAGE_RO_DISTDIRS", "")) \
520                 if os.path.isdir(x)]
521
522         fsmirrors = []
523         for x in range(len(mymirrors)-1,-1,-1):
524                 if mymirrors[x] and mymirrors[x][0]=='/':
525                         fsmirrors += [mymirrors[x]]
526                         del mymirrors[x]
527
528         restrict_fetch = "fetch" in restrict
529         custom_local_mirrors = custommirrors.get("local", [])
530         if restrict_fetch:
531                 # With fetch restriction, a normal uri may only be fetched from
532                 # custom local mirrors (if available).  A mirror:// uri may also
533                 # be fetched from specific mirrors (effectively overriding fetch
534                 # restriction, but only for specific mirrors).
535                 locations = custom_local_mirrors
536         else:
537                 locations = mymirrors
538
539         filedict, primaryuri_dict = _get_uris(
540                 uris=myuris, settings=mysettings,
541                 custom_mirrors=custommirrors, locations=locations)
542
543         can_fetch=True
544
545         if listonly:
546                 can_fetch = False
547
548         if can_fetch and not fetch_to_ro:
549                 global _userpriv_test_write_file_cache
550                 dirmode  = 0o070
551                 filemode =   0o60
552                 modemask =    0o2
553                 dir_gid = portage_gid
554                 if "FAKED_MODE" in mysettings:
555                         # When inside fakeroot, directories with portage's gid appear
556                         # to have root's gid. Therefore, use root's gid instead of
557                         # portage's gid to avoid spurrious permissions adjustments
558                         # when inside fakeroot.
559                         dir_gid = 0
560                 distdir_dirs = [""]
561                 try:
562                         
563                         for x in distdir_dirs:
564                                 mydir = os.path.join(mysettings["DISTDIR"], x)
565                                 write_test_file = os.path.join(
566                                         mydir, ".__portage_test_write__")
567
568                                 try:
569                                         st = os.stat(mydir)
570                                 except OSError:
571                                         st = None
572
573                                 if st is not None and stat.S_ISDIR(st.st_mode):
574                                         if not (userfetch or userpriv):
575                                                 continue
576                                         if _userpriv_test_write_file(mysettings, write_test_file):
577                                                 continue
578
579                                 _userpriv_test_write_file_cache.pop(write_test_file, None)
580                                 if ensure_dirs(mydir, gid=dir_gid, mode=dirmode, mask=modemask):
581                                         if st is None:
582                                                 # The directory has just been created
583                                                 # and therefore it must be empty.
584                                                 continue
585                                         writemsg(_("Adjusting permissions recursively: '%s'\n") % mydir,
586                                                 noiselevel=-1)
587                                         def onerror(e):
588                                                 raise # bail out on the first error that occurs during recursion
589                                         if not apply_recursive_permissions(mydir,
590                                                 gid=dir_gid, dirmode=dirmode, dirmask=modemask,
591                                                 filemode=filemode, filemask=modemask, onerror=onerror):
592                                                 raise OperationNotPermitted(
593                                                         _("Failed to apply recursive permissions for the portage group."))
594                 except PortageException as e:
595                         if not os.path.isdir(mysettings["DISTDIR"]):
596                                 writemsg("!!! %s\n" % str(e), noiselevel=-1)
597                                 writemsg(_("!!! Directory Not Found: DISTDIR='%s'\n") % mysettings["DISTDIR"], noiselevel=-1)
598                                 writemsg(_("!!! Fetching will fail!\n"), noiselevel=-1)
599
600         if can_fetch and \
601                 not fetch_to_ro and \
602                 not os.access(mysettings["DISTDIR"], os.W_OK):
603                 writemsg(_("!!! No write access to '%s'\n") % mysettings["DISTDIR"],
604                         noiselevel=-1)
605                 can_fetch = False
606
607         distdir_writable = can_fetch and not fetch_to_ro
608         failed_files = set()
609         restrict_fetch_msg = False
610
611         for myfile in filedict:
612                 """
613                 fetched  status
614                 0        nonexistent
615                 1        partially downloaded
616                 2        completely downloaded
617                 """
618                 fetched = 0
619
620                 orig_digests = mydigests.get(myfile, {})
621
622                 if not (allow_missing_digests or listonly):
623                         verifiable_hash_types = set(orig_digests).intersection(hashfunc_map)
624                         verifiable_hash_types.discard("size")
625                         if not verifiable_hash_types:
626                                 expected = set(hashfunc_map)
627                                 expected.discard("size")
628                                 expected = " ".join(sorted(expected))
629                                 got = set(orig_digests)
630                                 got.discard("size")
631                                 got = " ".join(sorted(got))
632                                 reason = (_("Insufficient data for checksum verification"),
633                                         got, expected)
634                                 writemsg(_("!!! Fetched file: %s VERIFY FAILED!\n") % myfile,
635                                         noiselevel=-1)
636                                 writemsg(_("!!! Reason: %s\n") % reason[0],
637                                         noiselevel=-1)
638                                 writemsg(_("!!! Got:      %s\n!!! Expected: %s\n") % \
639                                         (reason[1], reason[2]), noiselevel=-1)
640
641                                 if fetchonly:
642                                         failed_files.add(myfile)
643                                         continue
644                                 else:
645                                         return 0
646
647                 size = orig_digests.get("size")
648                 if size == 0:
649                         # Zero-byte distfiles are always invalid, so discard their digests.
650                         del mydigests[myfile]
651                         orig_digests.clear()
652                         size = None
653                 pruned_digests = orig_digests
654                 if parallel_fetchonly:
655                         pruned_digests = {}
656                         if size is not None:
657                                 pruned_digests["size"] = size
658
659                 myfile_path = os.path.join(mysettings["DISTDIR"], myfile)
660                 has_space = True
661                 has_space_superuser = True
662                 file_lock = None
663                 if listonly:
664                         writemsg_stdout("\n", noiselevel=-1)
665                 else:
666                         # check if there is enough space in DISTDIR to completely store myfile
667                         # overestimate the filesize so we aren't bitten by FS overhead
668                         vfs_stat = None
669                         if size is not None and hasattr(os, "statvfs"):
670                                 try:
671                                         vfs_stat = os.statvfs(mysettings["DISTDIR"])
672                                 except OSError as e:
673                                         writemsg_level("!!! statvfs('%s'): %s\n" %
674                                                 (mysettings["DISTDIR"], e),
675                                                 noiselevel=-1, level=logging.ERROR)
676                                         del e
677
678                         if vfs_stat is not None:
679                                 try:
680                                         mysize = os.stat(myfile_path).st_size
681                                 except OSError as e:
682                                         if e.errno not in (errno.ENOENT, errno.ESTALE):
683                                                 raise
684                                         del e
685                                         mysize = 0
686                                 if (size - mysize + vfs_stat.f_bsize) >= \
687                                         (vfs_stat.f_bsize * vfs_stat.f_bavail):
688
689                                         if (size - mysize + vfs_stat.f_bsize) >= \
690                                                 (vfs_stat.f_bsize * vfs_stat.f_bfree):
691                                                 has_space_superuser = False
692
693                                         if not has_space_superuser:
694                                                 has_space = False
695                                         elif secpass < 2:
696                                                 has_space = False
697                                         elif userfetch:
698                                                 has_space = False
699
700                         if distdir_writable and use_locks:
701
702                                 lock_kwargs = {}
703                                 if fetchonly:
704                                         lock_kwargs["flags"] = os.O_NONBLOCK
705
706                                 try:
707                                         file_lock = lockfile(myfile_path,
708                                                 wantnewlockfile=1, **lock_kwargs)
709                                 except TryAgain:
710                                         writemsg(_(">>> File '%s' is already locked by "
711                                                 "another fetcher. Continuing...\n") % myfile,
712                                                 noiselevel=-1)
713                                         continue
714                 try:
715                         if not listonly:
716
717                                 eout = EOutput()
718                                 eout.quiet = mysettings.get("PORTAGE_QUIET") == "1"
719                                 match, mystat = _check_distfile(
720                                         myfile_path, pruned_digests, eout, hash_filter=hash_filter)
721                                 if match:
722                                         # Skip permission adjustment for symlinks, since we don't
723                                         # want to modify anything outside of the primary DISTDIR,
724                                         # and symlinks typically point to PORTAGE_RO_DISTDIRS.
725                                         if distdir_writable and not os.path.islink(myfile_path):
726                                                 try:
727                                                         apply_secpass_permissions(myfile_path,
728                                                                 gid=portage_gid, mode=0o664, mask=0o2,
729                                                                 stat_cached=mystat)
730                                                 except PortageException as e:
731                                                         if not os.access(myfile_path, os.R_OK):
732                                                                 writemsg(_("!!! Failed to adjust permissions:"
733                                                                         " %s\n") % str(e), noiselevel=-1)
734                                                         del e
735                                         continue
736
737                                 if distdir_writable and mystat is None:
738                                         # Remove broken symlinks if necessary.
739                                         try:
740                                                 os.unlink(myfile_path)
741                                         except OSError:
742                                                 pass
743
744                                 if mystat is not None:
745                                         if stat.S_ISDIR(mystat.st_mode):
746                                                 writemsg_level(
747                                                         _("!!! Unable to fetch file since "
748                                                         "a directory is in the way: \n"
749                                                         "!!!   %s\n") % myfile_path,
750                                                         level=logging.ERROR, noiselevel=-1)
751                                                 return 0
752
753                                         if mystat.st_size == 0:
754                                                 if distdir_writable:
755                                                         try:
756                                                                 os.unlink(myfile_path)
757                                                         except OSError:
758                                                                 pass
759                                         elif distdir_writable:
760                                                 if mystat.st_size < fetch_resume_size and \
761                                                         mystat.st_size < size:
762                                                         # If the file already exists and the size does not
763                                                         # match the existing digests, it may be that the
764                                                         # user is attempting to update the digest. In this
765                                                         # case, the digestgen() function will advise the
766                                                         # user to use `ebuild --force foo.ebuild manifest`
767                                                         # in order to force the old digests to be replaced.
768                                                         # Since the user may want to keep this file, rename
769                                                         # it instead of deleting it.
770                                                         writemsg(_(">>> Renaming distfile with size "
771                                                                 "%d (smaller than " "PORTAGE_FETCH_RESU"
772                                                                 "ME_MIN_SIZE)\n") % mystat.st_size)
773                                                         temp_filename = \
774                                                                 _checksum_failure_temp_file(
775                                                                 mysettings["DISTDIR"], myfile)
776                                                         writemsg_stdout(_("Refetching... "
777                                                                 "File renamed to '%s'\n\n") % \
778                                                                 temp_filename, noiselevel=-1)
779                                                 elif mystat.st_size >= size:
780                                                         temp_filename = \
781                                                                 _checksum_failure_temp_file(
782                                                                 mysettings["DISTDIR"], myfile)
783                                                         writemsg_stdout(_("Refetching... "
784                                                                 "File renamed to '%s'\n\n") % \
785                                                                 temp_filename, noiselevel=-1)
786
787                                 if distdir_writable and ro_distdirs:
788                                         readonly_file = None
789                                         for x in ro_distdirs:
790                                                 filename = os.path.join(x, myfile)
791                                                 match, mystat = _check_distfile(
792                                                         filename, pruned_digests, eout, hash_filter=hash_filter)
793                                                 if match:
794                                                         readonly_file = filename
795                                                         break
796                                         if readonly_file is not None:
797                                                 try:
798                                                         os.unlink(myfile_path)
799                                                 except OSError as e:
800                                                         if e.errno not in (errno.ENOENT, errno.ESTALE):
801                                                                 raise
802                                                         del e
803                                                 os.symlink(readonly_file, myfile_path)
804                                                 continue
805
806                                 # this message is shown only after we know that
807                                 # the file is not already fetched
808                                 if not has_space:
809                                         writemsg(_("!!! Insufficient space to store %s in %s\n") % \
810                                                 (myfile, mysettings["DISTDIR"]), noiselevel=-1)
811
812                                         if has_space_superuser:
813                                                 writemsg(_("!!! Insufficient privileges to use "
814                                                         "remaining space.\n"), noiselevel=-1)
815                                                 if userfetch:
816                                                         writemsg(_("!!! You may set FEATURES=\"-userfetch\""
817                                                                 " in /etc/portage/make.conf in order to fetch with\n"
818                                                                 "!!! superuser privileges.\n"), noiselevel=-1)
819
820                                 if fsmirrors and not os.path.exists(myfile_path) and has_space:
821                                         for mydir in fsmirrors:
822                                                 mirror_file = os.path.join(mydir, myfile)
823                                                 try:
824                                                         shutil.copyfile(mirror_file, myfile_path)
825                                                         writemsg(_("Local mirror has file: %s\n") % myfile)
826                                                         break
827                                                 except (IOError, OSError) as e:
828                                                         if e.errno not in (errno.ENOENT, errno.ESTALE):
829                                                                 raise
830                                                         del e
831
832                                 try:
833                                         mystat = os.stat(myfile_path)
834                                 except OSError as e:
835                                         if e.errno not in (errno.ENOENT, errno.ESTALE):
836                                                 raise
837                                         del e
838                                 else:
839                                         # Skip permission adjustment for symlinks, since we don't
840                                         # want to modify anything outside of the primary DISTDIR,
841                                         # and symlinks typically point to PORTAGE_RO_DISTDIRS.
842                                         if not os.path.islink(myfile_path):
843                                                 try:
844                                                         apply_secpass_permissions(myfile_path,
845                                                                 gid=portage_gid, mode=0o664, mask=0o2,
846                                                                 stat_cached=mystat)
847                                                 except PortageException as e:
848                                                         if not os.access(myfile_path, os.R_OK):
849                                                                 writemsg(_("!!! Failed to adjust permissions:"
850                                                                         " %s\n") % (e,), noiselevel=-1)
851
852                                         # If the file is empty then it's obviously invalid. Remove
853                                         # the empty file and try to download if possible.
854                                         if mystat.st_size == 0:
855                                                 if distdir_writable:
856                                                         try:
857                                                                 os.unlink(myfile_path)
858                                                         except EnvironmentError:
859                                                                 pass
860                                         elif myfile not in mydigests:
861                                                 # We don't have a digest, but the file exists.  We must
862                                                 # assume that it is fully downloaded.
863                                                 continue
864                                         else:
865                                                 if mystat.st_size < mydigests[myfile]["size"] and \
866                                                         not restrict_fetch:
867                                                         fetched = 1 # Try to resume this download.
868                                                 elif parallel_fetchonly and \
869                                                         mystat.st_size == mydigests[myfile]["size"]:
870                                                         eout = EOutput()
871                                                         eout.quiet = \
872                                                                 mysettings.get("PORTAGE_QUIET") == "1"
873                                                         eout.ebegin(
874                                                                 "%s size ;-)" % (myfile, ))
875                                                         eout.eend(0)
876                                                         continue
877                                                 else:
878                                                         digests = _filter_unaccelarated_hashes(mydigests[myfile])
879                                                         if hash_filter is not None:
880                                                                 digests = _apply_hash_filter(digests, hash_filter)
881                                                         verified_ok, reason = verify_all(myfile_path, digests)
882                                                         if not verified_ok:
883                                                                 writemsg(_("!!! Previously fetched"
884                                                                         " file: '%s'\n") % myfile, noiselevel=-1)
885                                                                 writemsg(_("!!! Reason: %s\n") % reason[0],
886                                                                         noiselevel=-1)
887                                                                 writemsg(_("!!! Got:      %s\n"
888                                                                         "!!! Expected: %s\n") % \
889                                                                         (reason[1], reason[2]), noiselevel=-1)
890                                                                 if reason[0] == _("Insufficient data for checksum verification"):
891                                                                         return 0
892                                                                 if distdir_writable:
893                                                                         temp_filename = \
894                                                                                 _checksum_failure_temp_file(
895                                                                                 mysettings["DISTDIR"], myfile)
896                                                                         writemsg_stdout(_("Refetching... "
897                                                                                 "File renamed to '%s'\n\n") % \
898                                                                                 temp_filename, noiselevel=-1)
899                                                         else:
900                                                                 eout = EOutput()
901                                                                 eout.quiet = \
902                                                                         mysettings.get("PORTAGE_QUIET", None) == "1"
903                                                                 if digests:
904                                                                         digests = list(digests)
905                                                                         digests.sort()
906                                                                         eout.ebegin(
907                                                                                 "%s %s ;-)" % (myfile, " ".join(digests)))
908                                                                         eout.eend(0)
909                                                                 continue # fetch any remaining files
910
911                         # Create a reversed list since that is optimal for list.pop().
912                         uri_list = filedict[myfile][:]
913                         uri_list.reverse()
914                         checksum_failure_count = 0
915                         tried_locations = set()
916                         while uri_list:
917                                 loc = uri_list.pop()
918                                 # Eliminate duplicates here in case we've switched to
919                                 # "primaryuri" mode on the fly due to a checksum failure.
920                                 if loc in tried_locations:
921                                         continue
922                                 tried_locations.add(loc)
923                                 if listonly:
924                                         writemsg_stdout(loc+" ", noiselevel=-1)
925                                         continue
926                                 # allow different fetchcommands per protocol
927                                 protocol = loc[0:loc.find("://")]
928
929                                 global_config_path = GLOBAL_CONFIG_PATH
930                                 if portage.const.EPREFIX:
931                                         global_config_path = os.path.join(portage.const.EPREFIX,
932                                                         GLOBAL_CONFIG_PATH.lstrip(os.sep))
933
934                                 missing_file_param = False
935                                 fetchcommand_var = "FETCHCOMMAND_" + protocol.upper()
936                                 fetchcommand = mysettings.get(fetchcommand_var)
937                                 if fetchcommand is None:
938                                         fetchcommand_var = "FETCHCOMMAND"
939                                         fetchcommand = mysettings.get(fetchcommand_var)
940                                         if fetchcommand is None:
941                                                 writemsg_level(
942                                                         _("!!! %s is unset. It should "
943                                                         "have been defined in\n!!! %s/make.globals.\n") \
944                                                         % (fetchcommand_var, global_config_path),
945                                                         level=logging.ERROR, noiselevel=-1)
946                                                 return 0
947                                 if "${FILE}" not in fetchcommand:
948                                         writemsg_level(
949                                                 _("!!! %s does not contain the required ${FILE}"
950                                                 " parameter.\n") % fetchcommand_var,
951                                                 level=logging.ERROR, noiselevel=-1)
952                                         missing_file_param = True
953
954                                 resumecommand_var = "RESUMECOMMAND_" + protocol.upper()
955                                 resumecommand = mysettings.get(resumecommand_var)
956                                 if resumecommand is None:
957                                         resumecommand_var = "RESUMECOMMAND"
958                                         resumecommand = mysettings.get(resumecommand_var)
959                                         if resumecommand is None:
960                                                 writemsg_level(
961                                                         _("!!! %s is unset. It should "
962                                                         "have been defined in\n!!! %s/make.globals.\n") \
963                                                         % (resumecommand_var, global_config_path),
964                                                         level=logging.ERROR, noiselevel=-1)
965                                                 return 0
966                                 if "${FILE}" not in resumecommand:
967                                         writemsg_level(
968                                                 _("!!! %s does not contain the required ${FILE}"
969                                                 " parameter.\n") % resumecommand_var,
970                                                 level=logging.ERROR, noiselevel=-1)
971                                         missing_file_param = True
972
973                                 if missing_file_param:
974                                         writemsg_level(
975                                                 _("!!! Refer to the make.conf(5) man page for "
976                                                 "information about how to\n!!! correctly specify "
977                                                 "FETCHCOMMAND and RESUMECOMMAND.\n"),
978                                                 level=logging.ERROR, noiselevel=-1)
979                                         if myfile != os.path.basename(loc):
980                                                 return 0
981
982                                 if not can_fetch:
983                                         if fetched != 2:
984                                                 try:
985                                                         mysize = os.stat(myfile_path).st_size
986                                                 except OSError as e:
987                                                         if e.errno not in (errno.ENOENT, errno.ESTALE):
988                                                                 raise
989                                                         del e
990                                                         mysize = 0
991
992                                                 if mysize == 0:
993                                                         writemsg(_("!!! File %s isn't fetched but unable to get it.\n") % myfile,
994                                                                 noiselevel=-1)
995                                                 elif size is None or size > mysize:
996                                                         writemsg(_("!!! File %s isn't fully fetched, but unable to complete it\n") % myfile,
997                                                                 noiselevel=-1)
998                                                 else:
999                                                         writemsg(_("!!! File %s is incorrect size, "
1000                                                                 "but unable to retry.\n") % myfile, noiselevel=-1)
1001                                                 return 0
1002                                         else:
1003                                                 continue
1004
1005                                 if fetched != 2 and has_space:
1006                                         #we either need to resume or start the download
1007                                         if fetched == 1:
1008                                                 try:
1009                                                         mystat = os.stat(myfile_path)
1010                                                 except OSError as e:
1011                                                         if e.errno not in (errno.ENOENT, errno.ESTALE):
1012                                                                 raise
1013                                                         del e
1014                                                         fetched = 0
1015                                                 else:
1016                                                         if mystat.st_size < fetch_resume_size:
1017                                                                 writemsg(_(">>> Deleting distfile with size "
1018                                                                         "%d (smaller than " "PORTAGE_FETCH_RESU"
1019                                                                         "ME_MIN_SIZE)\n") % mystat.st_size)
1020                                                                 try:
1021                                                                         os.unlink(myfile_path)
1022                                                                 except OSError as e:
1023                                                                         if e.errno not in \
1024                                                                                 (errno.ENOENT, errno.ESTALE):
1025                                                                                 raise
1026                                                                         del e
1027                                                                 fetched = 0
1028                                         if fetched == 1:
1029                                                 #resume mode:
1030                                                 writemsg(_(">>> Resuming download...\n"))
1031                                                 locfetch=resumecommand
1032                                                 command_var = resumecommand_var
1033                                         else:
1034                                                 #normal mode:
1035                                                 locfetch=fetchcommand
1036                                                 command_var = fetchcommand_var
1037                                         writemsg_stdout(_(">>> Downloading '%s'\n") % \
1038                                                 _hide_url_passwd(loc))
1039                                         variables = {
1040                                                 "URI":     loc,
1041                                                 "FILE":    myfile
1042                                         }
1043
1044                                         for k in ("DISTDIR", "PORTAGE_SSH_OPTS"):
1045                                                 try:
1046                                                         variables[k] = mysettings[k]
1047                                                 except KeyError:
1048                                                         pass
1049
1050                                         myfetch = shlex_split(locfetch)
1051                                         myfetch = [varexpand(x, mydict=variables) for x in myfetch]
1052                                         myret = -1
1053                                         try:
1054
1055                                                 myret = _spawn_fetch(mysettings, myfetch)
1056
1057                                         finally:
1058                                                 try:
1059                                                         apply_secpass_permissions(myfile_path,
1060                                                                 gid=portage_gid, mode=0o664, mask=0o2)
1061                                                 except FileNotFound:
1062                                                         pass
1063                                                 except PortageException as e:
1064                                                         if not os.access(myfile_path, os.R_OK):
1065                                                                 writemsg(_("!!! Failed to adjust permissions:"
1066                                                                         " %s\n") % str(e), noiselevel=-1)
1067                                                         del e
1068
1069                                         # If the file is empty then it's obviously invalid.  Don't
1070                                         # trust the return value from the fetcher.  Remove the
1071                                         # empty file and try to download again.
1072                                         try:
1073                                                 if os.stat(myfile_path).st_size == 0:
1074                                                         os.unlink(myfile_path)
1075                                                         fetched = 0
1076                                                         continue
1077                                         except EnvironmentError:
1078                                                 pass
1079
1080                                         if mydigests is not None and myfile in mydigests:
1081                                                 try:
1082                                                         mystat = os.stat(myfile_path)
1083                                                 except OSError as e:
1084                                                         if e.errno not in (errno.ENOENT, errno.ESTALE):
1085                                                                 raise
1086                                                         del e
1087                                                         fetched = 0
1088                                                 else:
1089
1090                                                         if stat.S_ISDIR(mystat.st_mode):
1091                                                                 # This can happen if FETCHCOMMAND erroneously
1092                                                                 # contains wget's -P option where it should
1093                                                                 # instead have -O.
1094                                                                 writemsg_level(
1095                                                                         _("!!! The command specified in the "
1096                                                                         "%s variable appears to have\n!!! "
1097                                                                         "created a directory instead of a "
1098                                                                         "normal file.\n") % command_var,
1099                                                                         level=logging.ERROR, noiselevel=-1)
1100                                                                 writemsg_level(
1101                                                                         _("!!! Refer to the make.conf(5) "
1102                                                                         "man page for information about how "
1103                                                                         "to\n!!! correctly specify "
1104                                                                         "FETCHCOMMAND and RESUMECOMMAND.\n"),
1105                                                                         level=logging.ERROR, noiselevel=-1)
1106                                                                 return 0
1107
1108                                                         # no exception?  file exists. let digestcheck() report
1109                                                         # an appropriately for size or checksum errors
1110
1111                                                         # If the fetcher reported success and the file is
1112                                                         # too small, it's probably because the digest is
1113                                                         # bad (upstream changed the distfile).  In this
1114                                                         # case we don't want to attempt to resume. Show a
1115                                                         # digest verification failure to that the user gets
1116                                                         # a clue about what just happened.
1117                                                         if myret != os.EX_OK and \
1118                                                                 mystat.st_size < mydigests[myfile]["size"]:
1119                                                                 # Fetch failed... Try the next one... Kill 404 files though.
1120                                                                 if (mystat[stat.ST_SIZE]<100000) and (len(myfile)>4) and not ((myfile[-5:]==".html") or (myfile[-4:]==".htm")):
1121                                                                         html404=re.compile("<title>.*(not found|404).*</title>",re.I|re.M)
1122                                                                         with io.open(
1123                                                                                 _unicode_encode(myfile_path,
1124                                                                                 encoding=_encodings['fs'], errors='strict'),
1125                                                                                 mode='r', encoding=_encodings['content'], errors='replace'
1126                                                                                 ) as f:
1127                                                                                 if html404.search(f.read()):
1128                                                                                         try:
1129                                                                                                 os.unlink(mysettings["DISTDIR"]+"/"+myfile)
1130                                                                                                 writemsg(_(">>> Deleting invalid distfile. (Improper 404 redirect from server.)\n"))
1131                                                                                                 fetched = 0
1132                                                                                                 continue
1133                                                                                         except (IOError, OSError):
1134                                                                                                 pass
1135                                                                 fetched = 1
1136                                                                 continue
1137                                                         if True:
1138                                                                 # File is the correct size--check the checksums for the fetched
1139                                                                 # file NOW, for those users who don't have a stable/continuous
1140                                                                 # net connection. This way we have a chance to try to download
1141                                                                 # from another mirror...
1142                                                                 digests = _filter_unaccelarated_hashes(mydigests[myfile])
1143                                                                 if hash_filter is not None:
1144                                                                         digests = _apply_hash_filter(digests, hash_filter)
1145                                                                 verified_ok, reason = verify_all(myfile_path, digests)
1146                                                                 if not verified_ok:
1147                                                                         writemsg(_("!!! Fetched file: %s VERIFY FAILED!\n") % myfile,
1148                                                                                 noiselevel=-1)
1149                                                                         writemsg(_("!!! Reason: %s\n") % reason[0],
1150                                                                                 noiselevel=-1)
1151                                                                         writemsg(_("!!! Got:      %s\n!!! Expected: %s\n") % \
1152                                                                                 (reason[1], reason[2]), noiselevel=-1)
1153                                                                         if reason[0] == _("Insufficient data for checksum verification"):
1154                                                                                 return 0
1155                                                                         temp_filename = \
1156                                                                                 _checksum_failure_temp_file(
1157                                                                                 mysettings["DISTDIR"], myfile)
1158                                                                         writemsg_stdout(_("Refetching... "
1159                                                                                 "File renamed to '%s'\n\n") % \
1160                                                                                 temp_filename, noiselevel=-1)
1161                                                                         fetched=0
1162                                                                         checksum_failure_count += 1
1163                                                                         if checksum_failure_count == \
1164                                                                                 checksum_failure_primaryuri:
1165                                                                                 # Switch to "primaryuri" mode in order
1166                                                                                 # to increase the probablility of
1167                                                                                 # of success.
1168                                                                                 primaryuris = \
1169                                                                                         primaryuri_dict.get(myfile)
1170                                                                                 if primaryuris:
1171                                                                                         uri_list.extend(
1172                                                                                                 reversed(primaryuris))
1173                                                                         if checksum_failure_count >= \
1174                                                                                 checksum_failure_max_tries:
1175                                                                                 break
1176                                                                 else:
1177                                                                         eout = EOutput()
1178                                                                         eout.quiet = mysettings.get("PORTAGE_QUIET", None) == "1"
1179                                                                         if digests:
1180                                                                                 eout.ebegin("%s %s ;-)" % \
1181                                                                                         (myfile, " ".join(sorted(digests))))
1182                                                                                 eout.eend(0)
1183                                                                         fetched=2
1184                                                                         break
1185                                         else:
1186                                                 if not myret:
1187                                                         fetched=2
1188                                                         break
1189                                                 elif mydigests!=None:
1190                                                         writemsg(_("No digest file available and download failed.\n\n"),
1191                                                                 noiselevel=-1)
1192                 finally:
1193                         if use_locks and file_lock:
1194                                 unlockfile(file_lock)
1195                                 file_lock = None
1196
1197                 if listonly:
1198                         writemsg_stdout("\n", noiselevel=-1)
1199                 if fetched != 2:
1200                         if restrict_fetch and not restrict_fetch_msg:
1201                                 restrict_fetch_msg = True
1202                                 msg = _("\n!!! %s/%s"
1203                                         " has fetch restriction turned on.\n"
1204                                         "!!! This probably means that this "
1205                                         "ebuild's files must be downloaded\n"
1206                                         "!!! manually.  See the comments in"
1207                                         " the ebuild for more information.\n\n") % \
1208                                         (mysettings["CATEGORY"], mysettings["PF"])
1209                                 writemsg_level(msg,
1210                                         level=logging.ERROR, noiselevel=-1)
1211                         elif restrict_fetch:
1212                                 pass
1213                         elif listonly:
1214                                 pass
1215                         elif not filedict[myfile]:
1216                                 writemsg(_("Warning: No mirrors available for file"
1217                                         " '%s'\n") % (myfile), noiselevel=-1)
1218                         else:
1219                                 writemsg(_("!!! Couldn't download '%s'. Aborting.\n") % myfile,
1220                                         noiselevel=-1)
1221
1222                         if listonly:
1223                                 failed_files.add(myfile)
1224                                 continue
1225                         elif fetchonly:
1226                                 failed_files.add(myfile)
1227                                 continue
1228                         return 0
1229         if failed_files:
1230                 return 0
1231         return 1