1 # Copyright 1998-2013 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
4 from __future__ import unicode_literals
6 __all__ = ["bindbapi", "binarytree"]
9 portage.proxy.lazyimport.lazyimport(globals(),
10 'portage.checksum:hashfunc_map,perform_multiple_checksums,' + \
11 'verify_all,_apply_hash_filter,_hash_filter',
12 'portage.dbapi.dep_expand:dep_expand',
13 'portage.dep:dep_getkey,isjustname,isvalidatom,match_from_list',
14 'portage.output:EOutput,colorize',
15 'portage.locks:lockfile,unlockfile',
16 'portage.package.ebuild.fetch:_check_distfile,_hide_url_passwd',
17 'portage.update:update_dbentries',
18 'portage.util:atomic_ofstream,ensure_dirs,normalize_path,' + \
19 'writemsg,writemsg_stdout',
20 'portage.util.listdir:listdir',
21 'portage.util._urlopen:urlopen@_urlopen',
22 'portage.versions:best,catpkgsplit,catsplit,_pkg_str',
25 from portage.cache.mappings import slot_dict_class
26 from portage.const import CACHE_PATH
27 from portage.dbapi.virtual import fakedbapi
28 from portage.dep import Atom, use_reduce, paren_enclose
29 from portage.exception import AlarmSignal, InvalidData, InvalidPackageName, \
30 PermissionDenied, PortageException
31 from portage.localization import _
32 from portage import _movefile
33 from portage import os
34 from portage import _encodings
35 from portage import _unicode_decode
36 from portage import _unicode_encode
48 from gzip import GzipFile
49 from itertools import chain
51 from urllib.parse import urlparse
53 from urlparse import urlparse
55 if sys.hexversion >= 0x3000000:
class UseCachedCopyOfRemoteIndex(Exception):
    """If the local copy is recent enough
    then fetching the remote index can be skipped."""
class bindbapi(fakedbapi):
    """dbapi for binary packages (tbz2 files), layered over fakedbapi
    and backed by a binarytree instance."""
    # Metadata keys recognized beyond fakedbapi's own set.
    _known_keys = frozenset(list(fakedbapi._known_keys) + \
        ["CHOST", "repository", "USE"])
def __init__(self, mybintree=None, **kwargs):
    """Initialize a binary-package dbapi.

    @param mybintree: the binarytree this dbapi reflects; its move_ent
        is re-exported here so package moves can flow through this dbapi.
        NOTE(review): a None default would crash on the attribute access
        below — callers evidently always pass a tree; confirm.
    """
    fakedbapi.__init__(self, **kwargs)
    self.bintree = mybintree
    self.move_ent = mybintree.move_ent
    # Selectively cache metadata in order to optimize dep matching.
    # (The listing elided the closing "])" of this set literal; restored.)
    self._aux_cache_keys = set(
        ["BUILD_TIME", "CHOST", "DEPEND", "EAPI",
        "HDEPEND", "IUSE", "KEYWORDS",
        "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE",
        "RDEPEND", "repository", "RESTRICT", "SLOT", "USE", "DEFINED_PHASES",
        ])
    self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
    # cpv -> cached metadata; consulted by aux_get and invalidated by
    # cpv_inject/cpv_remove (both visible below), so it must exist here.
    self._aux_cache = {}
def match(self, *pargs, **kwargs):
    """Lazily populate the backing tree, then defer to fakedbapi.match."""
    tree = self.bintree
    if tree and not tree.populated:
        tree.populate()
    return fakedbapi.match(self, *pargs, **kwargs)
def cpv_exists(self, cpv, myrepo=None):
    """Return whether cpv is present.

    myrepo is accepted for interface compatibility but is not consulted
    by the underlying fakedbapi lookup.
    """
    tree = self.bintree
    if tree and not tree.populated:
        tree.populate()
    return fakedbapi.cpv_exists(self, cpv)
def cpv_inject(self, cpv, **kwargs):
    """Register cpv, first evicting any stale cached metadata for it."""
    try:
        del self._aux_cache[cpv]
    except KeyError:
        pass
    fakedbapi.cpv_inject(self, cpv, **kwargs)
def cpv_remove(self, cpv):
    """Forget cpv, first evicting any cached metadata for it."""
    try:
        del self._aux_cache[cpv]
    except KeyError:
        pass
    fakedbapi.cpv_remove(self, cpv)
def aux_get(self, mycpv, wants, myrepo=None):
    # NOTE(review): this numbered listing elides several lines of
    # aux_get; surviving statements are preserved verbatim, so control
    # flow below is incomplete as shown (e.g. the loop that binds "k"
    # and the dict that collects "mydata" are not visible).
    if self.bintree and not self.bintree.populated:
        self.bintree.populate()
    # Fast path: serve entirely from the aux cache when every requested
    # key is one of the cached keys.
    if not self._known_keys.intersection(
        wants).difference(self._aux_cache_keys):
        aux_cache = self._aux_cache.get(mycpv)
        if aux_cache is not None:
            return [aux_cache.get(x, "") for x in wants]
    mysplit = mycpv.split("/")
    tbz2name = mysplit[1]+".tbz2"
    if not self.bintree._remotepkgs or \
        not self.bintree.isremote(mycpv):
        # Local package: metadata comes from the tbz2's xpak segment.
        tbz2_path = self.bintree.getname(mycpv)
        if not os.path.exists(tbz2_path):
            raise KeyError(mycpv)
        metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
        # xpak keys/values are stored as encoded bytes.
        v = metadata_bytes.get(_unicode_encode(k,
            encoding=_encodings['repo.content'],
            errors='backslashreplace'))
        v = _unicode_decode(v,
            encoding=_encodings['repo.content'], errors='replace')
    # Remote package: values come from the remote index entry.
    getitem = self.bintree._remotepkgs[mycpv].get
    mykeys = self._aux_cache_keys.union(wants)
    # myval is None if the key doesn't exist
    # or the tbz2 is corrupt.
    mydata[x] = " ".join(myval.split())
    # Packages with no recorded EAPI default to EAPI 0.
    if not mydata.setdefault('EAPI', '0'):
    # Refresh the aux cache with the canonical cached keys.
    aux_cache = self._aux_cache_slot_dict()
    for x in self._aux_cache_keys:
        aux_cache[x] = mydata.get(x, '')
    self._aux_cache[mycpv] = aux_cache
    return [mydata.get(x, '') for x in wants]
def aux_update(self, cpv, values):
    # NOTE(review): some lines of this listing are elided (e.g. the
    # statement raised when the tbz2 is missing, and the loop body that
    # writes the re-encoded values back into mydata).
    if not self.bintree.populated:
        self.bintree.populate()
    tbz2path = self.bintree.getname(cpv)
    if not os.path.exists(tbz2path):
    mytbz2 = portage.xpak.tbz2(tbz2path)
    mydata = mytbz2.get_data()
    # Re-encode incoming unicode keys/values to the xpak byte encoding.
    for k, v in values.items():
        k = _unicode_encode(k,
            encoding=_encodings['repo.content'], errors='backslashreplace')
        v = _unicode_encode(v,
            encoding=_encodings['repo.content'], errors='backslashreplace')
    for k, v in list(mydata.items()):
    # Rewrite the xpak segment in place with the merged data.
    mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
    # inject will clear stale caches via cpv_inject.
    self.bintree.inject(cpv)
def cp_list(self, *pargs, **kwargs):
    """Populate the backing tree on first use, then defer to
    fakedbapi.cp_list."""
    tree = self.bintree
    if not tree.populated:
        tree.populate()
    return fakedbapi.cp_list(self, *pargs, **kwargs)
183 if not self.bintree.populated:
184 self.bintree.populate()
185 return fakedbapi.cp_all(self)
188 if not self.bintree.populated:
189 self.bintree.populate()
190 return fakedbapi.cpv_all(self)
def getfetchsizes(self, pkg):
    """Return a {filename: size} map for what must be fetched for pkg.
    This will raise MissingSignature if SIZE signature is not available,
    or InvalidSignature if SIZE signature is invalid.
    (NOTE(review): elided listing lines — the dict construction and the
    try/except around the SIZE conversion are not visible here.)
    """
    if not self.bintree.populated:
        self.bintree.populate()
    # Accept either a cpv string or an object with a .cpv attribute.
    pkg = getattr(pkg, 'cpv', pkg)
    if not self.bintree.isremote(pkg):
    metadata = self.bintree._remotepkgs[pkg]
    size = int(metadata["SIZE"])
    raise portage.exception.MissingSignature("SIZE")
    raise portage.exception.InvalidSignature(
        "SIZE: %s" % metadata["SIZE"])
    filesdict[os.path.basename(self.bintree.getname(pkg))] = size
def _pkgindex_cpv_map_latest_build(pkgindex):
    """
    Given a PackageIndex instance, create a dict of cpv -> metadata map.
    If multiple packages have identical CPV values, prefer the package
    with latest BUILD_TIME value.
    @param pkgindex: A PackageIndex instance.
    @type pkgindex: PackageIndex
    @return: a dict containing entry for the give cpv.
    (NOTE(review): elided listing lines — the cpv extraction/validation
    and the BUILD_TIME int conversions are not fully visible.)
    """
    for d in pkgindex.packages:
        writemsg(_("!!! Invalid remote binary package: %s\n") % cpv,
        btime = d.get('BUILD_TIME', '')
        other_d = cpv_map.get(cpv)
        if other_d is not None:
            other_btime = other_d.get('BUILD_TIME', '')
            other_btime = int(other_btime)
            # Keep the existing entry when it is strictly newer.
            if other_btime and (not btime or other_btime > btime):
        cpv_map[_pkg_str(cpv)] = d
class binarytree(object):
    """This tree scans for, and manages, the binary packages (tbz2
    files) available in PKGDIR."""
def __init__(self, _unused=DeprecationWarning, pkgdir=None,
    virtual=DeprecationWarning, settings=None):
    # NOTE(review): this listing elides lines (including the guard
    # conditions preceding the two TypeError raises and several literal
    # closers); statements are preserved verbatim.
    raise TypeError("pkgdir parameter is required")
    raise TypeError("settings parameter is required")
    if _unused is not DeprecationWarning:
        warnings.warn("The first parameter of the "
            "portage.dbapi.bintree.binarytree"
            " constructor is now unused. Instead "
            "settings['ROOT'] is used.",
            DeprecationWarning, stacklevel=2)
    if virtual is not DeprecationWarning:
        warnings.warn("The 'virtual' parameter of the "
            "portage.dbapi.bintree.binarytree"
            " constructor is unused",
            DeprecationWarning, stacklevel=2)
    self.pkgdir = normalize_path(pkgdir)
    self.dbapi = bindbapi(self, settings=settings)
    self.update_ents = self.dbapi.update_ents
    self.move_slot_ent = self.dbapi.move_slot_ent
    self._remote_has_index = False
    self._remotepkgs = None # remote metadata indexed by cpv
    self.settings = settings
    self._pkgindex_uri = {}
    self._populating = False
    # Legacy layout: a PKGDIR/All directory with per-category symlinks.
    self._all_directory = os.path.isdir(
        os.path.join(self.pkgdir, "All"))
    self._pkgindex_version = 0
    self._pkgindex_hashes = ["MD5","SHA1"]
    self._pkgindex_file = os.path.join(self.pkgdir, "Packages")
    self._pkgindex_keys = self.dbapi._aux_cache_keys.copy()
    self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"])
    self._pkgindex_aux_keys = \
        ["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI",
        "HDEPEND", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES",
        "PROVIDE", "RESTRICT", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES",
    self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
    # Keys whose values are conditional on USE and must be evaluated.
    self._pkgindex_use_evaluated_keys = \
        ("DEPEND", "HDEPEND", "LICENSE", "RDEPEND",
        "PDEPEND", "PROPERTIES", "PROVIDE", "RESTRICT")
    self._pkgindex_header_keys = set([
        "ACCEPT_KEYWORDS", "ACCEPT_LICENSE",
        "ACCEPT_PROPERTIES", "ACCEPT_RESTRICT", "CBUILD",
        "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "FEATURES",
        "GENTOO_MIRRORS", "INSTALL_MASK", "IUSE_IMPLICIT", "USE",
        "USE_EXPAND", "USE_EXPAND_HIDDEN", "USE_EXPAND_IMPLICIT",
        "USE_EXPAND_UNPREFIXED"])
    self._pkgindex_default_pkg_data = {
        "DEFINED_PHASES" : "",
    self._pkgindex_inherited_keys = ["CHOST", "repository"]
    # Populate the header with appropriate defaults.
    self._pkgindex_default_header_data = {
        "CHOST" : self.settings.get("CHOST", ""),
    # It is especially important to populate keys like
    # "repository" that save space when entries can
    # inherit them from the header. If an existing
    # pkgindex header already defines these keys, then
    # they will appropriately override our defaults.
    main_repo = self.settings.repositories.mainRepo()
    if main_repo is not None and not main_repo.missing_repo_name:
        self._pkgindex_default_header_data["repository"] = \
    self._pkgindex_translated_keys = (
        ("DESCRIPTION" , "DESC"),
        ("repository" , "REPO"),
    self._pkgindex_allowed_pkg_keys = set(chain(
        self._pkgindex_aux_keys,
        self._pkgindex_hashes,
        self._pkgindex_default_pkg_data,
        self._pkgindex_inherited_keys,
        chain(*self._pkgindex_translated_keys)
375 warnings.warn("The root attribute of "
376 "portage.dbapi.bintree.binarytree"
377 " is deprecated. Use "
378 "settings['ROOT'] instead.",
379 DeprecationWarning, stacklevel=3)
380 return self.settings['ROOT']
def move_ent(self, mylist, repo_match=None):
    # Apply a package-move update entry to matching binary packages.
    # NOTE(review): elided listing lines — the extraction of
    # origcp/newcp from mylist and several continue/return statements
    # are not visible here.
    if not self.populated:
    for atom in (origcp, newcp):
        if not isjustname(atom):
            raise InvalidPackageName(str(atom))
    mynewcat = catsplit(newcp)[0]
    origmatches=self.dbapi.cp_list(origcp)
    for mycpv in origmatches:
        mycpv = self.dbapi._pkg_str(mycpv, None)
        except (KeyError, InvalidData):
        mycpv_cp = portage.cpv_getkey(mycpv)
        if mycpv_cp != origcp:
            # Ignore PROVIDE virtual match.
        if repo_match is not None \
            and not repo_match(mycpv.repo):
        # Use isvalidatom() to check if this move is valid for the
        # EAPI (characters allowed in package names may vary).
        if not isvalidatom(newcp, eapi=mycpv.eapi):
        mynewcpv = mycpv.replace(mycpv_cp, _unicode(newcp), 1)
        myoldpkg = catsplit(mycpv)[1]
        mynewpkg = catsplit(mynewcpv)[1]
        if (mynewpkg != myoldpkg) and os.path.exists(self.getname(mynewcpv)):
            writemsg(_("!!! Cannot update binary: Destination exists.\n"),
            writemsg("!!! "+mycpv+" -> "+mynewcpv+"\n", noiselevel=-1)
        tbz2path = self.getname(mycpv)
        if os.path.exists(tbz2path) and not os.access(tbz2path,os.W_OK):
            writemsg(_("!!! Cannot update readonly binary: %s\n") % mycpv,
        mytbz2 = portage.xpak.tbz2(tbz2path)
        mydata = mytbz2.get_data()
        updated_items = update_dbentries([mylist], mydata, parent=mycpv)
        mydata.update(updated_items)
        # Rewrite the PF/CATEGORY entries inside the xpak data.
        _unicode_encode(mynewpkg + "\n",
            encoding=_encodings['repo.content'])
        mydata[b'CATEGORY'] = \
            _unicode_encode(mynewcat + "\n",
            encoding=_encodings['repo.content'])
        if mynewpkg != myoldpkg:
            # Rename the embedded ebuild to match the new package name.
            ebuild_data = mydata.pop(_unicode_encode(myoldpkg + '.ebuild',
                encoding=_encodings['repo.content']), None)
            if ebuild_data is not None:
                mydata[_unicode_encode(mynewpkg + '.ebuild',
                    encoding=_encodings['repo.content'])] = ebuild_data
        mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
        self.dbapi.cpv_remove(mycpv)
        del self._pkg_paths[mycpv]
        new_path = self.getname(mynewcpv)
        self._pkg_paths[mynewcpv] = os.path.join(
            *new_path.split(os.path.sep)[-2:])
        # NOTE(review): comparing a path string against the tbz2 object
        # (mytbz2) is always True — this looks like it was meant to be
        # tbz2path; confirm before changing.
        if new_path != mytbz2:
            self._ensure_dir(os.path.dirname(new_path))
            _movefile(tbz2path, new_path, mysettings=self.settings)
            self._remove_symlink(mycpv)
            if new_path.split(os.path.sep)[-2] == "All":
                self._create_symlink(mynewcpv)
        self.inject(mynewcpv)
def _remove_symlink(self, cpv):
    """Remove a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink and also remove
    the ${PKGDIR}/${CATEGORY} directory if empty. The file will not be
    removed if os.path.islink() returns False."""
    # NOTE(review): elided listing lines — the os.unlink() call and the
    # try/except wrapping the rmdir are not visible here.
    mycat, mypkg = catsplit(cpv)
    mylink = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
    if os.path.islink(mylink):
        """Only remove it if it's really a link so that this method never
        removes a real package that was placed here to avoid a collision."""
    os.rmdir(os.path.join(self.pkgdir, mycat))
    # Ignore "doesn't exist" / "not empty" style failures on rmdir.
    if e.errno not in (errno.ENOENT,
        errno.ENOTEMPTY, errno.EEXIST):
def _create_symlink(self, cpv):
    """Create a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink (and
    ${PKGDIR}/${CATEGORY} directory, if necessary). Any file that may
    exist in the location of the symlink will first be removed."""
    # NOTE(review): elided listing lines — the unlink-and-ignore-ENOENT
    # try block preceding the errno check is not visible here.
    mycat, mypkg = catsplit(cpv)
    full_path = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
    self._ensure_dir(os.path.dirname(full_path))
    if e.errno != errno.ENOENT:
    # Relative link into the All/ directory.
    os.symlink(os.path.join("..", "All", mypkg + ".tbz2"), full_path)
def prevent_collision(self, cpv):
    """Make sure that the file location ${PKGDIR}/All/${PF}.tbz2 is safe to
    use for a given cpv. If a collision will occur with an existing
    package from another category, the existing package will be bumped to
    ${PKGDIR}/${CATEGORY}/${PF}.tbz2 so that both can coexist.
    (NOTE(review): elided listing lines — several early returns and the
    lstat try/except are not visible here.)"""
    if not self._all_directory:
    # Copy group permissions for new directories that
    # may have been created.
    for path in ("All", catsplit(cpv)[0]):
        path = os.path.join(self.pkgdir, path)
        self._ensure_dir(path)
        if not os.access(path, os.W_OK):
            raise PermissionDenied("access('%s', W_OK)" % path)
    full_path = self.getname(cpv)
    if "All" == full_path.split(os.path.sep)[-2]:
    """Move a colliding package if it exists. Code below this point only
    executes in rare cases."""
    mycat, mypkg = catsplit(cpv)
    myfile = mypkg + ".tbz2"
    mypath = os.path.join("All", myfile)
    dest_path = os.path.join(self.pkgdir, mypath)
    st = os.lstat(dest_path)
    if stat.S_ISLNK(st.st_mode):
    if os.path.exists(dest_path):
        # For invalid packages, other_cat could be None.
        other_cat = portage.xpak.tbz2(dest_path).getfile(b"CATEGORY")
        other_cat = _unicode_decode(other_cat,
            encoding=_encodings['repo.content'], errors='replace')
        other_cat = other_cat.strip()
        other_cpv = other_cat + "/" + mypkg
        # Relocate the colliding package into its own category dir.
        self._move_from_all(other_cpv)
        self.inject(other_cpv)
    self._move_to_all(cpv)
def _ensure_dir(self, path):
    """
    Create the specified directory. Also, copy gid and group mode
    bits from self.pkgdir if possible.
    @param cat_dir: Absolute path of the directory to be created.
    @type cat_dir: String
    (NOTE(review): the docstring names the parameter cat_dir but the
    signature says path; also the try/except around os.stat and the
    re-raise branch are elided from this listing.)
    """
    pkgdir_st = os.stat(self.pkgdir)
    pkgdir_gid = pkgdir_st.st_gid
    # Setgid bit plus group rwx bits only.
    pkgdir_grp_mode = 0o2070 & pkgdir_st.st_mode
    ensure_dirs(path, gid=pkgdir_gid, mode=pkgdir_grp_mode, mask=0)
    except PortageException:
        if not os.path.isdir(path):
def _file_permissions(self, path):
    # Copy group ownership and group permission bits from self.pkgdir
    # onto the given file, best-effort.
    # NOTE(review): the try blocks around os.stat/apply_permissions are
    # elided from this listing; only the except line survives.
    pkgdir_st = os.stat(self.pkgdir)
    pkgdir_gid = pkgdir_st.st_gid
    # Group read/write bits only.
    pkgdir_grp_mode = 0o0060 & pkgdir_st.st_mode
    portage.util.apply_permissions(path, gid=pkgdir_gid,
        mode=pkgdir_grp_mode, mask=0)
    except PortageException:
def _move_to_all(self, cpv):
    """If the file exists, move it. Whether or not it exists, update state
    for future getname() calls.
    (NOTE(review): the try/except around os.lstat and the trailing
    return are elided from this listing.)"""
    mycat, mypkg = catsplit(cpv)
    myfile = mypkg + ".tbz2"
    # Record the All/ relative path regardless of whether a move occurs.
    self._pkg_paths[cpv] = os.path.join("All", myfile)
    src_path = os.path.join(self.pkgdir, mycat, myfile)
    mystat = os.lstat(src_path)
    if mystat and stat.S_ISREG(mystat.st_mode):
        self._ensure_dir(os.path.join(self.pkgdir, "All"))
        dest_path = os.path.join(self.pkgdir, "All", myfile)
        _movefile(src_path, dest_path, mysettings=self.settings)
        self._create_symlink(cpv)
def _move_from_all(self, cpv):
    """Relocate ${PKGDIR}/All/${PF}.tbz2 into its category directory,
    ${PKGDIR}/${CATEGORY}/${PF}.tbz2, removing the old symlink and
    recording the new relative path for future getname() calls."""
    self._remove_symlink(cpv)
    category, pkg_name = catsplit(cpv)
    tbz2_name = pkg_name + ".tbz2"
    rel_path = os.path.join(category, tbz2_name)
    abs_dest = os.path.join(self.pkgdir, rel_path)
    abs_src = os.path.join(self.pkgdir, "All", tbz2_name)
    self._ensure_dir(os.path.dirname(abs_dest))
    _movefile(abs_src, abs_dest, mysettings=self.settings)
    self._pkg_paths[cpv] = rel_path
def populate(self, getbinpkgs=0):
    "populates the binarytree"
    # NOTE(review): elided listing lines — the re-entrancy guard and the
    # try/finally that pairs lockfile with unlockfile are not visible;
    # the unlock below presumably runs in a finally clause. Confirm.
    if os.access(self.pkgdir, os.W_OK):
        pkgindex_lock = lockfile(self._pkgindex_file,
    self._populating = True
    self._populate(getbinpkgs)
    unlockfile(pkgindex_lock)
    self._populating = False
def _populate(self, getbinpkgs=0):
    """Scan PKGDIR (and optionally remote binhosts) and load package
    metadata into this tree and its dbapi.
    (NOTE(review): this numbered listing elides many lines of
    _populate — several loop headers, try statements, continue/return
    statements and literal closers are missing; surviving statements
    are preserved verbatim, so control flow is incomplete as shown.)"""
    if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
    # Clear all caches in case populate is called multiple times
    # as may be the case when _global_updates calls populate()
    # prior to performing package moves since it only wants to
    # operate on local packages (getbinpkgs=0).
    self._remotepkgs = None
    self.dbapi._clear_cache()
    self.dbapi._aux_cache.clear()
    self._pkg_paths = pkg_paths
    dirs = listdir(self.pkgdir, dirsonly=True, EmptyOnError=True)
    # Scan the legacy All/ directory first so it takes precedence.
    dirs.insert(0, "All")
    pkgindex = self._load_pkgindex()
    if not self._pkgindex_version_supported(pkgindex):
        pkgindex = self._new_pkgindex()
    header = pkgindex.header
    for d in pkgindex.packages:
        metadata[d["CPV"]] = d
    update_pkgindex = False
    for myfile in listdir(os.path.join(self.pkgdir, mydir)):
        if not myfile.endswith(".tbz2"):
        mypath = os.path.join(mydir, myfile)
        full_path = os.path.join(self.pkgdir, mypath)
        s = os.lstat(full_path)
        if stat.S_ISLNK(s.st_mode):
        # Validate data from the package index and try to avoid
        # reading the xpak if possible.
        d = metadata.get(mydir+"/"+myfile[:-5])
        for mycpv in metadata:
            mycat, mypf = catsplit(mycpv)
            mypf, []).append(metadata[mycpv])
        possibilities = pf_index.get(myfile[:-5])
        for d in possibilities:
            # Reject stale index entries by comparing mtime/size.
            if long(d["MTIME"]) != s[stat.ST_MTIME]:
            except (KeyError, ValueError):
            if long(d["SIZE"]) != long(s.st_size):
            except (KeyError, ValueError):
            if not self._pkgindex_keys.difference(d):
        if mycpv in pkg_paths:
            # discard duplicates (All/ is preferred)
        mycpv = _pkg_str(mycpv)
        pkg_paths[mycpv] = mypath
        # update the path if the package has been moved
        oldpath = d.get("PATH")
        if oldpath and oldpath != mypath:
            update_pkgindex = True
        if mypath != mycpv + ".tbz2":
            update_pkgindex = True
        update_pkgindex = True
        self.dbapi.cpv_inject(mycpv)
        if not self.dbapi._aux_cache_keys.difference(d):
            aux_cache = self.dbapi._aux_cache_slot_dict()
            for k in self.dbapi._aux_cache_keys:
            self.dbapi._aux_cache[mycpv] = aux_cache
        if not os.access(full_path, os.R_OK):
            writemsg(_("!!! Permission denied to read " \
                "binary package: '%s'\n") % full_path,
            self.invalids.append(myfile[:-5])
        # Index entry missing/stale: read the xpak metadata directly.
        metadata_bytes = portage.xpak.tbz2(full_path).get_data()
        mycat = _unicode_decode(metadata_bytes.get(b"CATEGORY", ""),
            encoding=_encodings['repo.content'], errors='replace')
        mypf = _unicode_decode(metadata_bytes.get(b"PF", ""),
            encoding=_encodings['repo.content'], errors='replace')
        slot = _unicode_decode(metadata_bytes.get(b"SLOT", ""),
            encoding=_encodings['repo.content'], errors='replace')
        if not mycat or not mypf or not slot:
            #old-style or corrupt package
            writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
            missing_keys.append("CATEGORY")
            missing_keys.append("PF")
            missing_keys.append("SLOT")
            msg.append(_("Missing metadata key(s): %s.") % \
                ", ".join(missing_keys))
            msg.append(_(" This binary package is not " \
                "recoverable and should be deleted."))
            for line in textwrap.wrap("".join(msg), 72):
                writemsg("!!! %s\n" % line, noiselevel=-1)
            self.invalids.append(mypkg)
        mycat = mycat.strip()
        if mycat != mydir and mydir != "All":
        if mypkg != mypf.strip():
        mycpv = mycat + "/" + mypkg
        if mycpv in pkg_paths:
            # All is first, so it's preferred.
        if not self.dbapi._category_re.match(mycat):
            writemsg(_("!!! Binary package has an " \
                "unrecognized category: '%s'\n") % full_path,
            writemsg(_("!!! '%s' has a category that is not" \
                " listed in %setc/portage/categories\n") % \
                (mycpv, self.settings["PORTAGE_CONFIGROOT"]),
        mycpv = _pkg_str(mycpv)
        pkg_paths[mycpv] = mypath
        self.dbapi.cpv_inject(mycpv)
        update_pkgindex = True
        d = metadata.get(mycpv, {})
        if long(d["MTIME"]) != s[stat.ST_MTIME]:
        except (KeyError, ValueError):
        if long(d["SIZE"]) != long(s.st_size):
        except (KeyError, ValueError):
        d["MTIME"] = str(s[stat.ST_MTIME])
        d["SIZE"] = str(s.st_size)
        d.update(zip(self._pkgindex_aux_keys,
            self.dbapi.aux_get(mycpv, self._pkgindex_aux_keys)))
        self._eval_use_flags(mycpv, d)
        except portage.exception.InvalidDependString:
            writemsg(_("!!! Invalid binary package: '%s'\n") % \
                self.getname(mycpv), noiselevel=-1)
            self.dbapi.cpv_remove(mycpv)
        # record location if it's non-default
        if mypath != mycpv + ".tbz2":
        if not self.dbapi._aux_cache_keys.difference(d):
            aux_cache = self.dbapi._aux_cache_slot_dict()
            for k in self.dbapi._aux_cache_keys:
            self.dbapi._aux_cache[mycpv] = aux_cache
    # Drop index entries for packages no longer on disk.
    for cpv in list(metadata):
        if cpv not in pkg_paths:
    # Do not bother to write the Packages index if $PKGDIR/All/ exists
    # since it will provide no benefit due to the need to read CATEGORY
    if update_pkgindex and os.access(self.pkgdir, os.W_OK):
        del pkgindex.packages[:]
        pkgindex.packages.extend(iter(metadata.values()))
        self._update_pkgindex_header(pkgindex.header)
        self._pkgindex_write(pkgindex)
    if getbinpkgs and not self.settings["PORTAGE_BINHOST"]:
        writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
    if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings:
    # Fetch and merge each configured binhost's Packages index.
    self._remotepkgs = {}
    for base_url in self.settings["PORTAGE_BINHOST"].split():
        parsed_url = urlparse(base_url)
        host = parsed_url.netloc
        port = parsed_url.port
        user, host = host.split("@", 1)
        user_passwd = user + "@"
        user, passwd = user.split(":", 1)
        # Strip an explicit port from the host portion.
        port_str = ":%s" % (port,)
        if host.endswith(port_str):
            host = host[:-len(port_str)]
        # Location of the locally cached copy of the remote index.
        pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
            host, parsed_url.path.lstrip("/"), "Packages")
        pkgindex = self._new_pkgindex()
        f = io.open(_unicode_encode(pkgindex_file,
            encoding=_encodings['fs'], errors='strict'),
            mode='r', encoding=_encodings['repo.content'],
        except EnvironmentError as e:
            if e.errno != errno.ENOENT:
        local_timestamp = pkgindex.header.get("TIMESTAMP", None)
        remote_timestamp = None
        rmt_idx = self._new_pkgindex()
        # urlparse.urljoin() only works correctly with recognized
        # protocols and requires the base url to have a trailing
        # slash, so join manually...
        url = base_url.rstrip("/") + "/Packages"
        # Don't use urlopen for https, since it doesn't support
        # certificate/hostname verification (bug #469888).
        if parsed_url.scheme not in ('https',):
            f = _urlopen(url, if_modified_since=local_timestamp)
            if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
                remote_timestamp = f.headers.get('timestamp')
        except IOError as err:
            if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
                raise UseCachedCopyOfRemoteIndex()
            if parsed_url.scheme in ('ftp', 'http', 'https'):
                # This protocol is supposedly supported by urlopen,
                # so apparently there's a problem with the url
                # or a bug in urlopen.
                if self.settings.get("PORTAGE_DEBUG", "0") != "0":
                    traceback.print_exc()
        path = parsed_url.path.rstrip("/") + "/Packages"
        if parsed_url.scheme == 'ssh':
            # Use a pipe so that we can terminate the download
            # early if we detect that the TIMESTAMP header
            # matches that of the cached Packages file.
            ssh_args.append("-p%s" % (port,))
            # NOTE: shlex evaluates embedded quotes
            ssh_args.extend(portage.util.shlex_split(
                self.settings.get("PORTAGE_SSH_OPTS", "")))
            ssh_args.append(user_passwd + host)
            ssh_args.append('--')
            ssh_args.append('cat')
            ssh_args.append(path)
            proc = subprocess.Popen(ssh_args,
                stdout=subprocess.PIPE)
        # Non-ssh fallback: fetch via the configured FETCHCOMMAND.
        setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
        fcmd = self.settings.get(setting)
        fcmd = self.settings.get('FETCHCOMMAND')
        raise EnvironmentError("FETCHCOMMAND is unset")
        fd, tmp_filename = tempfile.mkstemp()
        tmp_dirname, tmp_basename = os.path.split(tmp_filename)
        "DISTDIR": tmp_dirname,
        "FILE": tmp_basename,
        for k in ("PORTAGE_SSH_OPTS",):
            fcmd_vars[k] = self.settings[k]
        success = portage.getbinpkg.file_get(
            fcmd=fcmd, fcmd_vars=fcmd_vars)
        raise EnvironmentError("%s failed" % (setting,))
        f = open(tmp_filename, 'rb')
        f_dec = codecs.iterdecode(f,
            _encodings['repo.content'], errors='replace')
        rmt_idx.readHeader(f_dec)
        if not remote_timestamp: # in case it had not been read from HTTP header
            remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
        if not remote_timestamp:
            # no timestamp in the header, something's wrong
            writemsg(_("\n\n!!! Binhost package index " \
                " has no TIMESTAMP field.\n"), noiselevel=-1)
        if not self._pkgindex_version_supported(rmt_idx):
            writemsg(_("\n\n!!! Binhost package index version" \
                " is not supported: '%s'\n") % \
                rmt_idx.header.get("VERSION"), noiselevel=-1)
        elif local_timestamp != remote_timestamp:
            rmt_idx.readBody(f_dec)
        # Timeout after 5 seconds, in case close() blocks
        # indefinitely (see bug #350139).
        AlarmSignal.register(5)
        AlarmSignal.unregister()
        writemsg("\n\n!!! %s\n" % \
            _("Timed out while closing connection to binhost"),
        except UseCachedCopyOfRemoteIndex:
            writemsg_stdout("\n")
            colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
        except EnvironmentError as e:
            writemsg(_("\n\n!!! Error fetching binhost package" \
                " info from '%s'\n") % _hide_url_passwd(base_url))
            writemsg("!!! %s\n\n" % str(e))
        if proc is not None:
            if proc.poll() is None:
        if tmp_filename is not None:
            os.unlink(tmp_filename)
        # Cache a freshly fetched index locally, best-effort.
        if pkgindex is rmt_idx:
            pkgindex.modified = False # don't update the header
            ensure_dirs(os.path.dirname(pkgindex_file))
            f = atomic_ofstream(pkgindex_file)
        except (IOError, PortageException):
            if os.access(os.path.dirname(pkgindex_file), os.W_OK):
            # The current user doesn't have permission to cache the
            # file, but that's alright.
        # Organize remote package list as a cpv -> metadata map.
        remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex)
        remote_base_uri = pkgindex.header.get("URI", base_url)
        for cpv, remote_metadata in remotepkgs.items():
            remote_metadata["BASE_URI"] = remote_base_uri
            self._pkgindex_uri[cpv] = url
        self._remotepkgs.update(remotepkgs)
        self._remote_has_index = True
        for cpv in remotepkgs:
            self.dbapi.cpv_inject(cpv)
    # Remote package instances override local package
    # if they are not identical.
    hash_names = ["SIZE"] + self._pkgindex_hashes
    for cpv, local_metadata in metadata.items():
        remote_metadata = self._remotepkgs.get(cpv)
        if remote_metadata is None:
        # Use digests to compare identity.
        for hash_name in hash_names:
            local_value = local_metadata.get(hash_name)
            if local_value is None:
            remote_value = remote_metadata.get(hash_name)
            if remote_value is None:
            if local_value != remote_value:
        del self._remotepkgs[cpv]
        # Override the local package in the aux_get cache.
        self.dbapi._aux_cache[cpv] = remote_metadata
    # Local package instances override remote instances.
    for cpv in metadata:
        self._remotepkgs.pop(cpv, None)
def inject(self, cpv, filename=None):
    """Add a freshly built package to the database. This updates
    $PKGDIR/Packages with the new package metadata (including MD5).
    @param cpv: The cpv of the new package to inject
    @param filename: File path of the package to inject, or None if it's
    already in the location returned by getname()
    @type filename: string
    (NOTE(review): elided listing lines — several try statements,
    early returns and an else branch are not visible here.)
    """
    mycat, mypkg = catsplit(cpv)
    if not self.populated:
    if filename is None:
        full_path = self.getname(cpv)
    full_path = filename
    s = os.stat(full_path)
    except OSError as e:
        if e.errno != errno.ENOENT:
        writemsg(_("!!! Binary package does not exist: '%s'\n") % full_path,
    mytbz2 = portage.xpak.tbz2(full_path)
    slot = mytbz2.getfile("SLOT")
    writemsg(_("!!! Invalid binary package: '%s'\n") % full_path,
    self.dbapi.cpv_inject(cpv)
    # Reread the Packages index (in case it's been changed by another
    # process) and then updated it, all while holding a lock.
    pkgindex_lock = None
    created_symlink = False
    pkgindex_lock = lockfile(self._pkgindex_file,
    if filename is not None:
        # Move the supplied file into its canonical location unless it
        # is already the same file.
        new_filename = self.getname(cpv)
        samefile = os.path.samefile(filename, new_filename)
        self._ensure_dir(os.path.dirname(new_filename))
        _movefile(filename, new_filename, mysettings=self.settings)
        full_path = new_filename
    self._file_permissions(full_path)
    if self._all_directory and \
        self.getname(cpv).split(os.path.sep)[-2] == "All":
        self._create_symlink(cpv)
        created_symlink = True
    pkgindex = self._load_pkgindex()
    if not self._pkgindex_version_supported(pkgindex):
        pkgindex = self._new_pkgindex()
    # Discard remote metadata to ensure that _pkgindex_entry
    # gets the local metadata. This also updates state for future
    if self._remotepkgs is not None:
        self._remotepkgs.pop(cpv, None)
    # Discard cached metadata to ensure that _pkgindex_entry
    # doesn't return stale metadata.
    self.dbapi._aux_cache.pop(cpv, None)
    d = self._pkgindex_entry(cpv)
    except portage.exception.InvalidDependString:
        writemsg(_("!!! Invalid binary package: '%s'\n") % \
            self.getname(cpv), noiselevel=-1)
        self.dbapi.cpv_remove(cpv)
        del self._pkg_paths[cpv]
    # If found, remove package(s) with duplicate path.
    path = d.get("PATH", "")
    for i in range(len(pkgindex.packages) - 1, -1, -1):
        d2 = pkgindex.packages[i]
        if path and path == d2.get("PATH"):
            # Handle path collisions in $PKGDIR/All
            # when CPV is not identical.
            del pkgindex.packages[i]
        elif cpv == d2.get("CPV"):
            if path == d2.get("PATH", ""):
                del pkgindex.packages[i]
            elif created_symlink and not d2.get("PATH", ""):
                # Delete entry for the package that was just
                # overwritten by a symlink to this package.
                del pkgindex.packages[i]
    pkgindex.packages.append(d)
    self._update_pkgindex_header(pkgindex.header)
    self._pkgindex_write(pkgindex)
    # NOTE(review): presumably runs in a finally clause (elided).
    unlockfile(pkgindex_lock)
def _pkgindex_write(self, pkgindex):
    # Serialize the index once, then write it to the plain Packages file
    # and, when the compress-index feature is set, to Packages.gz too.
    # NOTE(review): elided listing lines — the statements that actually
    # write `contents` and close each file are not visible here.
    contents = codecs.getwriter(_encodings['repo.content'])(io.BytesIO())
    pkgindex.write(contents)
    contents = contents.getvalue()
    atime = mtime = long(pkgindex.header["TIMESTAMP"])
    output_files = [(atomic_ofstream(self._pkgindex_file, mode="wb"),
        self._pkgindex_file, None)]
    if "compress-index" in self.settings.features:
        gz_fname = self._pkgindex_file + ".gz"
        fileobj = atomic_ofstream(gz_fname, mode="wb")
        # Empty filename and fixed mtime keep the gzip output stable.
        output_files.append((GzipFile(filename='', mode="wb",
            fileobj=fileobj, mtime=mtime), gz_fname, fileobj))
    for f, fname, f_close in output_files:
        if f_close is not None:
        self._file_permissions(fname)
        # some seconds might have elapsed since TIMESTAMP
        os.utime(fname, (atime, mtime))
1205 def _pkgindex_entry(self, cpv):
1207 Performs checksums and evaluates USE flag conditionals.
1208 Raises InvalidDependString if necessary.
1210 @return: a dict containing entry for the give cpv.
1213 pkg_path = self.getname(cpv)
1215 d = dict(zip(self._pkgindex_aux_keys,
1216 self.dbapi.aux_get(cpv, self._pkgindex_aux_keys)))
1218 d.update(perform_multiple_checksums(
1219 pkg_path, hashes=self._pkgindex_hashes))
1222 st = os.stat(pkg_path)
1223 d["MTIME"] = str(st[stat.ST_MTIME])
1224 d["SIZE"] = str(st.st_size)
1226 rel_path = self._pkg_paths[cpv]
1227 # record location if it's non-default
1228 if rel_path != cpv + ".tbz2":
1229 d["PATH"] = rel_path
1231 self._eval_use_flags(cpv, d)
1234 def _new_pkgindex(self):
1235 return portage.getbinpkg.PackageIndex(
1236 allowed_pkg_keys=self._pkgindex_allowed_pkg_keys,
1237 default_header_data=self._pkgindex_default_header_data,
1238 default_pkg_data=self._pkgindex_default_pkg_data,
1239 inherited_keys=self._pkgindex_inherited_keys,
1240 translated_keys=self._pkgindex_translated_keys)
1242 def _update_pkgindex_header(self, header):
1243 portdir = normalize_path(os.path.realpath(self.settings["PORTDIR"]))
1244 profiles_base = os.path.join(portdir, "profiles") + os.path.sep
1245 if self.settings.profile_path:
1246 profile_path = normalize_path(
1247 os.path.realpath(self.settings.profile_path))
1248 if profile_path.startswith(profiles_base):
1249 profile_path = profile_path[len(profiles_base):]
1250 header["PROFILE"] = profile_path
1251 header["VERSION"] = str(self._pkgindex_version)
1252 base_uri = self.settings.get("PORTAGE_BINHOST_HEADER_URI")
1254 header["URI"] = base_uri
1256 header.pop("URI", None)
1257 for k in self._pkgindex_header_keys:
1258 v = self.settings.get(k, None)
1264 # These values may be useful for using a binhost without
1265 # having a local copy of the profile (bug #470006).
1266 for k in self.settings.get("USE_EXPAND_IMPLICIT", "").split():
1267 k = "USE_EXPAND_VALUES_" + k
1268 v = self.settings.get(k)
1274 def _pkgindex_version_supported(self, pkgindex):
1275 version = pkgindex.header.get("VERSION")
1278 if int(version) <= self._pkgindex_version:
1284 def _eval_use_flags(self, cpv, metadata):
1285 use = frozenset(metadata["USE"].split())
1287 iuse = set(f.lstrip("-+") for f in metadata["IUSE"].split())
1288 use = [f for f in use if f in iuse]
1290 metadata["USE"] = " ".join(use)
1291 for k in self._pkgindex_use_evaluated_keys:
1292 if k.endswith('DEPEND'):
1299 deps = use_reduce(deps, uselist=raw_use, token_class=token_class)
1300 deps = paren_enclose(deps)
1301 except portage.exception.InvalidDependString as e:
1302 writemsg("%s: %s\n" % (k, str(e)),
1307 def exists_specific(self, cpv):
1308 if not self.populated:
1310 return self.dbapi.match(
1311 dep_expand("="+cpv, mydb=self.dbapi, settings=self.settings))
1313 def dep_bestmatch(self, mydep):
1314 "compatibility method -- all matches, not just visible ones"
1315 if not self.populated:
1318 writemsg("mydep: %s\n" % mydep, 1)
1319 mydep = dep_expand(mydep, mydb=self.dbapi, settings=self.settings)
1320 writemsg("mydep: %s\n" % mydep, 1)
1321 mykey = dep_getkey(mydep)
1322 writemsg("mykey: %s\n" % mykey, 1)
1323 mymatch = best(match_from_list(mydep,self.dbapi.cp_list(mykey)))
1324 writemsg("mymatch: %s\n" % mymatch, 1)
1329 def getname(self, pkgname):
1330 """Returns a file location for this package. The default location is
1331 ${PKGDIR}/All/${PF}.tbz2, but will be ${PKGDIR}/${CATEGORY}/${PF}.tbz2
1332 in the rare event of a collision. The prevent_collision() method can
1333 be called to ensure that ${PKGDIR}/All/${PF}.tbz2 is available for a
1335 if not self.populated:
1338 mypath = self._pkg_paths.get(mycpv, None)
1340 return os.path.join(self.pkgdir, mypath)
1341 mycat, mypkg = catsplit(mycpv)
1342 if self._all_directory:
1343 mypath = os.path.join("All", mypkg + ".tbz2")
1344 if mypath in self._pkg_paths.values():
1345 mypath = os.path.join(mycat, mypkg + ".tbz2")
1347 mypath = os.path.join(mycat, mypkg + ".tbz2")
1348 self._pkg_paths[mycpv] = mypath # cache for future lookups
1349 return os.path.join(self.pkgdir, mypath)
1351 def isremote(self, pkgname):
1352 """Returns true if the package is kept remotely and it has not been
1353 downloaded (or it is only partially downloaded)."""
1354 if self._remotepkgs is None or pkgname not in self._remotepkgs:
1356 # Presence in self._remotepkgs implies that it's remote. When a
1357 # package is downloaded, state is updated by self.inject().
1360 def get_pkgindex_uri(self, pkgname):
1361 """Returns the URI to the Packages file for a given package."""
1362 return self._pkgindex_uri.get(pkgname)
1366 def gettbz2(self, pkgname):
1367 """Fetches the package from a remote site, if necessary. Attempts to
1368 resume if the file appears to be partially downloaded."""
1369 tbz2_path = self.getname(pkgname)
1370 tbz2name = os.path.basename(tbz2_path)
1372 if os.path.exists(tbz2_path):
1373 if tbz2name[:-5] not in self.invalids:
1377 writemsg(_("Resuming download of this tbz2, but it is possible that it is corrupt.\n"),
1380 mydest = os.path.dirname(self.getname(pkgname))
1381 self._ensure_dir(mydest)
1382 # urljoin doesn't work correctly with unrecognized protocols like sftp
1383 if self._remote_has_index:
1384 rel_url = self._remotepkgs[pkgname].get("PATH")
1386 rel_url = pkgname+".tbz2"
1387 remote_base_uri = self._remotepkgs[pkgname]["BASE_URI"]
1388 url = remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/")
1390 url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name
1391 protocol = urlparse(url)[0]
1392 fcmd_prefix = "FETCHCOMMAND"
1394 fcmd_prefix = "RESUMECOMMAND"
1395 fcmd = self.settings.get(fcmd_prefix + "_" + protocol.upper())
1397 fcmd = self.settings.get(fcmd_prefix)
1398 success = portage.getbinpkg.file_get(url, mydest, fcmd=fcmd)
1401 os.unlink(self.getname(pkgname))
1404 raise portage.exception.FileNotFound(mydest)
1405 self.inject(pkgname)
1407 def _load_pkgindex(self):
1408 pkgindex = self._new_pkgindex()
1410 f = io.open(_unicode_encode(self._pkgindex_file,
1411 encoding=_encodings['fs'], errors='strict'),
1412 mode='r', encoding=_encodings['repo.content'],
1414 except EnvironmentError:
1423 def _get_digests(self, pkg):
1427 except AttributeError:
1432 if self._remotepkgs is None or cpv not in self._remotepkgs:
1433 for d in self._load_pkgindex().packages:
1438 metadata = self._remotepkgs[cpv]
1439 if metadata is None:
1442 for k in hashfunc_map:
1448 if "SIZE" in metadata:
1450 digests["size"] = int(metadata["SIZE"])
1452 writemsg(_("!!! Malformed SIZE attribute in remote " \
1453 "metadata for '%s'\n") % cpv)
1457 def digestCheck(self, pkg):
1459 Verify digests for the given package and raise DigestException
1460 if verification fails.
1462 @return: True if digests could be located, False otherwise.
1465 digests = self._get_digests(pkg)
1472 except AttributeError:
1475 pkg_path = self.getname(cpv)
1476 hash_filter = _hash_filter(
1477 self.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
1478 if not hash_filter.transparent:
1479 digests = _apply_hash_filter(digests, hash_filter)
1481 eout.quiet = self.settings.get("PORTAGE_QUIET") == "1"
1482 ok, st = _check_distfile(pkg_path, digests, eout, show_errors=0)
1484 ok, reason = verify_all(pkg_path, digests)
1486 raise portage.exception.DigestException(
1487 (pkg_path,) + tuple(reason))
1491 def getslot(self, mycatpkg):
1492 "Get a slot for a catpkg; assume it exists."
1495 myslot = self.dbapi._pkg_str(mycatpkg, None).slot