1 # Copyright 1998-2012 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
4 __all__ = ["bindbapi", "binarytree"]
7 portage.proxy.lazyimport.lazyimport(globals(),
8 'portage.checksum:hashfunc_map,perform_multiple_checksums,verify_all',
9 'portage.dbapi.dep_expand:dep_expand',
10 'portage.dep:dep_getkey,isjustname,match_from_list',
11 'portage.output:EOutput,colorize',
12 'portage.locks:lockfile,unlockfile',
13 'portage.package.ebuild.doebuild:_vdb_use_conditional_atoms',
14 'portage.package.ebuild.fetch:_check_distfile',
15 'portage.update:update_dbentries',
16 'portage.util:atomic_ofstream,ensure_dirs,normalize_path,' + \
17 'writemsg,writemsg_stdout',
18 'portage.util.listdir:listdir',
19 'portage.versions:best,catpkgsplit,catsplit,_pkg_str',
22 from portage.cache.mappings import slot_dict_class
23 from portage.const import CACHE_PATH
24 from portage.dbapi.virtual import fakedbapi
25 from portage.dep import Atom, use_reduce, paren_enclose
26 from portage.exception import AlarmSignal, InvalidPackageName, \
27 PermissionDenied, PortageException
28 from portage.localization import _
29 from portage.util import urlopen
30 from portage import _movefile
31 from portage import os
32 from portage import _encodings
33 from portage import _unicode_decode
34 from portage import _unicode_encode
46 from itertools import chain
48 from urllib.parse import urlparse
50 from urlparse import urlparse
52 if sys.hexversion >= 0x3000000:
class bindbapi(fakedbapi):
	"""A dbapi implementation that is backed by a binarytree of .tbz2
	binary packages. NOTE(review): parts of this class body are elided
	from this view; only comments have been added to the visible code."""
	# Extend fakedbapi's key set with binary-package specific keys.
	_known_keys = frozenset(list(fakedbapi._known_keys) + \
		["CHOST", "repository", "USE"])
	def __init__(self, mybintree=None, **kwargs):
		# mybintree is effectively required: move_ent is taken from it
		# unconditionally below — TODO confirm against callers.
		fakedbapi.__init__(self, **kwargs)
		self.bintree = mybintree
		self.move_ent = mybintree.move_ent
		# Selectively cache metadata in order to optimize dep matching.
		self._aux_cache_keys = set(
			["BUILD_TIME", "CHOST", "DEPEND", "EAPI", "IUSE", "KEYWORDS",
			"LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE",
			"RDEPEND", "repository", "RESTRICT", "SLOT", "USE", "DEFINED_PHASES",
		# NOTE(review): the closing of the set literal above is elided here.
		self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
74 def match(self, *pargs, **kwargs):
75 if self.bintree and not self.bintree.populated:
76 self.bintree.populate()
77 return fakedbapi.match(self, *pargs, **kwargs)
79 def cpv_exists(self, cpv, myrepo=None):
80 if self.bintree and not self.bintree.populated:
81 self.bintree.populate()
82 return fakedbapi.cpv_exists(self, cpv)
84 def cpv_inject(self, cpv, **kwargs):
85 self._aux_cache.pop(cpv, None)
86 fakedbapi.cpv_inject(self, cpv, **kwargs)
88 def cpv_remove(self, cpv):
89 self._aux_cache.pop(cpv, None)
90 fakedbapi.cpv_remove(self, cpv)
	def aux_get(self, mycpv, wants, myrepo=None):
		"""Return the requested metadata values for mycpv, serving from
		the aux cache when all wanted keys are cached, otherwise reading
		the xpak of the local .tbz2 or the remote index entry.
		NOTE(review): several lines of this method (including the local
		getitem() definition and loop headers) are elided in this view;
		indentation below is reconstructed."""
		if self.bintree and not self.bintree.populated:
			self.bintree.populate()
		# Serve entirely from cache when every wanted known key is cached.
		if not self._known_keys.intersection(
			wants).difference(self._aux_cache_keys):
			aux_cache = self._aux_cache.get(mycpv)
			if aux_cache is not None:
				return [aux_cache.get(x, "") for x in wants]
		mysplit = mycpv.split("/")
		tbz2name = mysplit[1]+".tbz2"
		if not self.bintree._remotepkgs or \
			not self.bintree.isremote(mycpv):
			# Local package: read metadata out of the xpak segment.
			tbz2_path = self.bintree.getname(mycpv)
			if not os.path.exists(tbz2_path):
				raise KeyError(mycpv)
			metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
			# (elided: def getitem(k):)
			v = metadata_bytes.get(_unicode_encode(k,
				encoding=_encodings['repo.content'],
				errors='backslashreplace'))
			v = _unicode_decode(v,
				encoding=_encodings['repo.content'], errors='replace')
		else:
			# Remote package: values come from the fetched index entry.
			getitem = self.bintree._remotepkgs[mycpv].get
		mykeys = self._aux_cache_keys.union(wants)
		# myval is None if the key doesn't exist
		# or the tbz2 is corrupt.
		mydata[x] = " ".join(myval.split())
		# Normalize a missing/empty EAPI to the default "0".
		if not mydata.setdefault('EAPI', _unicode_decode('0')):
			mydata['EAPI'] = _unicode_decode('0')
		# Refresh the selective aux cache for future lookups.
		aux_cache = self._aux_cache_slot_dict()
		for x in self._aux_cache_keys:
			aux_cache[x] = mydata.get(x, _unicode_decode(''))
		self._aux_cache[mycpv] = aux_cache
		return [mydata.get(x, _unicode_decode('')) for x in wants]
	def aux_update(self, cpv, values):
		"""Write updated metadata values into the xpak of cpv's .tbz2
		and re-inject the package so indexes and caches are refreshed.
		NOTE(review): some lines of this method are elided in this view."""
		if not self.bintree.populated:
			self.bintree.populate()
		tbz2path = self.bintree.getname(cpv)
		if not os.path.exists(tbz2path):
			# (elided: raise for the missing package)
		mytbz2 = portage.xpak.tbz2(tbz2path)
		mydata = mytbz2.get_data()
		# Encode incoming key/value pairs for storage in the xpak.
		for k, v in values.items():
			k = _unicode_encode(k,
				encoding=_encodings['repo.content'], errors='backslashreplace')
			v = _unicode_encode(v,
				encoding=_encodings['repo.content'], errors='backslashreplace')
			# (elided: store the encoded pair into mydata)
		for k, v in list(mydata.items()):
			# (elided: prune entries with empty values — TODO confirm)
		mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
		# inject will clear stale caches via cpv_inject.
		self.bintree.inject(cpv)
165 def cp_list(self, *pargs, **kwargs):
166 if not self.bintree.populated:
167 self.bintree.populate()
168 return fakedbapi.cp_list(self, *pargs, **kwargs)
		# NOTE(review): the enclosing `def` lines for the two delegations
		# below are elided from this view; judging by the fakedbapi calls
		# these are cp_all() and cpv_all(), each lazily populating first.
		if not self.bintree.populated:
			self.bintree.populate()
		return fakedbapi.cp_all(self)
		if not self.bintree.populated:
			self.bintree.populate()
		return fakedbapi.cpv_all(self)
	def getfetchsizes(self, pkg):
		# NOTE(review): the docstring delimiters and several lines of this
		# method are elided in this view.
		This will raise MissingSignature if SIZE signature is not available,
		or InvalidSignature if SIZE signature is invalid.
		if not self.bintree.populated:
			self.bintree.populate()
		# Accept either a cpv string or an object with a .cpv attribute.
		pkg = getattr(pkg, 'cpv', pkg)
		if not self.bintree.isremote(pkg):
			# (elided: local packages need no fetching)
			metadata = self.bintree._remotepkgs[pkg]
			# SIZE from the remote index is the expected download size.
			size = int(metadata["SIZE"])
			raise portage.exception.MissingSignature("SIZE")
			raise portage.exception.InvalidSignature(
				"SIZE: %s" % metadata["SIZE"])
			filesdict[os.path.basename(self.bintree.getname(pkg))] = size
def _pkgindex_cpv_map_latest_build(pkgindex):
	# NOTE(review): the docstring delimiters and some lines are elided.
	Given a PackageIndex instance, create a dict of cpv -> metadata map.
	If multiple packages have identical CPV values, prefer the package
	with latest BUILD_TIME value.
	@param pkgindex: A PackageIndex instance.
	@type pkgindex: PackageIndex
	@return: a dict containing entry for the give cpv.
	for d in pkgindex.packages:
		btime = d.get('BUILD_TIME', '')
		other_d = cpv_map.get(cpv)
		if other_d is not None:
			# A duplicate CPV exists; keep whichever has the newer
			# BUILD_TIME (missing/unparsable values lose).
			other_btime = other_d.get('BUILD_TIME', '')
			other_btime = int(other_btime)
			if other_btime and (not btime or other_btime > btime):
				# (elided: skip this entry, keep the existing one)
		cpv_map[_pkg_str(cpv)] = d
class binarytree(object):
	"this tree scans for a list of all packages available in PKGDIR"
	def __init__(self, _unused=None, pkgdir=None,
		virtual=DeprecationWarning, settings=None):
		# NOTE(review): several lines of this constructor are elided in
		# this view; indentation below is reconstructed.
		# pkgdir and settings are mandatory keyword arguments.
			raise TypeError("pkgdir parameter is required")
			raise TypeError("settings parameter is required")
		# Legacy positional "root" argument is ignored; warn if a caller
		# passes something other than settings['ROOT'].
		if _unused is not None and _unused != settings['ROOT']:
			warnings.warn("The root parameter of the "
				"portage.dbapi.bintree.binarytree"
				" constructor is now unused. Use "
				"settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)
		if virtual is not DeprecationWarning:
			warnings.warn("The 'virtual' parameter of the "
				"portage.dbapi.bintree.binarytree"
				" constructor is unused",
				DeprecationWarning, stacklevel=2)
		self.pkgdir = normalize_path(pkgdir)
		self.dbapi = bindbapi(self, settings=settings)
		self.update_ents = self.dbapi.update_ents
		self.move_slot_ent = self.dbapi.move_slot_ent
		self._remote_has_index = False
		self._remotepkgs = None # remote metadata indexed by cpv
		self.settings = settings
		self._pkgindex_uri = {}
		self._populating = False
		# Legacy layout support: ${PKGDIR}/All with per-category symlinks.
		self._all_directory = os.path.isdir(
			os.path.join(self.pkgdir, "All"))
		self._pkgindex_version = 0
		self._pkgindex_hashes = ["MD5","SHA1"]
		self._pkgindex_file = os.path.join(self.pkgdir, "Packages")
		self._pkgindex_keys = self.dbapi._aux_cache_keys.copy()
		self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"])
		self._pkgindex_aux_keys = \
			["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI",
			"IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES",
			"PROVIDE", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES",
		self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
		self._pkgindex_use_evaluated_keys = \
			("LICENSE", "RDEPEND", "DEPEND",
			"PDEPEND", "PROPERTIES", "PROVIDE")
		self._pkgindex_header_keys = set([
			"ACCEPT_KEYWORDS", "ACCEPT_LICENSE",
			"ACCEPT_PROPERTIES", "CBUILD",
			"CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "FEATURES",
			"GENTOO_MIRRORS", "INSTALL_MASK", "SYNC", "USE"])
		self._pkgindex_default_pkg_data = {
			"DEFINED_PHASES" : "",
		self._pkgindex_inherited_keys = ["CHOST", "repository"]
		# Populate the header with appropriate defaults.
		self._pkgindex_default_header_data = {
			"CHOST" : self.settings.get("CHOST", ""),
		# It is especially important to populate keys like
		# "repository" that save space when entries can
		# inherit them from the header. If an existing
		# pkgindex header already defines these keys, then
		# they will appropriately override our defaults.
		main_repo = self.settings.repositories.mainRepo()
		if main_repo is not None and not main_repo.missing_repo_name:
			self._pkgindex_default_header_data["repository"] = \
		self._pkgindex_translated_keys = (
			("DESCRIPTION" , "DESC"),
			("repository" , "REPO"),
		self._pkgindex_allowed_pkg_keys = set(chain(
			self._pkgindex_aux_keys,
			self._pkgindex_hashes,
			self._pkgindex_default_pkg_data,
			self._pkgindex_inherited_keys,
			chain(*self._pkgindex_translated_keys)
	# NOTE(review): the `root` property definition lines are elided; the
	# body below warns that the attribute is deprecated.
		warnings.warn("The root attribute of "
			"portage.dbapi.bintree.binarytree"
			" is deprecated. Use "
			"settings['ROOT'] instead.",
			DeprecationWarning, stacklevel=3)
		return self.settings['ROOT']
	def move_ent(self, mylist, repo_match=None):
		"""Apply a package move (origcp -> newcp) to matching binary
		packages: rewrite the xpak metadata, rename the .tbz2 file and
		re-inject it. NOTE(review): some lines are elided in this view."""
		if not self.populated:
		for atom in (origcp, newcp):
			if not isjustname(atom):
				raise InvalidPackageName(str(atom))
		mynewcat = catsplit(newcp)[0]
		origmatches=self.dbapi.cp_list(origcp)
		for mycpv in origmatches:
			mycpv_cp = portage.cpv_getkey(mycpv)
			if mycpv_cp != origcp:
				# Ignore PROVIDE virtual match.
			if repo_match is not None \
				and not repo_match(self.dbapi.aux_get(mycpv,
			mynewcpv = mycpv.replace(mycpv_cp, str(newcp), 1)
			myoldpkg = catsplit(mycpv)[1]
			mynewpkg = catsplit(mynewcpv)[1]
			# Refuse to clobber an existing destination package.
			if (mynewpkg != myoldpkg) and os.path.exists(self.getname(mynewcpv)):
				writemsg(_("!!! Cannot update binary: Destination exists.\n"),
				writemsg("!!! "+mycpv+" -> "+mynewcpv+"\n", noiselevel=-1)
			tbz2path = self.getname(mycpv)
			if os.path.exists(tbz2path) and not os.access(tbz2path,os.W_OK):
				writemsg(_("!!! Cannot update readonly binary: %s\n") % mycpv,
			# Rewrite the xpak metadata to reflect the new name/category.
			mytbz2 = portage.xpak.tbz2(tbz2path)
			mydata = mytbz2.get_data()
			updated_items = update_dbentries([mylist], mydata)
			mydata.update(updated_items)
				_unicode_encode(mynewpkg + "\n",
					encoding=_encodings['repo.content'])
			mydata[b'CATEGORY'] = \
				_unicode_encode(mynewcat + "\n",
					encoding=_encodings['repo.content'])
			if mynewpkg != myoldpkg:
				ebuild_data = mydata.pop(_unicode_encode(myoldpkg + '.ebuild',
					encoding=_encodings['repo.content']), None)
				if ebuild_data is not None:
					mydata[_unicode_encode(mynewpkg + '.ebuild',
						encoding=_encodings['repo.content'])] = ebuild_data
			mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
			self.dbapi.cpv_remove(mycpv)
			del self._pkg_paths[mycpv]
			new_path = self.getname(mynewcpv)
			self._pkg_paths[mynewcpv] = os.path.join(
				*new_path.split(os.path.sep)[-2:])
			# NOTE(review): new_path (a string) is compared against the
			# tbz2 object mytbz2 — this is always True; looks like it was
			# meant to be tbz2path. TODO confirm upstream.
			if new_path != mytbz2:
				self._ensure_dir(os.path.dirname(new_path))
				_movefile(tbz2path, new_path, mysettings=self.settings)
			self._remove_symlink(mycpv)
			if new_path.split(os.path.sep)[-2] == "All":
				self._create_symlink(mynewcpv)
			self.inject(mynewcpv)
	def _remove_symlink(self, cpv):
		"""Remove a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink and also remove
		the ${PKGDIR}/${CATEGORY} directory if empty. The file will not be
		removed if os.path.islink() returns False."""
		mycat, mypkg = catsplit(cpv)
		mylink = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
		if os.path.islink(mylink):
			"""Only remove it if it's really a link so that this method never
			removes a real package that was placed here to avoid a collision."""
			# (elided: the actual unlink of mylink)
		try:
			# Best-effort removal of a now-empty category directory.
			os.rmdir(os.path.join(self.pkgdir, mycat))
			# Tolerate missing, non-empty, or racing directory states.
			if e.errno not in (errno.ENOENT,
				errno.ENOTEMPTY, errno.EEXIST):
	def _create_symlink(self, cpv):
		"""Create a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink (and
		${PKGDIR}/${CATEGORY} directory, if necessary). Any file that may
		exist in the location of the symlink will first be removed."""
		mycat, mypkg = catsplit(cpv)
		full_path = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
		self._ensure_dir(os.path.dirname(full_path))
			# (elided: removal of a pre-existing file at full_path)
			if e.errno != errno.ENOENT:
		# Relative link so the tree stays valid if PKGDIR is relocated.
		os.symlink(os.path.join("..", "All", mypkg + ".tbz2"), full_path)
	def prevent_collision(self, cpv):
		"""Make sure that the file location ${PKGDIR}/All/${PF}.tbz2 is safe to
		use for a given cpv. If a collision will occur with an existing
		package from another category, the existing package will be bumped to
		${PKGDIR}/${CATEGORY}/${PF}.tbz2 so that both can coexist."""
		# Only relevant for the legacy ${PKGDIR}/All layout.
		if not self._all_directory:
		# Copy group permissions for new directories that
		# may have been created.
		for path in ("All", catsplit(cpv)[0]):
			path = os.path.join(self.pkgdir, path)
			self._ensure_dir(path)
			if not os.access(path, os.W_OK):
				raise PermissionDenied("access('%s', W_OK)" % path)
		full_path = self.getname(cpv)
		if "All" == full_path.split(os.path.sep)[-2]:
		"""Move a colliding package if it exists. Code below this point only
		executes in rare cases."""
		mycat, mypkg = catsplit(cpv)
		myfile = mypkg + ".tbz2"
		mypath = os.path.join("All", myfile)
		dest_path = os.path.join(self.pkgdir, mypath)
			st = os.lstat(dest_path)
			if stat.S_ISLNK(st.st_mode):
				# (elided: remove the stale symlink)
					if os.path.exists(dest_path):
			# For invalid packages, other_cat could be None.
			other_cat = portage.xpak.tbz2(dest_path).getfile(b"CATEGORY")
				other_cat = _unicode_decode(other_cat,
					encoding=_encodings['repo.content'], errors='replace')
				other_cat = other_cat.strip()
				other_cpv = other_cat + "/" + mypkg
				self._move_from_all(other_cpv)
				self.inject(other_cpv)
			self._move_to_all(cpv)
	def _ensure_dir(self, path):
		"""
		Create the specified directory. Also, copy gid and group mode
		bits from self.pkgdir if possible.
		@param cat_dir: Absolute path of the directory to be created.
		@type cat_dir: String
		"""
		# NOTE(review): docstring documents "cat_dir" but the parameter is
		# named "path" — the docstring is stale.
			pkgdir_st = os.stat(self.pkgdir)
		pkgdir_gid = pkgdir_st.st_gid
		# Keep only the setgid bit and group rwx bits from pkgdir's mode.
		pkgdir_grp_mode = 0o2070 & pkgdir_st.st_mode
		try:
			ensure_dirs(path, gid=pkgdir_gid, mode=pkgdir_grp_mode, mask=0)
		except PortageException:
			# Ignore the failure if the directory exists anyway.
			if not os.path.isdir(path):
	def _move_to_all(self, cpv):
		"""If the file exists, move it. Whether or not it exists, update state
		for future getname() calls."""
		mycat, mypkg = catsplit(cpv)
		myfile = mypkg + ".tbz2"
		# Record the new relative location first, regardless of existence.
		self._pkg_paths[cpv] = os.path.join("All", myfile)
		src_path = os.path.join(self.pkgdir, mycat, myfile)
			mystat = os.lstat(src_path)
		# Only move a regular file; symlinks/missing files are left alone.
		if mystat and stat.S_ISREG(mystat.st_mode):
			self._ensure_dir(os.path.join(self.pkgdir, "All"))
			dest_path = os.path.join(self.pkgdir, "All", myfile)
			_movefile(src_path, dest_path, mysettings=self.settings)
			self._create_symlink(cpv)
556 def _move_from_all(self, cpv):
557 """Move a package from ${PKGDIR}/All/${PF}.tbz2 to
558 ${PKGDIR}/${CATEGORY}/${PF}.tbz2 and update state from getname calls."""
559 self._remove_symlink(cpv)
560 mycat, mypkg = catsplit(cpv)
561 myfile = mypkg + ".tbz2"
562 mypath = os.path.join(mycat, myfile)
563 dest_path = os.path.join(self.pkgdir, mypath)
564 self._ensure_dir(os.path.dirname(dest_path))
565 src_path = os.path.join(self.pkgdir, "All", myfile)
566 _movefile(src_path, dest_path, mysettings=self.settings)
567 self._pkg_paths[cpv] = mypath
	def populate(self, getbinpkgs=0):
		"populates the binarytree"
		# NOTE(review): the reentrancy guard and try/finally lines are
		# elided in this view; only the lock/populate core is visible.
			# Take the Packages index lock only when the dir is writable.
			if os.access(self.pkgdir, os.W_OK):
				pkgindex_lock = lockfile(self._pkgindex_file,
			self._populating = True
			self._populate(getbinpkgs)
				unlockfile(pkgindex_lock)
			self._populating = False
	def _populate(self, getbinpkgs=0):
		"""Scan ${PKGDIR} (and optionally remote binhosts) and load all
		package entries into self.dbapi, refreshing the Packages index.
		NOTE(review): a large number of lines in this method are elided in
		this view; indentation below is reconstructed and comments only
		describe the visible code."""
		if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
		# Clear all caches in case populate is called multiple times
		# as may be the case when _global_updates calls populate()
		# prior to performing package moves since it only wants to
		# operate on local packages (getbinpkgs=0).
		self._remotepkgs = None
		self.dbapi._clear_cache()
		self.dbapi._aux_cache.clear()
			self._pkg_paths = pkg_paths
			dirs = listdir(self.pkgdir, dirsonly=True, EmptyOnError=True)
				# Scan the legacy All/ directory first so its entries win.
				dirs.insert(0, "All")
			pkgindex = self._load_pkgindex()
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			header = pkgindex.header
			for d in pkgindex.packages:
				metadata[d["CPV"]] = d
			update_pkgindex = False
				for myfile in listdir(os.path.join(self.pkgdir, mydir)):
					if not myfile.endswith(".tbz2"):
					mypath = os.path.join(mydir, myfile)
					full_path = os.path.join(self.pkgdir, mypath)
					s = os.lstat(full_path)
					# Symlinks (category aliases into All/) are skipped.
					if stat.S_ISLNK(s.st_mode):
					# Validate data from the package index and try to avoid
					# reading the xpak if possible.
						d = metadata.get(mydir+"/"+myfile[:-5])
							for mycpv in metadata:
								mycat, mypf = catsplit(mycpv)
									mypf, []).append(metadata[mycpv])
						possibilities = pf_index.get(myfile[:-5])
						for d in possibilities:
							# Index entry is stale if mtime/size disagree.
								if long(d["MTIME"]) != s[stat.ST_MTIME]:
							except (KeyError, ValueError):
								if long(d["SIZE"]) != long(s.st_size):
							except (KeyError, ValueError):
							if not self._pkgindex_keys.difference(d):
							if mycpv in pkg_paths:
								# discard duplicates (All/ is preferred)
							mycpv = _pkg_str(mycpv)
							pkg_paths[mycpv] = mypath
							# update the path if the package has been moved
							oldpath = d.get("PATH")
							if oldpath and oldpath != mypath:
								update_pkgindex = True
							if mypath != mycpv + ".tbz2":
									update_pkgindex = True
									update_pkgindex = True
							self.dbapi.cpv_inject(mycpv)
							# Prime the aux cache when the index entry
							# covers every cached key.
							if not self.dbapi._aux_cache_keys.difference(d):
								aux_cache = self.dbapi._aux_cache_slot_dict()
								for k in self.dbapi._aux_cache_keys:
								self.dbapi._aux_cache[mycpv] = aux_cache
					if not os.access(full_path, os.R_OK):
						writemsg(_("!!! Permission denied to read " \
							"binary package: '%s'\n") % full_path,
						self.invalids.append(myfile[:-5])
					# Index miss: read metadata from the xpak itself.
					metadata_bytes = portage.xpak.tbz2(full_path).get_data()
					mycat = _unicode_decode(metadata_bytes.get(b"CATEGORY", ""),
						encoding=_encodings['repo.content'], errors='replace')
					mypf = _unicode_decode(metadata_bytes.get(b"PF", ""),
						encoding=_encodings['repo.content'], errors='replace')
					slot = _unicode_decode(metadata_bytes.get(b"SLOT", ""),
						encoding=_encodings['repo.content'], errors='replace')
					if not mycat or not mypf or not slot:
						#old-style or corrupt package
						writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
							missing_keys.append("CATEGORY")
							missing_keys.append("PF")
							missing_keys.append("SLOT")
							msg.append(_("Missing metadata key(s): %s.") % \
								", ".join(missing_keys))
						msg.append(_(" This binary package is not " \
							"recoverable and should be deleted."))
						for line in textwrap.wrap("".join(msg), 72):
							writemsg("!!! %s\n" % line, noiselevel=-1)
						self.invalids.append(mypkg)
					mycat = mycat.strip()
					# Category/package names must agree with the path.
					if mycat != mydir and mydir != "All":
					if mypkg != mypf.strip():
					mycpv = mycat + "/" + mypkg
					if mycpv in pkg_paths:
						# All is first, so it's preferred.
					if not self.dbapi._category_re.match(mycat):
						writemsg(_("!!! Binary package has an " \
							"unrecognized category: '%s'\n") % full_path,
						writemsg(_("!!! '%s' has a category that is not" \
							" listed in %setc/portage/categories\n") % \
							(mycpv, self.settings["PORTAGE_CONFIGROOT"]),
					mycpv = _pkg_str(mycpv)
					pkg_paths[mycpv] = mypath
					self.dbapi.cpv_inject(mycpv)
					update_pkgindex = True
					d = metadata.get(mycpv, {})
					# Discard the cached entry if mtime or size changed.
							if long(d["MTIME"]) != s[stat.ST_MTIME]:
						except (KeyError, ValueError):
							if long(d["SIZE"]) != long(s.st_size):
						except (KeyError, ValueError):
					d["MTIME"] = str(s[stat.ST_MTIME])
					d["SIZE"] = str(s.st_size)
					d.update(zip(self._pkgindex_aux_keys,
						self.dbapi.aux_get(mycpv, self._pkgindex_aux_keys)))
						self._eval_use_flags(mycpv, d)
					except portage.exception.InvalidDependString:
						writemsg(_("!!! Invalid binary package: '%s'\n") % \
							self.getname(mycpv), noiselevel=-1)
						self.dbapi.cpv_remove(mycpv)
					# record location if it's non-default
					if mypath != mycpv + ".tbz2":
					if not self.dbapi._aux_cache_keys.difference(d):
						aux_cache = self.dbapi._aux_cache_slot_dict()
						for k in self.dbapi._aux_cache_keys:
						self.dbapi._aux_cache[mycpv] = aux_cache
			# Drop index entries whose files no longer exist.
			for cpv in list(metadata):
				if cpv not in pkg_paths:
			# Do not bother to write the Packages index if $PKGDIR/All/ exists
			# since it will provide no benefit due to the need to read CATEGORY
			if update_pkgindex and os.access(self.pkgdir, os.W_OK):
				del pkgindex.packages[:]
				pkgindex.packages.extend(iter(metadata.values()))
				self._update_pkgindex_header(pkgindex.header)
				f = atomic_ofstream(self._pkgindex_file)
		if getbinpkgs and not self.settings["PORTAGE_BINHOST"]:
			writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
		if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings:
		self._remotepkgs = {}
		for base_url in self.settings["PORTAGE_BINHOST"].split():
			parsed_url = urlparse(base_url)
			host = parsed_url.netloc
			port = parsed_url.port
			# Split optional user[:password]@ credentials off the netloc.
				user, host = host.split("@", 1)
				user_passwd = user + "@"
					user, passwd = user.split(":", 1)
				port_str = ":%s" % (port,)
				if host.endswith(port_str):
					host = host[:-len(port_str)]
			pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
				host, parsed_url.path.lstrip("/"), "Packages")
			pkgindex = self._new_pkgindex()
			# Load the locally cached copy of this binhost's index.
				f = io.open(_unicode_encode(pkgindex_file,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
			rmt_idx = self._new_pkgindex()
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				url = base_url.rstrip("/") + "/Packages"
					path = parsed_url.path.rstrip("/") + "/Packages"
					if parsed_url.scheme == 'sftp':
						# The sftp command complains about 'Illegal seek' if
						# we try to make it write to /dev/stdout, so use a
						fd, tmp_filename = tempfile.mkstemp()
							port_args = ['-P', "%s" % (port,)]
						proc = subprocess.Popen(['sftp'] + port_args + \
							[user_passwd + host + ":" + path, tmp_filename])
						if proc.wait() != os.EX_OK:
						f = open(tmp_filename, 'rb')
					elif parsed_url.scheme == 'ssh':
							port_args = ['-p', "%s" % (port,)]
						proc = subprocess.Popen(['ssh'] + port_args + \
							[user_passwd + host, '--', 'cat', path],
							stdout=subprocess.PIPE)
						# Fall back to the configured fetch command.
						setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
						fcmd = self.settings.get(setting)
						fd, tmp_filename = tempfile.mkstemp()
						tmp_dirname, tmp_basename = os.path.split(tmp_filename)
						success = portage.getbinpkg.file_get(url,
							tmp_dirname, fcmd=fcmd, filename=tmp_basename)
							raise EnvironmentError("%s failed" % (setting,))
						f = open(tmp_filename, 'rb')
				f_dec = codecs.iterdecode(f,
					_encodings['repo.content'], errors='replace')
					rmt_idx.readHeader(f_dec)
					remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
					if not remote_timestamp:
						# no timestamp in the header, something's wrong
						writemsg(_("\n\n!!! Binhost package index " \
							" has no TIMESTAMP field.\n"), noiselevel=-1)
						if not self._pkgindex_version_supported(rmt_idx):
							writemsg(_("\n\n!!! Binhost package index version" \
								" is not supported: '%s'\n") % \
								rmt_idx.header.get("VERSION"), noiselevel=-1)
						elif local_timestamp != remote_timestamp:
							# Only re-read the body when it changed.
							rmt_idx.readBody(f_dec)
					# Timeout after 5 seconds, in case close() blocks
					# indefinitely (see bug #350139).
						AlarmSignal.register(5)
						AlarmSignal.unregister()
						writemsg("\n\n!!! %s\n" % \
							_("Timed out while closing connection to binhost"),
			except EnvironmentError as e:
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % base_url)
				writemsg("!!! %s\n\n" % str(e))
					# Clean up any still-running fetch subprocess.
					if proc.poll() is None:
				if tmp_filename is not None:
					os.unlink(tmp_filename)
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
					ensure_dirs(os.path.dirname(pkgindex_file))
					f = atomic_ofstream(pkgindex_file)
				except (IOError, PortageException):
					if os.access(os.path.dirname(pkgindex_file), os.W_OK):
					# The current user doesn't have permission to cache the
					# file, but that's alright.
			# Organize remote package list as a cpv -> metadata map.
				remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex)
				remote_base_uri = pkgindex.header.get("URI", base_url)
				for cpv, remote_metadata in remotepkgs.items():
					remote_metadata["BASE_URI"] = remote_base_uri
					self._pkgindex_uri[cpv] = url
				self._remotepkgs.update(remotepkgs)
				self._remote_has_index = True
				for cpv in remotepkgs:
					self.dbapi.cpv_inject(cpv)
				# Remote package instances override local package
				# if they are not identical.
				hash_names = ["SIZE"] + self._pkgindex_hashes
				for cpv, local_metadata in metadata.items():
					remote_metadata = self._remotepkgs.get(cpv)
					if remote_metadata is None:
					# Use digests to compare identity.
					for hash_name in hash_names:
						local_value = local_metadata.get(hash_name)
						if local_value is None:
						remote_value = remote_metadata.get(hash_name)
						if remote_value is None:
						if local_value != remote_value:
						del self._remotepkgs[cpv]
						# Override the local package in the aux_get cache.
						self.dbapi._aux_cache[cpv] = remote_metadata
				# Local package instances override remote instances.
					self._remotepkgs.pop(cpv, None)
			# Legacy (indexless) binhost protocol below.
				chunk_size = long(self.settings["PORTAGE_BINHOST_CHUNKSIZE"])
			except (ValueError, KeyError):
			writemsg_stdout("\n")
				colorize("GOOD", _("Fetching bininfo from ")) + \
				re.sub(r'//(.+):.+@(.+)/', r'//\1:*password*@\2/', base_url) + "\n")
			remotepkgs = portage.getbinpkg.dir_get_metadata(
				base_url, chunk_size=chunk_size)
			for mypkg, remote_metadata in remotepkgs.items():
				mycat = remote_metadata.get("CATEGORY")
					#old-style or corrupt package
					writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg,
				mycat = mycat.strip()
				fullpkg = mycat+"/"+mypkg[:-5]
				if fullpkg in metadata:
					# When using this old protocol, comparison with the remote
					# package isn't supported, so the local package is always
					# preferred even if getbinpkgsonly is enabled.
				if not self.dbapi._category_re.match(mycat):
					writemsg(_("!!! Remote binary package has an " \
						"unrecognized category: '%s'\n") % fullpkg,
					writemsg(_("!!! '%s' has a category that is not" \
						" listed in %setc/portage/categories\n") % \
						(fullpkg, self.settings["PORTAGE_CONFIGROOT"]),
				mykey = portage.cpv_getkey(fullpkg)
					# invalid tbz2's can hurt things.
					self.dbapi.cpv_inject(fullpkg)
					for k, v in remote_metadata.items():
						remote_metadata[k] = v.strip()
					remote_metadata["BASE_URI"] = base_url
					# Eliminate metadata values with names that digestCheck
					# uses, since they are not valid when using the old
					# protocol. Typically this is needed for SIZE metadata
					# which corresponds to the size of the unpacked files
					# rather than the binpkg file size, triggering digest
					# verification failures as reported in bug #303211.
					remote_metadata.pop('SIZE', None)
					for k in portage.checksum.hashfunc_map:
						remote_metadata.pop(k, None)
					self._remotepkgs[fullpkg] = remote_metadata
				except SystemExit as e:
					writemsg(_("!!! Failed to inject remote binary package: %s\n") % fullpkg,
	def inject(self, cpv, filename=None):
		"""Add a freshly built package to the database. This updates
		$PKGDIR/Packages with the new package metadata (including MD5).
		@param cpv: The cpv of the new package to inject
		@param filename: File path of the package to inject, or None if it's
			already in the location returned by getname()
		@type filename: string
		"""
		# NOTE(review): some lines of this method are elided in this view.
		mycat, mypkg = catsplit(cpv)
		if not self.populated:
		if filename is None:
			full_path = self.getname(cpv)
			full_path = filename
			s = os.stat(full_path)
		except OSError as e:
			if e.errno != errno.ENOENT:
			writemsg(_("!!! Binary package does not exist: '%s'\n") % full_path,
		# A package without a SLOT in its xpak is unusable.
		mytbz2 = portage.xpak.tbz2(full_path)
		slot = mytbz2.getfile("SLOT")
			writemsg(_("!!! Invalid binary package: '%s'\n") % full_path,
		self.dbapi.cpv_inject(cpv)
		# Reread the Packages index (in case it's been changed by another
		# process) and then updated it, all while holding a lock.
		pkgindex_lock = None
		created_symlink = False
			pkgindex_lock = lockfile(self._pkgindex_file,
			if filename is not None:
				new_filename = self.getname(cpv)
					samefile = os.path.samefile(filename, new_filename)
					self._ensure_dir(os.path.dirname(new_filename))
					_movefile(filename, new_filename, mysettings=self.settings)
			if self._all_directory and \
				self.getname(cpv).split(os.path.sep)[-2] == "All":
				self._create_symlink(cpv)
				created_symlink = True
			pkgindex = self._load_pkgindex()
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			# Discard remote metadata to ensure that _pkgindex_entry
			# gets the local metadata. This also updates state for future
			if self._remotepkgs is not None:
				self._remotepkgs.pop(cpv, None)
			# Discard cached metadata to ensure that _pkgindex_entry
			# doesn't return stale metadata.
			self.dbapi._aux_cache.pop(cpv, None)
				d = self._pkgindex_entry(cpv)
			except portage.exception.InvalidDependString:
				writemsg(_("!!! Invalid binary package: '%s'\n") % \
					self.getname(cpv), noiselevel=-1)
				self.dbapi.cpv_remove(cpv)
				del self._pkg_paths[cpv]
			# If found, remove package(s) with duplicate path.
			path = d.get("PATH", "")
			for i in range(len(pkgindex.packages) - 1, -1, -1):
				d2 = pkgindex.packages[i]
				if path and path == d2.get("PATH"):
					# Handle path collisions in $PKGDIR/All
					# when CPV is not identical.
					del pkgindex.packages[i]
				elif cpv == d2.get("CPV"):
					if path == d2.get("PATH", ""):
						del pkgindex.packages[i]
					elif created_symlink and not d2.get("PATH", ""):
						# Delete entry for the package that was just
						# overwritten by a symlink to this package.
						del pkgindex.packages[i]
			pkgindex.packages.append(d)
			self._update_pkgindex_header(pkgindex.header)
			f = atomic_ofstream(os.path.join(self.pkgdir, "Packages"))
				unlockfile(pkgindex_lock)
	def _pkgindex_entry(self, cpv):
		"""
		Performs checksums and evaluates USE flag conditionals.
		Raises InvalidDependString if necessary.
		@return: a dict containing entry for the give cpv.
		"""
		# NOTE(review): some lines of this method are elided in this view.
		pkg_path = self.getname(cpv)
		d = dict(zip(self._pkgindex_aux_keys,
			self.dbapi.aux_get(cpv, self._pkgindex_aux_keys)))
		# Add MD5/SHA1 (self._pkgindex_hashes) digests of the tbz2 file.
		d.update(perform_multiple_checksums(
			pkg_path, hashes=self._pkgindex_hashes))
		st = os.stat(pkg_path)
		d["MTIME"] = str(st[stat.ST_MTIME])
		d["SIZE"] = str(st.st_size)
		rel_path = self._pkg_paths[cpv]
		# record location if it's non-default
		if rel_path != cpv + ".tbz2":
			d["PATH"] = rel_path
		self._eval_use_flags(cpv, d)
def _new_pkgindex(self):
	"""Create an empty PackageIndex configured with this tree's schema."""
	index_options = {
		"allowed_pkg_keys": self._pkgindex_allowed_pkg_keys,
		"default_header_data": self._pkgindex_default_header_data,
		"default_pkg_data": self._pkgindex_default_pkg_data,
		"inherited_keys": self._pkgindex_inherited_keys,
		"translated_keys": self._pkgindex_translated_keys,
	}
	return portage.getbinpkg.PackageIndex(**index_options)
def _update_pkgindex_header(self, header):
	# Refresh the global header of the Packages index: active profile,
	# index format version, base URI, and selected config variables.
	portdir = normalize_path(os.path.realpath(self.settings["PORTDIR"]))
	profiles_base = os.path.join(portdir, "profiles") + os.path.sep
	if self.settings.profile_path:
		profile_path = normalize_path(
			os.path.realpath(self.settings.profile_path))
		# Store the profile relative to $PORTDIR/profiles when it
		# lives inside that tree.
		if profile_path.startswith(profiles_base):
			profile_path = profile_path[len(profiles_base):]
		header["PROFILE"] = profile_path
	header["VERSION"] = str(self._pkgindex_version)
	base_uri = self.settings.get("PORTAGE_BINHOST_HEADER_URI")
	# NOTE(review): the if/else that chooses between setting and
	# removing URI appears elided from this view — confirm upstream.
	header["URI"] = base_uri
	header.pop("URI", None)
	# Mirror selected configuration variables into the header.
	for k in self._pkgindex_header_keys:
		v = self.settings.get(k, None)
def _pkgindex_version_supported(self, pkgindex):
	# Report whether the index's VERSION header denotes a format no
	# newer than the one this code writes (self._pkgindex_version).
	version = pkgindex.header.get("VERSION")
	# NOTE(review): guards for a missing or non-integer VERSION value
	# appear elided from this view — int() would raise here otherwise.
	if int(version) <= self._pkgindex_version:
def _eval_use_flags(self, cpv, metadata):
	# Trim USE down to flags actually declared in IUSE, then collapse
	# USE-conditional dependency strings in-place in metadata.
	use = frozenset(metadata["USE"].split())
	# IUSE entries may carry +/- default prefixes; strip them.
	iuse = set(f.lstrip("-+") for f in metadata["IUSE"].split())
	use = [f for f in use if f in iuse]
	metadata["USE"] = " ".join(use)
	for k in self._pkgindex_use_evaluated_keys:
		# *DEPEND values are parsed as dependency atoms.
		if k.endswith('DEPEND'):
			# Evaluate conditionals against the raw USE flags, then
			# serialize the reduced structure back to a flat string.
			deps = use_reduce(deps, uselist=raw_use, token_class=token_class)
			deps = paren_enclose(deps)
		except portage.exception.InvalidDependString as e:
			writemsg("%s: %s\n" % (k, str(e)),
def exists_specific(self, cpv):
	# Return the matches for an exact "=cpv" atom against this tree's
	# dbapi (truthy iff the specific version exists as a binary pkg).
	if not self.populated:
	# NOTE(review): the populate() call under the guard above appears
	# elided from this view.
	return self.dbapi.match(
		dep_expand("="+cpv, mydb=self.dbapi, settings=self.settings))
def dep_bestmatch(self, mydep):
	"compatibility method -- all matches, not just visible ones"
	if not self.populated:
	# Expand the dependency atom against this dbapi, then pick the
	# best version among all matches; the writemsg calls at noise
	# level 1 are debug tracing.
	writemsg("mydep: %s\n" % mydep, 1)
	mydep = dep_expand(mydep, mydb=self.dbapi, settings=self.settings)
	writemsg("mydep: %s\n" % mydep, 1)
	mykey = dep_getkey(mydep)
	writemsg("mykey: %s\n" % mykey, 1)
	mymatch = best(match_from_list(mydep,self.dbapi.cp_list(mykey)))
	writemsg("mymatch: %s\n" % mymatch, 1)
def getname(self, pkgname):
	"""Returns a file location for this package. The default location is
	${PKGDIR}/All/${PF}.tbz2, but will be ${PKGDIR}/${CATEGORY}/${PF}.tbz2
	in the rare event of a collision. The prevent_collision() method can
	be called to ensure that ${PKGDIR}/All/${PF}.tbz2 is available for a
	specific cpv."""
	if not self.populated:
	# Fast path: a previously computed relative path is cached.
	mypath = self._pkg_paths.get(mycpv, None)
	return os.path.join(self.pkgdir, mypath)
	mycat, mypkg = catsplit(mycpv)
	if self._all_directory:
		mypath = os.path.join("All", mypkg + ".tbz2")
		# Fall back to the category subdirectory when another cpv
		# already occupies the All/ filename.
		if mypath in self._pkg_paths.values():
			mypath = os.path.join(mycat, mypkg + ".tbz2")
	mypath = os.path.join(mycat, mypkg + ".tbz2")
	self._pkg_paths[mycpv] = mypath # cache for future lookups
	return os.path.join(self.pkgdir, mypath)
def isremote(self, pkgname):
	"""Returns true if the package is kept remotely and it has not been
	downloaded (or it is only partially downloaded)."""
	if self._remotepkgs is None or pkgname not in self._remotepkgs:
	# Presence in self._remotepkgs implies that it's remote. When a
	# package is downloaded, state is updated by self.inject().
	# NOTE(review): the return statements for both branches appear
	# elided from this view.
def get_pkgindex_uri(self, pkgname):
	"""Return the URI of the Packages index file that listed the given
	package, or None when no index URI is recorded for it."""
	uri_by_pkg = self._pkgindex_uri
	return uri_by_pkg.get(pkgname)
def gettbz2(self, pkgname):
	"""Fetches the package from a remote site, if necessary. Attempts to
	resume if the file appears to be partially downloaded."""
	tbz2_path = self.getname(pkgname)
	tbz2name = os.path.basename(tbz2_path)
	if os.path.exists(tbz2_path):
		# An already-valid local file means nothing to fetch.
		if (tbz2name not in self.invalids):
		writemsg(_("Resuming download of this tbz2, but it is possible that it is corrupt.\n"),
	mydest = os.path.dirname(self.getname(pkgname))
	self._ensure_dir(mydest)
	# urljoin doesn't work correctly with unrecognized protocols like sftp
	if self._remote_has_index:
		# Prefer the PATH recorded in the remote index; otherwise
		# derive the filename from the cpv.
		rel_url = self._remotepkgs[pkgname].get("PATH")
		rel_url = pkgname+".tbz2"
		remote_base_uri = self._remotepkgs[pkgname]["BASE_URI"]
		url = remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/")
	url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name
	protocol = urlparse(url)[0]
	# Select FETCHCOMMAND vs RESUMECOMMAND, preferring the
	# protocol-specific variant (e.g. FETCHCOMMAND_HTTP).
	fcmd_prefix = "FETCHCOMMAND"
	fcmd_prefix = "RESUMECOMMAND"
	fcmd = self.settings.get(fcmd_prefix + "_" + protocol.upper())
	fcmd = self.settings.get(fcmd_prefix)
	success = portage.getbinpkg.file_get(url, mydest, fcmd=fcmd)
	# On failure, discard the partial file and raise.
	os.unlink(self.getname(pkgname))
	raise portage.exception.FileNotFound(mydest)
	# Register the freshly downloaded package with the tree.
	self.inject(pkgname)
def _load_pkgindex(self):
	# Read and parse the on-disk Packages file into a fresh
	# PackageIndex; a missing/unreadable file yields the empty index.
	pkgindex = self._new_pkgindex()
	# Filenames are encoded with the filesystem encoding while the
	# file contents use the repository content encoding.
	f = io.open(_unicode_encode(self._pkgindex_file,
		encoding=_encodings['fs'], errors='strict'),
		mode='r', encoding=_encodings['repo.content'],
	except EnvironmentError:
def digestCheck(self, pkg):
	"""
	Verify digests for the given package and raise DigestException
	if verification fails.

	@return: True if digests could be located, False otherwise.
	"""
	if not isinstance(cpv, basestring):
	pkg_path = self.getname(cpv)
	# Local package: look its entry up in the on-disk index.
	if self._remotepkgs is None or cpv not in self._remotepkgs:
		for d in self._load_pkgindex().packages:
	metadata = self._remotepkgs[cpv]
	if metadata is None:
	# Collect every known hash value present in the metadata.
	for k in hashfunc_map:
	if "SIZE" in metadata:
		digests["size"] = int(metadata["SIZE"])
		# A non-integer SIZE is reported but not fatal.
		writemsg(_("!!! Malformed SIZE attribute in remote " \
			"metadata for '%s'\n") % cpv)
	eout.quiet = self.settings.get("PORTAGE_QUIET") == "1"
	# First a quick distfile-style check, then full verification;
	# any mismatch reason is wrapped in a DigestException.
	ok, st = _check_distfile(pkg_path, digests, eout, show_errors=0)
	ok, reason = verify_all(pkg_path, digests)
	raise portage.exception.DigestException(
		(pkg_path,) + tuple(reason))
1414 def getslot(self, mycatpkg):
1415 "Get a slot for a catpkg; assume it exists."
1418 myslot = self.dbapi.aux_get(mycatpkg,["SLOT"])[0]
1419 except SystemExit as e:
1421 except Exception as e: