1 # Copyright 1998-2012 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
4 __all__ = ["bindbapi", "binarytree"]
7 portage.proxy.lazyimport.lazyimport(globals(),
8 'portage.checksum:hashfunc_map,perform_multiple_checksums,' + \
9 'verify_all,_apply_hash_filter,_hash_filter',
10 'portage.dbapi.dep_expand:dep_expand',
11 'portage.dep:dep_getkey,isjustname,isvalidatom,match_from_list',
12 'portage.output:EOutput,colorize',
13 'portage.locks:lockfile,unlockfile',
14 'portage.package.ebuild.fetch:_check_distfile,_hide_url_passwd',
15 'portage.update:update_dbentries',
16 'portage.util:atomic_ofstream,ensure_dirs,normalize_path,' + \
17 'writemsg,writemsg_stdout',
18 'portage.util.listdir:listdir',
19 'portage.util._urlopen:urlopen@_urlopen',
20 'portage.versions:best,catpkgsplit,catsplit,_pkg_str',
23 from portage.cache.mappings import slot_dict_class
24 from portage.const import CACHE_PATH
25 from portage.dbapi.virtual import fakedbapi
26 from portage.dep import Atom, use_reduce, paren_enclose
27 from portage.exception import AlarmSignal, InvalidData, InvalidPackageName, \
28 PermissionDenied, PortageException
29 from portage.localization import _
30 from portage import _movefile
31 from portage import os
32 from portage import _encodings
33 from portage import _unicode_decode
34 from portage import _unicode_encode
45 from gzip import GzipFile
46 from itertools import chain
48 from urllib.parse import urlparse
50 from urlparse import urlparse
52 if sys.hexversion >= 0x3000000:
class UseCachedCopyOfRemoteIndex(Exception):
	"""Internal control-flow exception raised while fetching a binhost
	Packages index to signal that the local cached copy is current."""
	# If the local copy is recent enough
	# then fetching the remote index can be skipped.
class bindbapi(fakedbapi):
	"""dbapi implementation backed by a binarytree: metadata comes from
	the XPAK segment of local .tbz2 packages or from a remote Packages
	index (see _remotepkgs usage in aux_get)."""
	# Extra keys beyond fakedbapi's set that this dbapi understands.
	_known_keys = frozenset(list(fakedbapi._known_keys) + \
		["CHOST", "repository", "USE"])
	def __init__(self, mybintree=None, **kwargs):
		# mybintree: the binarytree this dbapi fronts; its move_ent
		# method is re-exported on this object.
		fakedbapi.__init__(self, **kwargs)
		self.bintree = mybintree
		self.move_ent = mybintree.move_ent
		# Selectively cache metadata in order to optimize dep matching.
		self._aux_cache_keys = set(
			["BUILD_TIME", "CHOST", "DEPEND", "EAPI",
			"HDEPEND", "IUSE", "KEYWORDS",
			"LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE",
			"RDEPEND", "repository", "RESTRICT", "SLOT", "USE", "DEFINED_PHASES"
		# Compact per-cpv storage for the cached keys above.
		self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
83 def match(self, *pargs, **kwargs):
84 if self.bintree and not self.bintree.populated:
85 self.bintree.populate()
86 return fakedbapi.match(self, *pargs, **kwargs)
88 def cpv_exists(self, cpv, myrepo=None):
89 if self.bintree and not self.bintree.populated:
90 self.bintree.populate()
91 return fakedbapi.cpv_exists(self, cpv)
93 def cpv_inject(self, cpv, **kwargs):
94 self._aux_cache.pop(cpv, None)
95 fakedbapi.cpv_inject(self, cpv, **kwargs)
97 def cpv_remove(self, cpv):
98 self._aux_cache.pop(cpv, None)
99 fakedbapi.cpv_remove(self, cpv)
	def aux_get(self, mycpv, wants, myrepo=None):
		"""Return the requested metadata values for mycpv, in the order
		given by wants. Served from the aux cache when possible,
		otherwise read from the local tbz2's XPAK data or the remote
		index. Raises KeyError if the local package file is missing."""
		if self.bintree and not self.bintree.populated:
			self.bintree.populate()
		# Fast path: every requested known key is in the cached subset.
		if not self._known_keys.intersection(
			wants).difference(self._aux_cache_keys):
			aux_cache = self._aux_cache.get(mycpv)
			if aux_cache is not None:
				return [aux_cache.get(x, "") for x in wants]
		mysplit = mycpv.split("/")
		tbz2name = mysplit[1]+".tbz2"
		if not self.bintree._remotepkgs or \
			not self.bintree.isremote(mycpv):
			# Local package: read metadata out of the XPAK segment.
			tbz2_path = self.bintree.getname(mycpv)
			if not os.path.exists(tbz2_path):
				raise KeyError(mycpv)
			metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
			v = metadata_bytes.get(_unicode_encode(k,
				encoding=_encodings['repo.content'],
				errors='backslashreplace'))
				v = _unicode_decode(v,
					encoding=_encodings['repo.content'], errors='replace')
			# Remote package: values come from the cached remote index.
			getitem = self.bintree._remotepkgs[mycpv].get
		mykeys = self._aux_cache_keys.union(wants)
		# myval is None if the key doesn't exist
		# or the tbz2 is corrupt.
			mydata[x] = " ".join(myval.split())
		# Guarantee a non-empty EAPI value (default "0").
		if not mydata.setdefault('EAPI', _unicode_decode('0')):
			mydata['EAPI'] = _unicode_decode('0')
		# Refresh the aux cache with the full cached-key subset.
		aux_cache = self._aux_cache_slot_dict()
		for x in self._aux_cache_keys:
			aux_cache[x] = mydata.get(x, _unicode_decode(''))
		self._aux_cache[mycpv] = aux_cache
		return [mydata.get(x, _unicode_decode('')) for x in wants]
	def aux_update(self, cpv, values):
		"""Rewrite selected metadata inside the package's XPAK segment,
		then re-inject the package so indexes and caches are refreshed.
		@param cpv: package to update
		@param values: dict mapping metadata key -> new value
		"""
		if not self.bintree.populated:
			self.bintree.populate()
		tbz2path = self.bintree.getname(cpv)
		if not os.path.exists(tbz2path):
		mytbz2 = portage.xpak.tbz2(tbz2path)
		mydata = mytbz2.get_data()
		# Encode keys and values to the on-disk repo encoding before
		# merging them into the existing XPAK data.
		for k, v in values.items():
			k = _unicode_encode(k,
				encoding=_encodings['repo.content'], errors='backslashreplace')
			v = _unicode_encode(v,
				encoding=_encodings['repo.content'], errors='backslashreplace')
		for k, v in list(mydata.items()):
		# Write the merged metadata back into the tbz2 file.
		mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
		# inject will clear stale caches via cpv_inject.
		self.bintree.inject(cpv)
174 def cp_list(self, *pargs, **kwargs):
175 if not self.bintree.populated:
176 self.bintree.populate()
177 return fakedbapi.cp_list(self, *pargs, **kwargs)
180 if not self.bintree.populated:
181 self.bintree.populate()
182 return fakedbapi.cp_all(self)
185 if not self.bintree.populated:
186 self.bintree.populate()
187 return fakedbapi.cpv_all(self)
	def getfetchsizes(self, pkg):
		This will raise MissingSignature if SIZE signature is not available,
		or InvalidSignature if SIZE signature is invalid.
		# Make sure local/remote package lists are loaded before lookup.
		if not self.bintree.populated:
			self.bintree.populate()
		# Accept either a cpv string or a Package-like object with .cpv.
		pkg = getattr(pkg, 'cpv', pkg)
		if not self.bintree.isremote(pkg):
		metadata = self.bintree._remotepkgs[pkg]
		size = int(metadata["SIZE"])
		raise portage.exception.MissingSignature("SIZE")
		raise portage.exception.InvalidSignature(
			"SIZE: %s" % metadata["SIZE"])
		# Map the package file's basename to its download size in bytes.
		filesdict[os.path.basename(self.bintree.getname(pkg))] = size
def _pkgindex_cpv_map_latest_build(pkgindex):
	"""
	Given a PackageIndex instance, create a dict of cpv -> metadata map.
	If multiple packages have identical CPV values, prefer the package
	with latest BUILD_TIME value.
	@param pkgindex: A PackageIndex instance.
	@type pkgindex: PackageIndex
	@return: a dict containing entry for the give cpv.
	"""
	for d in pkgindex.packages:
		writemsg(_("!!! Invalid remote binary package: %s\n") % cpv,
		btime = d.get('BUILD_TIME', '')
		other_d = cpv_map.get(cpv)
		if other_d is not None:
			other_btime = other_d.get('BUILD_TIME', '')
			other_btime = int(other_btime)
		# Keep the existing entry when it has a strictly newer BUILD_TIME.
		if other_btime and (not btime or other_btime > btime):
		cpv_map[_pkg_str(cpv)] = d
class binarytree(object):
	"""Tree of binary (.tbz2) packages available in PKGDIR. Maintains
	the local Packages index and, when requested, metadata fetched
	from remote PORTAGE_BINHOST indexes."""
	def __init__(self, _unused=None, pkgdir=None,
		virtual=DeprecationWarning, settings=None):
		# pkgdir and settings are effectively required; _unused and
		# virtual survive only for backward compatibility and warn
		# when callers still pass them.
		raise TypeError("pkgdir parameter is required")
		raise TypeError("settings parameter is required")
		if _unused is not None and _unused != settings['ROOT']:
			warnings.warn("The root parameter of the "
				"portage.dbapi.bintree.binarytree"
				" constructor is now unused. Use "
				"settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)
		if virtual is not DeprecationWarning:
			warnings.warn("The 'virtual' parameter of the "
				"portage.dbapi.bintree.binarytree"
				" constructor is unused",
				DeprecationWarning, stacklevel=2)
		self.pkgdir = normalize_path(pkgdir)
		self.dbapi = bindbapi(self, settings=settings)
		self.update_ents = self.dbapi.update_ents
		self.move_slot_ent = self.dbapi.move_slot_ent
		self._remote_has_index = False
		self._remotepkgs = None # remote metadata indexed by cpv
		self.settings = settings
		self._pkgindex_uri = {}
		self._populating = False
		# Whether the legacy $PKGDIR/All layout is in use.
		self._all_directory = os.path.isdir(
			os.path.join(self.pkgdir, "All"))
		self._pkgindex_version = 0
		self._pkgindex_hashes = ["MD5","SHA1"]
		self._pkgindex_file = os.path.join(self.pkgdir, "Packages")
		self._pkgindex_keys = self.dbapi._aux_cache_keys.copy()
		self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"])
		self._pkgindex_aux_keys = \
			["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI",
			"HDEPEND", "IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES",
			"PROVIDE", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES",
		self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
		# Keys whose USE conditionals are evaluated before indexing.
		self._pkgindex_use_evaluated_keys = \
			("DEPEND", "HDEPEND", "LICENSE", "RDEPEND",
			"PDEPEND", "PROPERTIES", "PROVIDE")
		self._pkgindex_header_keys = set([
			"ACCEPT_KEYWORDS", "ACCEPT_LICENSE",
			"ACCEPT_PROPERTIES", "CBUILD",
			"CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "FEATURES",
			"GENTOO_MIRRORS", "INSTALL_MASK", "SYNC", "USE"])
		self._pkgindex_default_pkg_data = {
			"DEFINED_PHASES" : "",
		self._pkgindex_inherited_keys = ["CHOST", "repository"]
		# Populate the header with appropriate defaults.
		self._pkgindex_default_header_data = {
			"CHOST" : self.settings.get("CHOST", ""),
		# It is especially important to populate keys like
		# "repository" that save space when entries can
		# inherit them from the header. If an existing
		# pkgindex header already defines these keys, then
		# they will appropriately override our defaults.
		main_repo = self.settings.repositories.mainRepo()
		if main_repo is not None and not main_repo.missing_repo_name:
			self._pkgindex_default_header_data["repository"] = \
		# Long key names are abbreviated in the on-disk index.
		self._pkgindex_translated_keys = (
			("DESCRIPTION" , "DESC"),
			("repository" , "REPO"),
		self._pkgindex_allowed_pkg_keys = set(chain(
			self._pkgindex_aux_keys,
			self._pkgindex_hashes,
			self._pkgindex_default_pkg_data,
			self._pkgindex_inherited_keys,
			chain(*self._pkgindex_translated_keys)
370 warnings.warn("The root attribute of "
371 "portage.dbapi.bintree.binarytree"
372 " is deprecated. Use "
373 "settings['ROOT'] instead.",
374 DeprecationWarning, stacklevel=3)
375 return self.settings['ROOT']
	def move_ent(self, mylist, repo_match=None):
		"""Apply a package-move update to all matching binary packages:
		rewrite their XPAK metadata (PF/CATEGORY/ebuild name) and move
		the .tbz2 files to their new location.
		@param mylist: an update command, e.g. ["move", origcp, newcp]
		@param repo_match: optional callable used to filter by repository
		"""
		if not self.populated:
		for atom in (origcp, newcp):
			if not isjustname(atom):
				raise InvalidPackageName(str(atom))
		mynewcat = catsplit(newcp)[0]
		origmatches=self.dbapi.cp_list(origcp)
		for mycpv in origmatches:
			mycpv = self.dbapi._pkg_str(mycpv, None)
			except (KeyError, InvalidData):
			mycpv_cp = portage.cpv_getkey(mycpv)
			if mycpv_cp != origcp:
				# Ignore PROVIDE virtual match.
			if repo_match is not None \
				and not repo_match(mycpv.repo):
			# Use isvalidatom() to check if this move is valid for the
			# EAPI (characters allowed in package names may vary).
			if not isvalidatom(newcp, eapi=mycpv.eapi):
			mynewcpv = mycpv.replace(mycpv_cp, _unicode(newcp), 1)
			myoldpkg = catsplit(mycpv)[1]
			mynewpkg = catsplit(mynewcpv)[1]
			# Refuse to clobber an existing destination package.
			if (mynewpkg != myoldpkg) and os.path.exists(self.getname(mynewcpv)):
				writemsg(_("!!! Cannot update binary: Destination exists.\n"),
				writemsg("!!! "+mycpv+" -> "+mynewcpv+"\n", noiselevel=-1)
			tbz2path = self.getname(mycpv)
			if os.path.exists(tbz2path) and not os.access(tbz2path,os.W_OK):
				writemsg(_("!!! Cannot update readonly binary: %s\n") % mycpv,
			# Rewrite the XPAK metadata to reflect the new name.
			mytbz2 = portage.xpak.tbz2(tbz2path)
			mydata = mytbz2.get_data()
			updated_items = update_dbentries([mylist], mydata, parent=mycpv)
			mydata.update(updated_items)
				_unicode_encode(mynewpkg + "\n",
					encoding=_encodings['repo.content'])
			mydata[b'CATEGORY'] = \
				_unicode_encode(mynewcat + "\n",
					encoding=_encodings['repo.content'])
			if mynewpkg != myoldpkg:
				ebuild_data = mydata.pop(_unicode_encode(myoldpkg + '.ebuild',
					encoding=_encodings['repo.content']), None)
				if ebuild_data is not None:
					mydata[_unicode_encode(mynewpkg + '.ebuild',
						encoding=_encodings['repo.content'])] = ebuild_data
			mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
			self.dbapi.cpv_remove(mycpv)
			del self._pkg_paths[mycpv]
			new_path = self.getname(mynewcpv)
			self._pkg_paths[mynewcpv] = os.path.join(
				*new_path.split(os.path.sep)[-2:])
			# NOTE(review): comparing a path string to the tbz2 object
			# is always True — this likely meant `tbz2path`; confirm.
			if new_path != mytbz2:
				self._ensure_dir(os.path.dirname(new_path))
				_movefile(tbz2path, new_path, mysettings=self.settings)
				self._remove_symlink(mycpv)
				if new_path.split(os.path.sep)[-2] == "All":
					self._create_symlink(mynewcpv)
			self.inject(mynewcpv)
	def _remove_symlink(self, cpv):
		"""Remove a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink and also remove
		the ${PKGDIR}/${CATEGORY} directory if empty. The file will not be
		removed if os.path.islink() returns False."""
		mycat, mypkg = catsplit(cpv)
		mylink = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
		if os.path.islink(mylink):
			"""Only remove it if it's really a link so that this method never
			removes a real package that was placed here to avoid a collision."""
		# Prune the category directory if now empty; the usual
		# "missing / not empty" errno values are tolerated.
		os.rmdir(os.path.join(self.pkgdir, mycat))
		if e.errno not in (errno.ENOENT,
			errno.ENOTEMPTY, errno.EEXIST):
	def _create_symlink(self, cpv):
		"""Create a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink (and
		${PKGDIR}/${CATEGORY} directory, if necessary). Any file that may
		exist in the location of the symlink will first be removed."""
		mycat, mypkg = catsplit(cpv)
		full_path = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
		self._ensure_dir(os.path.dirname(full_path))
		# Only a missing file (ENOENT) is tolerated when unlinking.
		if e.errno != errno.ENOENT:
		# Relative target keeps PKGDIR relocatable.
		os.symlink(os.path.join("..", "All", mypkg + ".tbz2"), full_path)
	def prevent_collision(self, cpv):
		"""Make sure that the file location ${PKGDIR}/All/${PF}.tbz2 is safe to
		use for a given cpv. If a collision will occur with an existing
		package from another category, the existing package will be bumped to
		${PKGDIR}/${CATEGORY}/${PF}.tbz2 so that both can coexist."""
		if not self._all_directory:
		# Copy group permissions for new directories that
		# may have been created.
		for path in ("All", catsplit(cpv)[0]):
			path = os.path.join(self.pkgdir, path)
			self._ensure_dir(path)
			if not os.access(path, os.W_OK):
				raise PermissionDenied("access('%s', W_OK)" % path)
		full_path = self.getname(cpv)
		if "All" == full_path.split(os.path.sep)[-2]:
		"""Move a colliding package if it exists. Code below this point only
		executes in rare cases."""
		mycat, mypkg = catsplit(cpv)
		myfile = mypkg + ".tbz2"
		mypath = os.path.join("All", myfile)
		dest_path = os.path.join(self.pkgdir, mypath)
		st = os.lstat(dest_path)
		if stat.S_ISLNK(st.st_mode):
		if os.path.exists(dest_path):
			# Identify the owner of the colliding file and bump it
			# into its own category directory.
			# For invalid packages, other_cat could be None.
			other_cat = portage.xpak.tbz2(dest_path).getfile(b"CATEGORY")
			other_cat = _unicode_decode(other_cat,
				encoding=_encodings['repo.content'], errors='replace')
			other_cat = other_cat.strip()
			other_cpv = other_cat + "/" + mypkg
			self._move_from_all(other_cpv)
			self.inject(other_cpv)
		self._move_to_all(cpv)
	def _ensure_dir(self, path):
		"""
		Create the specified directory. Also, copy gid and group mode
		bits from self.pkgdir if possible.
		@param cat_dir: Absolute path of the directory to be created.
		@type cat_dir: String
		"""
		pkgdir_st = os.stat(self.pkgdir)
		pkgdir_gid = pkgdir_st.st_gid
		# Keep only the setgid bit and group rwx bits from pkgdir.
		pkgdir_grp_mode = 0o2070 & pkgdir_st.st_mode
		ensure_dirs(path, gid=pkgdir_gid, mode=pkgdir_grp_mode, mask=0)
		except PortageException:
			if not os.path.isdir(path):
	def _move_to_all(self, cpv):
		"""If the file exists, move it. Whether or not it exists, update state
		for future getname() calls."""
		mycat, mypkg = catsplit(cpv)
		myfile = mypkg + ".tbz2"
		# Record the new location first so getname() stays consistent.
		self._pkg_paths[cpv] = os.path.join("All", myfile)
		src_path = os.path.join(self.pkgdir, mycat, myfile)
		mystat = os.lstat(src_path)
		# Only a regular file is physically moved; symlinks and missing
		# files get just the path-cache update above.
		if mystat and stat.S_ISREG(mystat.st_mode):
			self._ensure_dir(os.path.join(self.pkgdir, "All"))
			dest_path = os.path.join(self.pkgdir, "All", myfile)
			_movefile(src_path, dest_path, mysettings=self.settings)
			self._create_symlink(cpv)
582 def _move_from_all(self, cpv):
583 """Move a package from ${PKGDIR}/All/${PF}.tbz2 to
584 ${PKGDIR}/${CATEGORY}/${PF}.tbz2 and update state from getname calls."""
585 self._remove_symlink(cpv)
586 mycat, mypkg = catsplit(cpv)
587 myfile = mypkg + ".tbz2"
588 mypath = os.path.join(mycat, myfile)
589 dest_path = os.path.join(self.pkgdir, mypath)
590 self._ensure_dir(os.path.dirname(dest_path))
591 src_path = os.path.join(self.pkgdir, "All", myfile)
592 _movefile(src_path, dest_path, mysettings=self.settings)
593 self._pkg_paths[cpv] = mypath
	def populate(self, getbinpkgs=0):
		"populates the binarytree"
		# Hold the Packages index lock while populating, but only when
		# the pkgdir is writable; _populating guards reentrancy.
		if os.access(self.pkgdir, os.W_OK):
			pkgindex_lock = lockfile(self._pkgindex_file,
		self._populating = True
		self._populate(getbinpkgs)
		unlockfile(pkgindex_lock)
		self._populating = False
	def _populate(self, getbinpkgs=0):
		"""Scan PKGDIR for .tbz2 packages, reconcile them against the
		Packages index (rewriting it when stale and writable), and —
		when getbinpkgs is true — merge metadata fetched from each
		PORTAGE_BINHOST remote index."""
		if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
		# Clear all caches in case populate is called multiple times
		# as may be the case when _global_updates calls populate()
		# prior to performing package moves since it only wants to
		# operate on local packages (getbinpkgs=0).
		self._remotepkgs = None
		self.dbapi._clear_cache()
		self.dbapi._aux_cache.clear()
		self._pkg_paths = pkg_paths
		# --- Phase 1: scan local package directories ---
		dirs = listdir(self.pkgdir, dirsonly=True, EmptyOnError=True)
		dirs.insert(0, "All")
		pkgindex = self._load_pkgindex()
		if not self._pkgindex_version_supported(pkgindex):
			pkgindex = self._new_pkgindex()
		header = pkgindex.header
		for d in pkgindex.packages:
			metadata[d["CPV"]] = d
		update_pkgindex = False
		for myfile in listdir(os.path.join(self.pkgdir, mydir)):
			if not myfile.endswith(".tbz2"):
			mypath = os.path.join(mydir, myfile)
			full_path = os.path.join(self.pkgdir, mypath)
			s = os.lstat(full_path)
			if stat.S_ISLNK(s.st_mode):
			# Validate data from the package index and try to avoid
			# reading the xpak if possible.
			d = metadata.get(mydir+"/"+myfile[:-5])
			for mycpv in metadata:
				mycat, mypf = catsplit(mycpv)
					mypf, []).append(metadata[mycpv])
			possibilities = pf_index.get(myfile[:-5])
			# An index entry matches only when MTIME and SIZE agree
			# with the file and all required keys are present.
			for d in possibilities:
				if long(d["MTIME"]) != s[stat.ST_MTIME]:
				except (KeyError, ValueError):
				if long(d["SIZE"]) != long(s.st_size):
				except (KeyError, ValueError):
				if not self._pkgindex_keys.difference(d):
			if mycpv in pkg_paths:
				# discard duplicates (All/ is preferred)
			mycpv = _pkg_str(mycpv)
			pkg_paths[mycpv] = mypath
			# update the path if the package has been moved
			oldpath = d.get("PATH")
			if oldpath and oldpath != mypath:
				update_pkgindex = True
			if mypath != mycpv + ".tbz2":
				update_pkgindex = True
				update_pkgindex = True
			self.dbapi.cpv_inject(mycpv)
			if not self.dbapi._aux_cache_keys.difference(d):
				aux_cache = self.dbapi._aux_cache_slot_dict()
				for k in self.dbapi._aux_cache_keys:
				self.dbapi._aux_cache[mycpv] = aux_cache
			# Index miss: fall back to reading the XPAK metadata.
			if not os.access(full_path, os.R_OK):
				writemsg(_("!!! Permission denied to read " \
					"binary package: '%s'\n") % full_path,
				self.invalids.append(myfile[:-5])
			metadata_bytes = portage.xpak.tbz2(full_path).get_data()
			mycat = _unicode_decode(metadata_bytes.get(b"CATEGORY", ""),
				encoding=_encodings['repo.content'], errors='replace')
			mypf = _unicode_decode(metadata_bytes.get(b"PF", ""),
				encoding=_encodings['repo.content'], errors='replace')
			slot = _unicode_decode(metadata_bytes.get(b"SLOT", ""),
				encoding=_encodings['repo.content'], errors='replace')
			if not mycat or not mypf or not slot:
				#old-style or corrupt package
				writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
					missing_keys.append("CATEGORY")
					missing_keys.append("PF")
					missing_keys.append("SLOT")
					msg.append(_("Missing metadata key(s): %s.") % \
						", ".join(missing_keys))
				msg.append(_(" This binary package is not " \
					"recoverable and should be deleted."))
				for line in textwrap.wrap("".join(msg), 72):
					writemsg("!!! %s\n" % line, noiselevel=-1)
				self.invalids.append(mypkg)
			mycat = mycat.strip()
			if mycat != mydir and mydir != "All":
			if mypkg != mypf.strip():
			mycpv = mycat + "/" + mypkg
			if mycpv in pkg_paths:
				# All is first, so it's preferred.
			if not self.dbapi._category_re.match(mycat):
				writemsg(_("!!! Binary package has an " \
					"unrecognized category: '%s'\n") % full_path,
				writemsg(_("!!! '%s' has a category that is not" \
					" listed in %setc/portage/categories\n") % \
					(mycpv, self.settings["PORTAGE_CONFIGROOT"]),
			mycpv = _pkg_str(mycpv)
			pkg_paths[mycpv] = mypath
			self.dbapi.cpv_inject(mycpv)
			update_pkgindex = True
			d = metadata.get(mycpv, {})
				if long(d["MTIME"]) != s[stat.ST_MTIME]:
			except (KeyError, ValueError):
				if long(d["SIZE"]) != long(s.st_size):
			except (KeyError, ValueError):
			d["MTIME"] = str(s[stat.ST_MTIME])
			d["SIZE"] = str(s.st_size)
			d.update(zip(self._pkgindex_aux_keys,
				self.dbapi.aux_get(mycpv, self._pkgindex_aux_keys)))
				self._eval_use_flags(mycpv, d)
			except portage.exception.InvalidDependString:
				writemsg(_("!!! Invalid binary package: '%s'\n") % \
					self.getname(mycpv), noiselevel=-1)
				self.dbapi.cpv_remove(mycpv)
			# record location if it's non-default
			if mypath != mycpv + ".tbz2":
			if not self.dbapi._aux_cache_keys.difference(d):
				aux_cache = self.dbapi._aux_cache_slot_dict()
				for k in self.dbapi._aux_cache_keys:
				self.dbapi._aux_cache[mycpv] = aux_cache
		# Drop index entries whose package files no longer exist.
		for cpv in list(metadata):
			if cpv not in pkg_paths:
		# Do not bother to write the Packages index if $PKGDIR/All/ exists
		# since it will provide no benefit due to the need to read CATEGORY
		if update_pkgindex and os.access(self.pkgdir, os.W_OK):
			del pkgindex.packages[:]
			pkgindex.packages.extend(iter(metadata.values()))
			self._update_pkgindex_header(pkgindex.header)
			self._pkgindex_write(pkgindex)
		# --- Phase 2: fetch remote binhost indexes (optional) ---
		if getbinpkgs and not self.settings["PORTAGE_BINHOST"]:
			writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
		if not getbinpkgs or 'PORTAGE_BINHOST' not in self.settings:
		self._remotepkgs = {}
		for base_url in self.settings["PORTAGE_BINHOST"].split():
			parsed_url = urlparse(base_url)
			host = parsed_url.netloc
			port = parsed_url.port
			user, host = host.split("@", 1)
			user_passwd = user + "@"
			user, passwd = user.split(":", 1)
			port_str = ":%s" % (port,)
			if host.endswith(port_str):
				host = host[:-len(port_str)]
			# Local cache location for this binhost's Packages file.
			pkgindex_file = os.path.join(self.settings["EROOT"], CACHE_PATH, "binhost",
				host, parsed_url.path.lstrip("/"), "Packages")
			pkgindex = self._new_pkgindex()
			f = io.open(_unicode_encode(pkgindex_file,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
			remote_timestamp = None
			rmt_idx = self._new_pkgindex()
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				url = base_url.rstrip("/") + "/Packages"
				f = _urlopen(url, if_modified_since=local_timestamp)
				if hasattr(f, 'headers') and f.headers.get('timestamp', ''):
					remote_timestamp = f.headers.get('timestamp')
			except IOError as err:
				if hasattr(err, 'code') and err.code == 304: # not modified (since local_timestamp)
					raise UseCachedCopyOfRemoteIndex()
				# Fallback transports for schemes urlopen can't handle.
				path = parsed_url.path.rstrip("/") + "/Packages"
				if parsed_url.scheme == 'sftp':
					# The sftp command complains about 'Illegal seek' if
					# we try to make it write to /dev/stdout, so use a
					fd, tmp_filename = tempfile.mkstemp()
						port_args = ['-P', "%s" % (port,)]
					proc = subprocess.Popen(['sftp'] + port_args + \
						[user_passwd + host + ":" + path, tmp_filename])
					if proc.wait() != os.EX_OK:
					f = open(tmp_filename, 'rb')
				elif parsed_url.scheme == 'ssh':
						port_args = ['-p', "%s" % (port,)]
					proc = subprocess.Popen(['ssh'] + port_args + \
						[user_passwd + host, '--', 'cat', path],
						stdout=subprocess.PIPE)
					setting = 'FETCHCOMMAND_' + parsed_url.scheme.upper()
					fcmd = self.settings.get(setting)
					fd, tmp_filename = tempfile.mkstemp()
					tmp_dirname, tmp_basename = os.path.split(tmp_filename)
					success = portage.getbinpkg.file_get(url,
						tmp_dirname, fcmd=fcmd, filename=tmp_basename)
						raise EnvironmentError("%s failed" % (setting,))
					f = open(tmp_filename, 'rb')
			f_dec = codecs.iterdecode(f,
				_encodings['repo.content'], errors='replace')
			rmt_idx.readHeader(f_dec)
			if not remote_timestamp: # in case it had not been read from HTTP header
				remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
			if not remote_timestamp:
				# no timestamp in the header, something's wrong
				writemsg(_("\n\n!!! Binhost package index " \
					" has no TIMESTAMP field.\n"), noiselevel=-1)
			if not self._pkgindex_version_supported(rmt_idx):
				writemsg(_("\n\n!!! Binhost package index version" \
					" is not supported: '%s'\n") % \
					rmt_idx.header.get("VERSION"), noiselevel=-1)
			elif local_timestamp != remote_timestamp:
				rmt_idx.readBody(f_dec)
			# Timeout after 5 seconds, in case close() blocks
			# indefinitely (see bug #350139).
			AlarmSignal.register(5)
			AlarmSignal.unregister()
			writemsg("\n\n!!! %s\n" % \
				_("Timed out while closing connection to binhost"),
			except UseCachedCopyOfRemoteIndex:
				writemsg_stdout("\n")
					colorize("GOOD", _("Local copy of remote index is up-to-date and will be used.")) + \
			except EnvironmentError as e:
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % _hide_url_passwd(base_url))
				writemsg("!!! %s\n\n" % str(e))
				if proc.poll() is None:
				if tmp_filename is not None:
					os.unlink(tmp_filename)
			# Cache the freshly fetched index when possible.
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
				ensure_dirs(os.path.dirname(pkgindex_file))
				f = atomic_ofstream(pkgindex_file)
			except (IOError, PortageException):
				if os.access(os.path.dirname(pkgindex_file), os.W_OK):
				# The current user doesn't have permission to cache the
				# file, but that's alright.
			# Organize remote package list as a cpv -> metadata map.
			remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex)
			remote_base_uri = pkgindex.header.get("URI", base_url)
			for cpv, remote_metadata in remotepkgs.items():
				remote_metadata["BASE_URI"] = remote_base_uri
				self._pkgindex_uri[cpv] = url
			self._remotepkgs.update(remotepkgs)
			self._remote_has_index = True
			for cpv in remotepkgs:
				self.dbapi.cpv_inject(cpv)
		# --- Phase 3: reconcile local vs. remote instances ---
		# Remote package instances override local package
		# if they are not identical.
		hash_names = ["SIZE"] + self._pkgindex_hashes
		for cpv, local_metadata in metadata.items():
			remote_metadata = self._remotepkgs.get(cpv)
			if remote_metadata is None:
			# Use digests to compare identity.
			for hash_name in hash_names:
				local_value = local_metadata.get(hash_name)
				if local_value is None:
				remote_value = remote_metadata.get(hash_name)
				if remote_value is None:
				if local_value != remote_value:
			del self._remotepkgs[cpv]
			# Override the local package in the aux_get cache.
			self.dbapi._aux_cache[cpv] = remote_metadata
		# Local package instances override remote instances.
		for cpv in metadata:
			self._remotepkgs.pop(cpv, None)
	def inject(self, cpv, filename=None):
		"""Add a freshly built package to the database. This updates
		$PKGDIR/Packages with the new package metadata (including MD5).
		@param cpv: The cpv of the new package to inject
		@param filename: File path of the package to inject, or None if it's
			already in the location returned by getname()
		@type filename: string
		"""
		mycat, mypkg = catsplit(cpv)
		if not self.populated:
		if filename is None:
			full_path = self.getname(cpv)
			full_path = filename
		s = os.stat(full_path)
		except OSError as e:
			if e.errno != errno.ENOENT:
			writemsg(_("!!! Binary package does not exist: '%s'\n") % full_path,
		# A missing SLOT marks the package as invalid.
		mytbz2 = portage.xpak.tbz2(full_path)
		slot = mytbz2.getfile("SLOT")
		writemsg(_("!!! Invalid binary package: '%s'\n") % full_path,
		self.dbapi.cpv_inject(cpv)
		# Reread the Packages index (in case it's been changed by another
		# process) and then updated it, all while holding a lock.
		pkgindex_lock = None
		created_symlink = False
		pkgindex_lock = lockfile(self._pkgindex_file,
		if filename is not None:
			new_filename = self.getname(cpv)
			samefile = os.path.samefile(filename, new_filename)
			self._ensure_dir(os.path.dirname(new_filename))
			_movefile(filename, new_filename, mysettings=self.settings)
		if self._all_directory and \
			self.getname(cpv).split(os.path.sep)[-2] == "All":
			self._create_symlink(cpv)
			created_symlink = True
		pkgindex = self._load_pkgindex()
		if not self._pkgindex_version_supported(pkgindex):
			pkgindex = self._new_pkgindex()
		# Discard remote metadata to ensure that _pkgindex_entry
		# gets the local metadata. This also updates state for future
		if self._remotepkgs is not None:
			self._remotepkgs.pop(cpv, None)
		# Discard cached metadata to ensure that _pkgindex_entry
		# doesn't return stale metadata.
		self.dbapi._aux_cache.pop(cpv, None)
		d = self._pkgindex_entry(cpv)
		except portage.exception.InvalidDependString:
			writemsg(_("!!! Invalid binary package: '%s'\n") % \
				self.getname(cpv), noiselevel=-1)
			self.dbapi.cpv_remove(cpv)
			del self._pkg_paths[cpv]
		# If found, remove package(s) with duplicate path.
		path = d.get("PATH", "")
		for i in range(len(pkgindex.packages) - 1, -1, -1):
			d2 = pkgindex.packages[i]
			if path and path == d2.get("PATH"):
				# Handle path collisions in $PKGDIR/All
				# when CPV is not identical.
				del pkgindex.packages[i]
			elif cpv == d2.get("CPV"):
				if path == d2.get("PATH", ""):
					del pkgindex.packages[i]
				elif created_symlink and not d2.get("PATH", ""):
					# Delete entry for the package that was just
					# overwritten by a symlink to this package.
					del pkgindex.packages[i]
		pkgindex.packages.append(d)
		self._update_pkgindex_header(pkgindex.header)
		self._pkgindex_write(pkgindex)
		unlockfile(pkgindex_lock)
	def _pkgindex_write(self, pkgindex):
		"""Serialize pkgindex once, then write it to the plain Packages
		file and — when FEATURES=compress-index — to Packages.gz as
		well, stamping both files with the index TIMESTAMP."""
		contents = codecs.getwriter(_encodings['repo.content'])(io.BytesIO())
		pkgindex.write(contents)
		contents = contents.getvalue()
		atime = mtime = long(pkgindex.header["TIMESTAMP"])
		output_files = [(atomic_ofstream(self._pkgindex_file, mode="wb"),
			self._pkgindex_file, None)]
		if "compress-index" in self.settings.features:
			gz_fname = self._pkgindex_file + ".gz"
			fileobj = atomic_ofstream(gz_fname, mode="wb")
			# GzipFile gets the fixed mtime so output is reproducible.
			output_files.append((GzipFile(filename='', mode="wb",
				fileobj=fileobj, mtime=mtime), gz_fname, fileobj))
		for f, fname, f_close in output_files:
			if f_close is not None:
			# some seconds might have elapsed since TIMESTAMP
			os.utime(fname, (atime, mtime))
	def _pkgindex_entry(self, cpv):
		"""
		Performs checksums and evaluates USE flag conditionals.
		Raises InvalidDependString if necessary.
		@return: a dict containing entry for the give cpv.
		"""
		pkg_path = self.getname(cpv)
		d = dict(zip(self._pkgindex_aux_keys,
			self.dbapi.aux_get(cpv, self._pkgindex_aux_keys)))
		# Add the configured digests (e.g. MD5/SHA1) of the tbz2 file.
		d.update(perform_multiple_checksums(
			pkg_path, hashes=self._pkgindex_hashes))
		st = os.stat(pkg_path)
		d["MTIME"] = str(st[stat.ST_MTIME])
		d["SIZE"] = str(st.st_size)
		rel_path = self._pkg_paths[cpv]
		# record location if it's non-default
		if rel_path != cpv + ".tbz2":
			d["PATH"] = rel_path
		self._eval_use_flags(cpv, d)
1180 def _new_pkgindex(self):
1181 return portage.getbinpkg.PackageIndex(
1182 allowed_pkg_keys=self._pkgindex_allowed_pkg_keys,
1183 default_header_data=self._pkgindex_default_header_data,
1184 default_pkg_data=self._pkgindex_default_pkg_data,
1185 inherited_keys=self._pkgindex_inherited_keys,
1186 translated_keys=self._pkgindex_translated_keys)
	def _update_pkgindex_header(self, header):
		"""
		Refresh the Packages index header in place: record the active
		profile (made relative to $PORTDIR/profiles when it lives there),
		the index format VERSION, and selected settings values.
		"""
		portdir = normalize_path(os.path.realpath(self.settings["PORTDIR"]))
		profiles_base = os.path.join(portdir, "profiles") + os.path.sep
		if self.settings.profile_path:
			profile_path = normalize_path(
				os.path.realpath(self.settings.profile_path))
			# Store the profile relative to the profiles dir when possible.
			if profile_path.startswith(profiles_base):
				profile_path = profile_path[len(profiles_base):]
			header["PROFILE"] = profile_path
		header["VERSION"] = str(self._pkgindex_version)
		base_uri = self.settings.get("PORTAGE_BINHOST_HEADER_URI")
		# NOTE(review): the if/else around base_uri appears elided from this
		# view — presumably URI is set when base_uri is truthy, popped otherwise
		header["URI"] = base_uri
		header.pop("URI", None)
		# Mirror each configured header key from settings into the header.
		for k in self._pkgindex_header_keys:
			v = self.settings.get(k, None)
			# NOTE(review): the set-or-pop branch on `v` appears elided here
	def _pkgindex_version_supported(self, pkgindex):
		"""Return whether the index's VERSION header denotes a format this
		code can parse (i.e. not newer than self._pkgindex_version)."""
		version = pkgindex.header.get("VERSION")
		# NOTE(review): the guard/try around int() conversion appears elided
		if int(version) <= self._pkgindex_version:
			# NOTE(review): the return statements appear elided from this view
	def _eval_use_flags(self, cpv, metadata):
		"""
		Normalize metadata["USE"] to the enabled flags that are also
		declared in IUSE, and evaluate USE-conditionals in the
		use-evaluated (*DEPEND-style) keys, rewriting metadata in place.
		Propagates InvalidDependString on malformed dependency strings.
		"""
		use = frozenset(metadata["USE"].split())
		# Strip default-enable/disable markers so flags compare cleanly.
		iuse = set(f.lstrip("-+") for f in metadata["IUSE"].split())
		# Only keep USE flags that the package actually declares in IUSE.
		use = [f for f in use if f in iuse]
		metadata["USE"] = " ".join(use)
		for k in self._pkgindex_use_evaluated_keys:
			# *DEPEND values are tokenized into Atom instances
			if k.endswith('DEPEND'):
				# NOTE(review): token_class selection, the deps lookup and
				# the enclosing try: appear elided from this view
				deps = use_reduce(deps, uselist=raw_use, token_class=token_class)
				deps = paren_enclose(deps)
			except portage.exception.InvalidDependString as e:
				# Report which key failed to reduce; tail of handler elided
				writemsg("%s: %s\n" % (k, str(e)),
	def exists_specific(self, cpv):
		"""Return the (possibly empty) list of matches for the exact
		=cpv atom against this tree's dbapi."""
		if not self.populated:
			# NOTE(review): the self.populate() call appears elided here
		return self.dbapi.match(
			dep_expand("="+cpv, mydb=self.dbapi, settings=self.settings))
	def dep_bestmatch(self, mydep):
		"compatibility method -- all matches, not just visible ones"
		if not self.populated:
			# NOTE(review): the self.populate() call appears elided here
		writemsg("mydep: %s\n" % mydep, 1)
		mydep = dep_expand(mydep, mydb=self.dbapi, settings=self.settings)
		writemsg("mydep: %s\n" % mydep, 1)
		mykey = dep_getkey(mydep)
		writemsg("mykey: %s\n" % mykey, 1)
		# best() selects the highest version among every match, ignoring
		# visibility — hence "all matches, not just visible ones" above.
		mymatch = best(match_from_list(mydep,self.dbapi.cp_list(mykey)))
		writemsg("mymatch: %s\n" % mymatch, 1)
		# NOTE(review): the None-normalization and return appear elided here
	def getname(self, pkgname):
		"""Returns a file location for this package. The default location is
		${PKGDIR}/All/${PF}.tbz2, but will be ${PKGDIR}/${CATEGORY}/${PF}.tbz2
		in the rare event of a collision. The prevent_collision() method can
		be called to ensure that ${PKGDIR}/All/${PF}.tbz2 is available for a
		specific package."""
		if not self.populated:
			# NOTE(review): populate() call and the mycpv assignment appear
			# elided from this view
		mypath = self._pkg_paths.get(mycpv, None)
		# NOTE(review): the `if mypath is not None:` guard for this cached
		# early return appears elided from this view
		return os.path.join(self.pkgdir, mypath)
		mycat, mypkg = catsplit(mycpv)
		if self._all_directory:
			mypath = os.path.join("All", mypkg + ".tbz2")
			# On a filename collision in All/, fall back to the category dir.
			if mypath in self._pkg_paths.values():
				mypath = os.path.join(mycat, mypkg + ".tbz2")
		# NOTE(review): an `else:` appears elided before this line
		mypath = os.path.join(mycat, mypkg + ".tbz2")
		self._pkg_paths[mycpv] = mypath # cache for future lookups
		return os.path.join(self.pkgdir, mypath)
	def isremote(self, pkgname):
		"""Returns true if the package is kept remotely and it has not been
		downloaded (or it is only partially downloaded)."""
		if self._remotepkgs is None or pkgname not in self._remotepkgs:
			# NOTE(review): the `return False` appears elided from this view
		# Presence in self._remotepkgs implies that it's remote. When a
		# package is downloaded, state is updated by self.inject().
		# NOTE(review): the final `return True` appears elided from this view
1296 def get_pkgindex_uri(self, pkgname):
1297 """Returns the URI to the Packages file for a given package."""
1298 return self._pkgindex_uri.get(pkgname)
	def gettbz2(self, pkgname):
		"""Fetches the package from a remote site, if necessary. Attempts to
		resume if the file appears to be partially downloaded."""
		tbz2_path = self.getname(pkgname)
		tbz2name = os.path.basename(tbz2_path)
		if os.path.exists(tbz2_path):
			# A file that exists and is not flagged invalid is complete.
			if tbz2name[:-5] not in self.invalids:
				# NOTE(review): the early return and the resume branch setup
				# appear elided from this view
				writemsg(_("Resuming download of this tbz2, but it is possible that it is corrupt.\n"),

		mydest = os.path.dirname(self.getname(pkgname))
		self._ensure_dir(mydest)
		# urljoin doesn't work correctly with unrecognized protocols like sftp
		if self._remote_has_index:
			rel_url = self._remotepkgs[pkgname].get("PATH")
			# NOTE(review): the fallback guard (`if not rel_url:`) appears
			# elided before this line
			rel_url = pkgname+".tbz2"
			remote_base_uri = self._remotepkgs[pkgname]["BASE_URI"]
			url = remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/")
		# NOTE(review): an `else:` appears elided before this line
		url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name
		protocol = urlparse(url)[0]
		fcmd_prefix = "FETCHCOMMAND"
		# NOTE(review): the resume condition appears elided before this line
		fcmd_prefix = "RESUMECOMMAND"
		# Prefer a protocol-specific fetch command, e.g. FETCHCOMMAND_HTTP.
		fcmd = self.settings.get(fcmd_prefix + "_" + protocol.upper())
		# NOTE(review): an `if not fcmd:` guard appears elided before this line
		fcmd = self.settings.get(fcmd_prefix)
		success = portage.getbinpkg.file_get(url, mydest, fcmd=fcmd)
		# On fetch failure: discard any partial file, then raise.
		# NOTE(review): the failure guard and try/except around unlink
		# appear elided from this view
		os.unlink(self.getname(pkgname))
		raise portage.exception.FileNotFound(mydest)
		self.inject(pkgname)
	def _load_pkgindex(self):
		"""Read and parse the on-disk Packages file; a missing or
		unreadable file yields an empty PackageIndex rather than an error."""
		pkgindex = self._new_pkgindex()
		# NOTE(review): a `try:` appears elided before the open call
		f = io.open(_unicode_encode(self._pkgindex_file,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'],
		except EnvironmentError:
			# NOTE(review): the read/close logic and the return of pkgindex
			# appear elided from this view
	def digestCheck(self, pkg):
		"""
		Verify digests for the given package and raise DigestException
		if verification fails.

		@return: True if digests could be located, False otherwise.
		"""
		# NOTE(review): the cpv assignment from pkg appears elided here
		if not isinstance(cpv, basestring):
			# NOTE(review): cpv extraction from a pkg object appears elided

		pkg_path = self.getname(cpv)

		# Locate metadata: from the local Packages index for local packages,
		# otherwise from the remote-package cache.
		if self._remotepkgs is None or cpv not in self._remotepkgs:
			for d in self._load_pkgindex().packages:
				# NOTE(review): the CPV comparison/break appear elided here
			metadata = self._remotepkgs[cpv]
		if metadata is None:
			# NOTE(review): the `return False` and the digests-collection
			# loop setup appear elided from this view

		# Collect every known hash value present in the metadata.
		for k in hashfunc_map:
			# NOTE(review): the get/skip/assign body appears elided here

		if "SIZE" in metadata:
			# NOTE(review): a try: appears elided before the int() conversion
			digests["size"] = int(metadata["SIZE"])
			# A non-integer SIZE is reported but not fatal.
			writemsg(_("!!! Malformed SIZE attribute in remote " \
				"metadata for '%s'\n") % cpv)

		# Optionally restrict which hash types are actually checked.
		hash_filter = _hash_filter(
			self.settings.get("PORTAGE_CHECKSUM_FILTER", ""))
		if not hash_filter.transparent:
			digests = _apply_hash_filter(digests, hash_filter)
		eout.quiet = self.settings.get("PORTAGE_QUIET") == "1"
		# Fast check first; fall back to full verification for diagnostics.
		ok, st = _check_distfile(pkg_path, digests, eout, show_errors=0)
		ok, reason = verify_all(pkg_path, digests)
		raise portage.exception.DigestException(
			(pkg_path,) + tuple(reason))
		# NOTE(review): the surrounding guards and final `return True`
		# appear elided from this view
1415 def getslot(self, mycatpkg):
1416 "Get a slot for a catpkg; assume it exists."
1419 myslot = self.dbapi._pkg_str(mycatpkg, None).slot