1 # Copyright 1998-2009 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
4 __all__ = ["bindbapi", "binarytree"]
7 portage.proxy.lazyimport.lazyimport(globals(),
8 'portage.dbapi.dep_expand:dep_expand',
9 'portage.dep:dep_getkey,isjustname,match_from_list',
10 'portage.output:EOutput,colorize',
11 'portage.package.ebuild.doebuild:_vdb_use_conditional_atoms',
12 'portage.update:update_dbentries',
13 'portage.util:atomic_ofstream,ensure_dirs,normalize_path,' + \
14 'writemsg,writemsg_stdout',
15 'portage.util.listdir:listdir',
16 'portage.versions:best,catpkgsplit,catsplit',
19 from portage.cache.mappings import slot_dict_class
20 from portage.dbapi.virtual import fakedbapi
21 from portage.dep import use_reduce, paren_enclose
22 from portage.exception import InvalidPackageName, \
23 PermissionDenied, PortageException
24 from portage.localization import _
26 from portage import _movefile
27 from portage import os
28 from portage import _encodings
29 from portage import _unicode_decode
30 from portage import _unicode_encode
31 from portage.package.ebuild.fetch import _check_distfile
38 from itertools import chain
40 if sys.hexversion >= 0x3000000:
class bindbapi(fakedbapi):
	"""A fakedbapi subclass that serves metadata from binary packages
	(tbz2 files) managed by an associated binarytree instance."""
	# Extend the keys known to fakedbapi with binary-package specifics.
	_known_keys = frozenset(list(fakedbapi._known_keys) + \
		["CHOST", "repository", "USE"])
	def __init__(self, mybintree=None, **kwargs):
		"""
		@param mybintree: the binarytree this dbapi fronts for
			(NOTE(review): accessed unconditionally below, so None
			is only safe if move_ent is never used — confirm callers)
		"""
		fakedbapi.__init__(self, **kwargs)
		self.bintree = mybintree
		self.move_ent = mybintree.move_ent
		# Selectively cache metadata in order to optimize dep matching.
		self._aux_cache_keys = set(
			["BUILD_TIME", "CHOST", "DEPEND", "EAPI", "IUSE", "KEYWORDS",
			"LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE",
			"RDEPEND", "repository", "RESTRICT", "SLOT", "USE", "DEFINED_PHASES",
		self._aux_cache_slot_dict = slot_dict_class(self._aux_cache_keys)
62 def match(self, *pargs, **kwargs):
63 if self.bintree and not self.bintree.populated:
64 self.bintree.populate()
65 return fakedbapi.match(self, *pargs, **kwargs)
67 def cpv_inject(self, cpv, **kwargs):
68 self._aux_cache.pop(cpv, None)
69 fakedbapi.cpv_inject(self, cpv, **kwargs)
71 def cpv_remove(self, cpv):
72 self._aux_cache.pop(cpv, None)
73 fakedbapi.cpv_remove(self, cpv)
	def aux_get(self, mycpv, wants):
		"""Return one metadata value per key in wants for mycpv.

		Values are served from self._aux_cache when every requested key
		is cached; otherwise they come from the local tbz2's xpak data
		or, for remote packages, from the remote index metadata.
		"""
		if self.bintree and not self.bintree.populated:
			self.bintree.populate()
		# Fast path: request touches only cached keys and we have a
		# cache entry for this cpv.
		if not self._known_keys.intersection(
			wants).difference(self._aux_cache_keys):
			aux_cache = self._aux_cache.get(mycpv)
			if aux_cache is not None:
				return [aux_cache.get(x, "") for x in wants]
		mysplit = mycpv.split("/")
		tbz2name = mysplit[1]+".tbz2"
		# Local package: read metadata directly from the xpak segment.
		if not self.bintree._remotepkgs or \
			not self.bintree.isremote(mycpv):
			tbz2_path = self.bintree.getname(mycpv)
			if not os.path.exists(tbz2_path):
			metadata_bytes = portage.xpak.tbz2(tbz2_path).get_data()
				# xpak stores raw bytes; decode to unicode on read.
				v = metadata_bytes.get(_unicode_encode(k,
					encoding=_encodings['repo.content'],
					errors='backslashreplace'))
					v = _unicode_decode(v,
						encoding=_encodings['repo.content'], errors='replace')
			# Remote package: metadata comes from the remote index.
			getitem = self.bintree._remotepkgs[mycpv].get
		mykeys = self._aux_cache_keys.union(wants)
			# myval is None if the key doesn't exist
			# or the tbz2 is corrupt.
			# Collapse internal whitespace to single spaces.
			mydata[x] = " ".join(myval.split())
		# An empty or missing EAPI is normalized to "0".
		if not mydata.setdefault('EAPI', _unicode_decode('0')):
			mydata['EAPI'] = _unicode_decode('0')
		# Refresh the selective metadata cache for future lookups.
		aux_cache = self._aux_cache_slot_dict()
		for x in self._aux_cache_keys:
			aux_cache[x] = mydata.get(x, _unicode_decode(''))
		self._aux_cache[mycpv] = aux_cache
		return [mydata.get(x, _unicode_decode('')) for x in wants]
	def aux_update(self, cpv, values):
		"""Rewrite metadata entries inside the package's xpak segment.

		@param cpv: package to update
		@param values: dict of metadata key -> new value
		"""
		if not self.bintree.populated:
			self.bintree.populate()
		tbz2path = self.bintree.getname(cpv)
		if not os.path.exists(tbz2path):
		mytbz2 = portage.xpak.tbz2(tbz2path)
		mydata = mytbz2.get_data()
		# xpak data is byte-oriented: encode incoming unicode
		# keys/values before merging them into mydata.
		for k, v in values.items():
			k = _unicode_encode(k,
				encoding=_encodings['repo.content'], errors='backslashreplace')
			v = _unicode_encode(v,
				encoding=_encodings['repo.content'], errors='backslashreplace')
		for k, v in list(mydata.items()):
		# Write the merged metadata back into the tbz2 file.
		mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
		# inject will clear stale caches via cpv_inject.
		self.bintree.inject(cpv)
148 def cp_list(self, *pargs, **kwargs):
149 if not self.bintree.populated:
150 self.bintree.populate()
151 return fakedbapi.cp_list(self, *pargs, **kwargs)
154 if not self.bintree.populated:
155 self.bintree.populate()
156 return fakedbapi.cp_all(self)
159 if not self.bintree.populated:
160 self.bintree.populate()
161 return fakedbapi.cpv_all(self)
def _pkgindex_cpv_map_latest_build(pkgindex):
	"""
	Given a PackageIndex instance, create a dict of cpv -> metadata map.
	If multiple packages have identical CPV values, prefer the package
	with latest BUILD_TIME value.
	@param pkgindex: A PackageIndex instance.
	@type pkgindex: PackageIndex
	@rtype: dict
	@returns: a dict containing an entry for each given cpv.
	"""
	for d in pkgindex.packages:
		btime = d.get('BUILD_TIME', '')
		# If an entry with the same CPV was already seen, keep the one
		# with the larger (newer) BUILD_TIME.
		other_d = cpv_map.get(cpv)
		if other_d is not None:
			other_btime = other_d.get('BUILD_TIME', '')
				other_btime = int(other_btime)
			if other_btime and (not btime or other_btime > btime):
class binarytree(object):
	"this tree scans for a list of all packages available in PKGDIR"
	def __init__(self, root, pkgdir, virtual=None, settings=None):
		"""
		@param root: root path used by the dbapi layer
		@param pkgdir: $PKGDIR, the directory holding .tbz2 packages
		@param settings: portage config instance
		"""
		#self.pkgdir=settings["PKGDIR"]
		self.pkgdir = normalize_path(pkgdir)
		self.dbapi = bindbapi(self, settings=settings)
		self.update_ents = self.dbapi.update_ents
		self.move_slot_ent = self.dbapi.move_slot_ent
		# State describing the remote binhost, filled in by _populate().
		self._remote_has_index = False
		self._remote_base_uri = None
		self._remotepkgs = None # remote metadata indexed by cpv
		self.__remotepkgs = {}  # indexed by tbz2 name (deprecated)
		self.settings = settings
		self._populating = False
		# Legacy layout: packages live in $PKGDIR/All with per-category
		# symlinks; detected by the presence of the All subdirectory.
		self._all_directory = os.path.isdir(
			os.path.join(self.pkgdir, "All"))
		self._pkgindex_version = 0
		self._pkgindex_hashes = ["MD5","SHA1"]
		self._pkgindex_file = os.path.join(self.pkgdir, "Packages")
		# Keys that must be present in an index entry for it to be
		# usable without reading the tbz2's xpak.
		self._pkgindex_keys = self.dbapi._aux_cache_keys.copy()
		self._pkgindex_keys.update(["CPV", "MTIME", "SIZE"])
		self._pkgindex_aux_keys = \
			["BUILD_TIME", "CHOST", "DEPEND", "DESCRIPTION", "EAPI",
			"IUSE", "KEYWORDS", "LICENSE", "PDEPEND", "PROPERTIES",
			"PROVIDE", "RDEPEND", "repository", "SLOT", "USE", "DEFINED_PHASES",
		self._pkgindex_aux_keys = list(self._pkgindex_aux_keys)
		# Keys whose values are re-evaluated against the package's USE
		# flags before being written to the index.
		self._pkgindex_use_evaluated_keys = \
			("LICENSE", "RDEPEND", "DEPEND",
			"PDEPEND", "PROPERTIES", "PROVIDE")
		# Config variables recorded in the Packages index header.
		self._pkgindex_header_keys = set([
			"ACCEPT_KEYWORDS", "ACCEPT_LICENSE",
			"ACCEPT_PROPERTIES", "CBUILD",
			"CHOST", "CONFIG_PROTECT", "CONFIG_PROTECT_MASK", "FEATURES",
			"GENTOO_MIRRORS", "INSTALL_MASK", "SYNC", "USE"])
		self._pkgindex_default_pkg_data = {
			"DEFINED_PHASES" : "",
		# Per-package keys inherited from the index header when absent.
		self._pkgindex_inherited_keys = ["CHOST", "repository"]
		self._pkgindex_default_header_data = {
		# Long key -> abbreviated on-disk key translations.
		self._pkgindex_translated_keys = (
			("DESCRIPTION" , "DESC"),
			("repository" , "REPO"),
		self._pkgindex_allowed_pkg_keys = set(chain(
			self._pkgindex_aux_keys,
			self._pkgindex_hashes,
			self._pkgindex_default_pkg_data,
			self._pkgindex_inherited_keys,
			self._pkgindex_default_header_data,
			chain(*self._pkgindex_translated_keys)
	def move_ent(self, mylist, repo_match=None):
		"""Apply a package move (rename) update to matching binary
		packages: rewrite their xpak metadata (PF, CATEGORY, ebuild
		name) and relocate the tbz2 files accordingly.

		@param mylist: a package-move update entry
		@param repo_match: optional callable filtering by repository
		"""
		if not self.populated:
		for atom in (origcp, newcp):
			if not isjustname(atom):
				raise InvalidPackageName(str(atom))
		mynewcat = catsplit(newcp)[0]
		origmatches=self.dbapi.cp_list(origcp)
		for mycpv in origmatches:
			mycpv_cp = portage.cpv_getkey(mycpv)
			if mycpv_cp != origcp:
				# Ignore PROVIDE virtual match.
			if repo_match is not None \
				and not repo_match(self.aux_get(mycpv, ['repository'])[0]):
			mynewcpv = mycpv.replace(mycpv_cp, str(newcp), 1)
			myoldpkg = catsplit(mycpv)[1]
			mynewpkg = catsplit(mynewcpv)[1]
			# Refuse to clobber an existing package at the destination.
			if (mynewpkg != myoldpkg) and os.path.exists(self.getname(mynewcpv)):
				writemsg(_("!!! Cannot update binary: Destination exists.\n"),
				writemsg("!!! "+mycpv+" -> "+mynewcpv+"\n", noiselevel=-1)
			tbz2path = self.getname(mycpv)
			if os.path.exists(tbz2path) and not os.access(tbz2path,os.W_OK):
				writemsg(_("!!! Cannot update readonly binary: %s\n") % mycpv,
			# Rewrite the xpak metadata in memory, then recompose.
			mytbz2 = portage.xpak.tbz2(tbz2path)
			mydata = mytbz2.get_data()
			updated_items = update_dbentries([mylist], mydata)
			mydata.update(updated_items)
			mydata[_unicode_encode('PF',
				encoding=_encodings['repo.content'])] = \
				_unicode_encode(mynewpkg + "\n",
				encoding=_encodings['repo.content'])
			mydata[_unicode_encode('CATEGORY',
				encoding=_encodings['repo.content'])] = \
				_unicode_encode(mynewcat + "\n",
				encoding=_encodings['repo.content'])
			# Rename the embedded ebuild to match the new package name.
			if mynewpkg != myoldpkg:
				ebuild_data = mydata.pop(_unicode_encode(myoldpkg + '.ebuild',
					encoding=_encodings['repo.content']), None)
				if ebuild_data is not None:
					mydata[_unicode_encode(mynewpkg + '.ebuild',
						encoding=_encodings['repo.content'])] = ebuild_data
			mytbz2.recompose_mem(portage.xpak.xpak_mem(mydata))
			self.dbapi.cpv_remove(mycpv)
			del self._pkg_paths[mycpv]
			new_path = self.getname(mynewcpv)
			self._pkg_paths[mynewcpv] = os.path.join(
				*new_path.split(os.path.sep)[-2:])
			# NOTE(review): this compares a path string to an xpak tbz2
			# object, so it is always true — looks like it was meant to
			# be "new_path != tbz2path"; confirm before changing.
			if new_path != mytbz2:
				self._ensure_dir(os.path.dirname(new_path))
				_movefile(tbz2path, new_path, mysettings=self.settings)
				self._remove_symlink(mycpv)
				if new_path.split(os.path.sep)[-2] == "All":
					self._create_symlink(mynewcpv)
			self.inject(mynewcpv)
	def _remove_symlink(self, cpv):
		"""Remove a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink and also remove
		the ${PKGDIR}/${CATEGORY} directory if empty.  The file will not be
		removed if os.path.islink() returns False."""
		mycat, mypkg = catsplit(cpv)
		mylink = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
		if os.path.islink(mylink):
			"""Only remove it if it's really a link so that this method never
			removes a real package that was placed here to avoid a collision."""
			# Best-effort cleanup of the (possibly now empty) category dir.
			os.rmdir(os.path.join(self.pkgdir, mycat))
			if e.errno not in (errno.ENOENT,
				errno.ENOTEMPTY, errno.EEXIST):
	def _create_symlink(self, cpv):
		"""Create a ${PKGDIR}/${CATEGORY}/${PF}.tbz2 symlink (and
		${PKGDIR}/${CATEGORY} directory, if necessary).  Any file that may
		exist in the location of the symlink will first be removed."""
		mycat, mypkg = catsplit(cpv)
		full_path = os.path.join(self.pkgdir, mycat, mypkg + ".tbz2")
		self._ensure_dir(os.path.dirname(full_path))
			# A missing file is fine; anything else is a real error.
			if e.errno != errno.ENOENT:
		# Relative link into the All directory.
		os.symlink(os.path.join("..", "All", mypkg + ".tbz2"), full_path)
	def prevent_collision(self, cpv):
		"""Make sure that the file location ${PKGDIR}/All/${PF}.tbz2 is safe to
		use for a given cpv.  If a collision will occur with an existing
		package from another category, the existing package will be bumped to
		${PKGDIR}/${CATEGORY}/${PF}.tbz2 so that both can coexist."""
		if not self._all_directory:
		# Copy group permissions for new directories that
		# may have been created.
		for path in ("All", catsplit(cpv)[0]):
			path = os.path.join(self.pkgdir, path)
			self._ensure_dir(path)
			if not os.access(path, os.W_OK):
				raise PermissionDenied("access('%s', W_OK)" % path)
		full_path = self.getname(cpv)
		if "All" == full_path.split(os.path.sep)[-2]:
		"""Move a colliding package if it exists.  Code below this point only
		executes in rare cases."""
		mycat, mypkg = catsplit(cpv)
		myfile = mypkg + ".tbz2"
		mypath = os.path.join("All", myfile)
		dest_path = os.path.join(self.pkgdir, mypath)
			st = os.lstat(dest_path)
			if stat.S_ISLNK(st.st_mode):
					if os.path.exists(dest_path):
			# For invalid packages, other_cat could be None.
			other_cat = portage.xpak.tbz2(dest_path).getfile(
				_unicode_encode("CATEGORY",
				encoding=_encodings['repo.content']))
				other_cat = _unicode_decode(other_cat,
					encoding=_encodings['repo.content'], errors='replace')
				other_cat = other_cat.strip()
				other_cpv = other_cat + "/" + mypkg
				# Bump the colliding package into its own category dir.
				self._move_from_all(other_cpv)
				self.inject(other_cpv)
		self._move_to_all(cpv)
	def _ensure_dir(self, path):
		"""
		Create the specified directory.  Also, copy gid and group mode
		bits from self.pkgdir if possible.
		@param path: Absolute path of the directory to be created.
		@type path: String
		"""
			pkgdir_st = os.stat(self.pkgdir)
		pkgdir_gid = pkgdir_st.st_gid
		# Keep only setgid + group permission bits from pkgdir's mode.
		pkgdir_grp_mode = 0o2070 & pkgdir_st.st_mode
			ensure_dirs(path, gid=pkgdir_gid, mode=pkgdir_grp_mode, mask=0)
		except PortageException:
			if not os.path.isdir(path):
	def _move_to_all(self, cpv):
		"""If the file exists, move it.  Whether or not it exists, update state
		for future getname() calls."""
		mycat, mypkg = catsplit(cpv)
		myfile = mypkg + ".tbz2"
		# Record the new relative location first so getname() is
		# consistent even if the file itself is absent.
		self._pkg_paths[cpv] = os.path.join("All", myfile)
		src_path = os.path.join(self.pkgdir, mycat, myfile)
			mystat = os.lstat(src_path)
		# Only move real regular files, never symlinks.
		if mystat and stat.S_ISREG(mystat.st_mode):
			self._ensure_dir(os.path.join(self.pkgdir, "All"))
			dest_path = os.path.join(self.pkgdir, "All", myfile)
			_movefile(src_path, dest_path, mysettings=self.settings)
			self._create_symlink(cpv)
475 def _move_from_all(self, cpv):
476 """Move a package from ${PKGDIR}/All/${PF}.tbz2 to
477 ${PKGDIR}/${CATEGORY}/${PF}.tbz2 and update state from getname calls."""
478 self._remove_symlink(cpv)
479 mycat, mypkg = catsplit(cpv)
480 myfile = mypkg + ".tbz2"
481 mypath = os.path.join(mycat, myfile)
482 dest_path = os.path.join(self.pkgdir, mypath)
483 self._ensure_dir(os.path.dirname(dest_path))
484 src_path = os.path.join(self.pkgdir, "All", myfile)
485 _movefile(src_path, dest_path, mysettings=self.settings)
486 self._pkg_paths[cpv] = mypath
	def populate(self, getbinpkgs=0):
		"populates the binarytree"
		from portage.locks import lockfile, unlockfile
		# Hold the Packages index lock (when writable) for the whole
		# populate operation, and flag re-entrancy via _populating.
		if os.access(self.pkgdir, os.W_OK):
			pkgindex_lock = lockfile(self._pkgindex_file,
		self._populating = True
			self._populate(getbinpkgs)
			unlockfile(pkgindex_lock)
		self._populating = False
	def _populate(self, getbinpkgs=0):
		"""Scan $PKGDIR (and optionally a remote binhost) and load
		package metadata into self.dbapi, updating the Packages index
		file when local state has changed."""
		if (not os.path.isdir(self.pkgdir) and not getbinpkgs):
		# Clear all caches in case populate is called multiple times
		# as may be the case when _global_updates calls populate()
		# prior to performing package moves since it only wants to
		# operate on local packages (getbinpkgs=0).
		self._remotepkgs = None
		self.dbapi._clear_cache()
		self.dbapi._aux_cache.clear()
			self._pkg_paths = pkg_paths
			dirs = listdir(self.pkgdir, dirsonly=True, EmptyOnError=True)
			# Scan All/ first so its entries are preferred over
			# category-directory duplicates.
			dirs.insert(0, "All")
			pkgindex = self._load_pkgindex()
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			header = pkgindex.header
			for d in pkgindex.packages:
				metadata[d["CPV"]] = d
			update_pkgindex = False
				for myfile in listdir(os.path.join(self.pkgdir, mydir)):
					if not myfile.endswith(".tbz2"):
					mypath = os.path.join(mydir, myfile)
					full_path = os.path.join(self.pkgdir, mypath)
					s = os.lstat(full_path)
					if stat.S_ISLNK(s.st_mode):
					# Validate data from the package index and try to avoid
					# reading the xpak if possible.
						d = metadata.get(mydir+"/"+myfile[:-5])
							for mycpv in metadata:
								mycat, mypf = catsplit(mycpv)
									mypf, []).append(metadata[mycpv])
						possibilities = pf_index.get(myfile[:-5])
						for d in possibilities:
								# Index entry must match the file's
								# mtime and size to be trusted.
								if long(d["MTIME"]) != s[stat.ST_MTIME]:
							except (KeyError, ValueError):
								if long(d["SIZE"]) != long(s.st_size):
							except (KeyError, ValueError):
							if not self._pkgindex_keys.difference(d):
							if mycpv in pkg_paths:
								# discard duplicates (All/ is preferred)
							pkg_paths[mycpv] = mypath
							# update the path if the package has been moved
							oldpath = d.get("PATH")
							if oldpath and oldpath != mypath:
								update_pkgindex = True
							if mypath != mycpv + ".tbz2":
									update_pkgindex = True
									update_pkgindex = True
							self.dbapi.cpv_inject(mycpv)
							if not self.dbapi._aux_cache_keys.difference(d):
								aux_cache = self.dbapi._aux_cache_slot_dict()
								for k in self.dbapi._aux_cache_keys:
								self.dbapi._aux_cache[mycpv] = aux_cache
					if not os.access(full_path, os.R_OK):
						writemsg(_("!!! Permission denied to read " \
							"binary package: '%s'\n") % full_path,
						self.invalids.append(myfile[:-5])
					# Index miss: read metadata from the xpak itself.
					metadata_bytes = portage.xpak.tbz2(full_path).get_data()
					mycat = _unicode_decode(metadata_bytes.get(
						_unicode_encode("CATEGORY",
						encoding=_encodings['repo.content']), ""),
						encoding=_encodings['repo.content'], errors='replace')
					mypf = _unicode_decode(metadata_bytes.get(
						_unicode_encode("PF",
						encoding=_encodings['repo.content']), ""),
						encoding=_encodings['repo.content'], errors='replace')
					slot = _unicode_decode(metadata_bytes.get(
						_unicode_encode("SLOT",
						encoding=_encodings['repo.content']), ""),
						encoding=_encodings['repo.content'], errors='replace')
					if not mycat or not mypf or not slot:
						#old-style or corrupt package
						writemsg(_("\n!!! Invalid binary package: '%s'\n") % full_path,
							missing_keys.append("CATEGORY")
							missing_keys.append("PF")
							missing_keys.append("SLOT")
							msg.append(_("Missing metadata key(s): %s.") % \
								", ".join(missing_keys))
						msg.append(_(" This binary package is not " \
							"recoverable and should be deleted."))
						from textwrap import wrap
						for line in wrap("".join(msg), 72):
							writemsg("!!! %s\n" % line, noiselevel=-1)
						self.invalids.append(mypkg)
					mycat = mycat.strip()
					# Category-directory files must match their CATEGORY.
					if mycat != mydir and mydir != "All":
					if mypkg != mypf.strip():
					mycpv = mycat + "/" + mypkg
					if mycpv in pkg_paths:
						# All is first, so it's preferred.
					if not self.dbapi._category_re.match(mycat):
						writemsg(_("!!! Binary package has an " \
							"unrecognized category: '%s'\n") % full_path,
						writemsg(_("!!! '%s' has a category that is not" \
							" listed in %setc/portage/categories\n") % \
							(mycpv, self.settings["PORTAGE_CONFIGROOT"]),
					pkg_paths[mycpv] = mypath
					self.dbapi.cpv_inject(mycpv)
					update_pkgindex = True
					d = metadata.get(mycpv, {})
							# Stale index data is discarded on mismatch.
							if long(d["MTIME"]) != s[stat.ST_MTIME]:
						except (KeyError, ValueError):
							if long(d["SIZE"]) != long(s.st_size):
						except (KeyError, ValueError):
					d["MTIME"] = str(s[stat.ST_MTIME])
					d["SIZE"] = str(s.st_size)
					d.update(zip(self._pkgindex_aux_keys,
						self.dbapi.aux_get(mycpv, self._pkgindex_aux_keys)))
						self._eval_use_flags(mycpv, d)
					except portage.exception.InvalidDependString:
						writemsg(_("!!! Invalid binary package: '%s'\n") % \
							self.getname(mycpv), noiselevel=-1)
						self.dbapi.cpv_remove(mycpv)
					# record location if it's non-default
					if mypath != mycpv + ".tbz2":
					if not self.dbapi._aux_cache_keys.difference(d):
						aux_cache = self.dbapi._aux_cache_slot_dict()
						for k in self.dbapi._aux_cache_keys:
						self.dbapi._aux_cache[mycpv] = aux_cache
		# Drop index entries for packages that no longer exist on disk.
		for cpv in list(metadata):
			if cpv not in pkg_paths:
		# Do not bother to write the Packages index if $PKGDIR/All/ exists
		# since it will provide no benefit due to the need to read CATEGORY
		if update_pkgindex and os.access(self.pkgdir, os.W_OK):
			del pkgindex.packages[:]
			pkgindex.packages.extend(iter(metadata.values()))
			self._update_pkgindex_header(pkgindex.header)
			f = atomic_ofstream(self._pkgindex_file)
		if getbinpkgs and not self.settings["PORTAGE_BINHOST"]:
			writemsg(_("!!! PORTAGE_BINHOST unset, but use is requested.\n"),
		if getbinpkgs and 'PORTAGE_BINHOST' in self.settings:
			base_url = self.settings["PORTAGE_BINHOST"]
			from portage.const import CACHE_PATH
				from urllib.parse import urlparse
				from urlparse import urlparse
			urldata = urlparse(base_url)
			# Locally cached copy of the remote Packages index.
			pkgindex_file = os.path.join(self.settings["ROOT"], CACHE_PATH, "binhost",
				urldata[1] + urldata[2], "Packages")
			pkgindex = self._new_pkgindex()
				f = codecs.open(_unicode_encode(pkgindex_file,
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
			local_timestamp = pkgindex.header.get("TIMESTAMP", None)
				from urllib.request import urlopen as urllib_request_urlopen
				from urllib import urlopen as urllib_request_urlopen
			rmt_idx = self._new_pkgindex()
				# urlparse.urljoin() only works correctly with recognized
				# protocols and requires the base url to have a trailing
				# slash, so join manually...
				f = urllib_request_urlopen(base_url.rstrip("/") + "/Packages")
				f_dec = codecs.iterdecode(f,
					_encodings['repo.content'], errors='replace')
				rmt_idx.readHeader(f_dec)
				remote_timestamp = rmt_idx.header.get("TIMESTAMP", None)
				if not remote_timestamp:
					# no timestamp in the header, something's wrong
				if not self._pkgindex_version_supported(rmt_idx):
					writemsg(_("\n\n!!! Binhost package index version" \
						" is not supported: '%s'\n") % \
						rmt_idx.header.get("VERSION"), noiselevel=-1)
				elif local_timestamp != remote_timestamp:
					# Only download the body when the header timestamp
					# differs from our cached copy.
					rmt_idx.readBody(f_dec)
			except EnvironmentError as e:
				writemsg(_("\n\n!!! Error fetching binhost package" \
					" info from '%s'\n") % base_url)
				writemsg("!!! %s\n\n" % str(e))
			if pkgindex is rmt_idx:
				pkgindex.modified = False # don't update the header
					ensure_dirs(os.path.dirname(pkgindex_file))
					f = atomic_ofstream(pkgindex_file)
				except (IOError, PortageException):
					if os.access(os.path.dirname(pkgindex_file), os.W_OK):
					# The current user doesn't have permission to cache the
					# file, but that's alright.
			# Organize remote package list as a cpv -> metadata map.
			self._remotepkgs = _pkgindex_cpv_map_latest_build(pkgindex)
			self._remote_has_index = True
			self._remote_base_uri = pkgindex.header.get("URI", base_url)
			self.__remotepkgs = {}
			for cpv in self._remotepkgs:
				self.dbapi.cpv_inject(cpv)
			# Remote package instances override local package
			# if they are not identical.
			hash_names = ["SIZE"] + self._pkgindex_hashes
			for cpv, local_metadata in metadata.items():
				remote_metadata = self._remotepkgs.get(cpv)
				if remote_metadata is None:
				# Use digests to compare identity.
				for hash_name in hash_names:
					local_value = local_metadata.get(hash_name)
					if local_value is None:
					remote_value = remote_metadata.get(hash_name)
					if remote_value is None:
					if local_value != remote_value:
					del self._remotepkgs[cpv]
					# Override the local package in the aux_get cache.
					self.dbapi._aux_cache[cpv] = remote_metadata
				# Local package instances override remote instances.
					self._remotepkgs.pop(cpv, None)
		# Fall back to the deprecated, index-less binhost protocol.
		self._remotepkgs = {}
			chunk_size = long(self.settings["PORTAGE_BINHOST_CHUNKSIZE"])
		except (ValueError, KeyError):
		writemsg_stdout("\n")
			colorize("GOOD", _("Fetching bininfo from ")) + \
			re.sub(r'//(.+):.+@(.+)/', r'//\1:*password*@\2/', base_url) + "\n")
		self.__remotepkgs = portage.getbinpkg.dir_get_metadata(
			self.settings["PORTAGE_BINHOST"], chunk_size=chunk_size)
		#writemsg(green(" -- DONE!\n\n"))
		for mypkg in list(self.__remotepkgs):
			if "CATEGORY" not in self.__remotepkgs[mypkg]:
				#old-style or corrupt package
				writemsg(_("!!! Invalid remote binary package: %s\n") % mypkg,
				del self.__remotepkgs[mypkg]
			mycat = self.__remotepkgs[mypkg]["CATEGORY"].strip()
			fullpkg = mycat+"/"+mypkg[:-5]
			if fullpkg in metadata:
				# When using this old protocol, comparison with the remote
				# package isn't supported, so the local package is always
				# preferred even if getbinpkgsonly is enabled.
			if not self.dbapi._category_re.match(mycat):
				writemsg(_("!!! Remote binary package has an " \
					"unrecognized category: '%s'\n") % fullpkg,
				writemsg(_("!!! '%s' has a category that is not" \
					" listed in %setc/portage/categories\n") % \
					(fullpkg, self.settings["PORTAGE_CONFIGROOT"]),
			mykey = portage.cpv_getkey(fullpkg)
				# invalid tbz2's can hurt things.
				self.dbapi.cpv_inject(fullpkg)
				remote_metadata = self.__remotepkgs[mypkg]
				for k, v in remote_metadata.items():
					remote_metadata[k] = v.strip()
				# Eliminate metadata values with names that digestCheck
				# uses, since they are not valid when using the old
				# protocol. Typically this is needed for SIZE metadata
				# which corresponds to the size of the unpacked files
				# rather than the binpkg file size, triggering digest
				# verification failures as reported in bug #303211.
				remote_metadata.pop('SIZE', None)
				for k in portage.checksum.hashfunc_map:
					remote_metadata.pop(k, None)
				self._remotepkgs[fullpkg] = remote_metadata
			except SystemExit as e:
				writemsg(_("!!! Failed to inject remote binary package: %s\n") % fullpkg,
				del self.__remotepkgs[mypkg]
	def inject(self, cpv, filename=None):
		"""Add a freshly built package to the database.  This updates
		$PKGDIR/Packages with the new package metadata (including MD5).
		@param cpv: The cpv of the new package to inject
		@type cpv: string
		@param filename: File path of the package to inject, or None if it's
			already in the location returned by getname()
		@type filename: string
		@rtype: None
		"""
		mycat, mypkg = catsplit(cpv)
		if not self.populated:
			full_path = self.getname(cpv)
			s = os.stat(full_path)
			if e.errno != errno.ENOENT:
			writemsg(_("!!! Binary package does not exist: '%s'\n") % full_path,
		mytbz2 = portage.xpak.tbz2(full_path)
		# A package without SLOT metadata is not usable.
		slot = mytbz2.getfile("SLOT")
			writemsg(_("!!! Invalid binary package: '%s'\n") % full_path,
		self.dbapi.cpv_inject(cpv)
		# Reread the Packages index (in case it's been changed by another
		# process) and then updated it, all while holding a lock.
		from portage.locks import lockfile, unlockfile
		created_symlink = False
			pkgindex_lock = lockfile(self._pkgindex_file,
			if filename is not None:
				new_filename = self.getname(cpv)
				self._ensure_dir(os.path.dirname(new_filename))
				_movefile(filename, new_filename, mysettings=self.settings)
			if self._all_directory and \
				self.getname(cpv).split(os.path.sep)[-2] == "All":
				self._create_symlink(cpv)
				created_symlink = True
			pkgindex = self._load_pkgindex()
			if not self._pkgindex_version_supported(pkgindex):
				pkgindex = self._new_pkgindex()
			# Discard remote metadata to ensure that _pkgindex_entry
			# gets the local metadata. This also updates state for future
			if self._remotepkgs is not None:
				self._remotepkgs.pop(cpv, None)
			# Discard cached metadata to ensure that _pkgindex_entry
			# doesn't return stale metadata.
			self.dbapi._aux_cache.pop(cpv, None)
				d = self._pkgindex_entry(cpv)
			except portage.exception.InvalidDependString:
				writemsg(_("!!! Invalid binary package: '%s'\n") % \
					self.getname(cpv), noiselevel=-1)
				self.dbapi.cpv_remove(cpv)
				del self._pkg_paths[cpv]
			# If found, remove package(s) with duplicate path.
			path = d.get("PATH", "")
			for i in range(len(pkgindex.packages) - 1, -1, -1):
				d2 = pkgindex.packages[i]
				if path and path == d2.get("PATH"):
					# Handle path collisions in $PKGDIR/All
					# when CPV is not identical.
					del pkgindex.packages[i]
				elif cpv == d2.get("CPV"):
					if path == d2.get("PATH", ""):
						del pkgindex.packages[i]
					elif created_symlink and not d2.get("PATH", ""):
						# Delete entry for the package that was just
						# overwritten by a symlink to this package.
						del pkgindex.packages[i]
			pkgindex.packages.append(d)
			self._update_pkgindex_header(pkgindex.header)
			f = atomic_ofstream(os.path.join(self.pkgdir, "Packages"))
			unlockfile(pkgindex_lock)
	def _pkgindex_entry(self, cpv):
		"""
		Performs checksums and evaluates USE flag conditionals.
		Raises InvalidDependString if necessary.
		@rtype: dict
		@returns: a dict containing an entry for the given cpv.
		"""
		pkg_path = self.getname(cpv)
		from portage.checksum import perform_multiple_checksums
		# Base entry: the aux metadata keys written to the index.
		d = dict(zip(self._pkgindex_aux_keys,
			self.dbapi.aux_get(cpv, self._pkgindex_aux_keys)))
		d.update(perform_multiple_checksums(
			pkg_path, hashes=self._pkgindex_hashes))
		st = os.stat(pkg_path)
		d["MTIME"] = str(st[stat.ST_MTIME])
		d["SIZE"] = str(st.st_size)
		rel_path = self._pkg_paths[cpv]
		# record location if it's non-default
		if rel_path != cpv + ".tbz2":
			d["PATH"] = rel_path
		self._eval_use_flags(cpv, d)
1034 def _new_pkgindex(self):
1035 return portage.getbinpkg.PackageIndex(
1036 allowed_pkg_keys=self._pkgindex_allowed_pkg_keys,
1037 default_header_data=self._pkgindex_default_header_data,
1038 default_pkg_data=self._pkgindex_default_pkg_data,
1039 inherited_keys=self._pkgindex_inherited_keys,
1040 translated_keys=self._pkgindex_translated_keys)
	def _update_pkgindex_header(self, header):
		"""Refresh the Packages index header dict in place with the
		current profile, index version, URI and selected config values."""
		portdir = normalize_path(os.path.realpath(self.settings["PORTDIR"]))
		profiles_base = os.path.join(portdir, "profiles") + os.path.sep
		if self.settings.profile_path:
			profile_path = normalize_path(
				os.path.realpath(self.settings.profile_path))
			# Store the profile relative to $PORTDIR/profiles when possible.
			if profile_path.startswith(profiles_base):
				profile_path = profile_path[len(profiles_base):]
			header["PROFILE"] = profile_path
		header["VERSION"] = str(self._pkgindex_version)
		base_uri = self.settings.get("PORTAGE_BINHOST_HEADER_URI")
			header["URI"] = base_uri
			header.pop("URI", None)
		# Copy selected configuration variables into the header.
		for k in self._pkgindex_header_keys:
			v = self.settings.get(k, None)
	def _pkgindex_version_supported(self, pkgindex):
		"""Check the index's VERSION header against the version this
		code writes (supported when int(VERSION) <= _pkgindex_version)."""
		version = pkgindex.header.get("VERSION")
				if int(version) <= self._pkgindex_version:
	def _eval_use_flags(self, cpv, metadata):
		"""Filter metadata["USE"] down to flags present in IUSE and
		evaluate USE conditionals in dependency-like metadata values,
		updating the metadata dict in place.  May raise
		InvalidDependString (see the except clause below)."""
		use = frozenset(metadata["USE"].split())
		# Strip +/- defaults from IUSE entries before membership tests.
		iuse = set(f.lstrip("-+") for f in metadata["IUSE"].split())
		use = [f for f in use if f in iuse]
		metadata["USE"] = " ".join(use)
		for k in self._pkgindex_use_evaluated_keys:
				deps = use_reduce(deps, uselist=raw_use)
				deps = paren_enclose(deps)
			except portage.exception.InvalidDependString as e:
				writemsg("%s: %s\n" % (k, str(e)),
			if k in _vdb_use_conditional_atoms:
				# Evaluate per-atom USE conditionals, skipping tokens
				# that fail to parse as atoms.
				for x in deps.split():
						x = portage.dep.Atom(x)
					except portage.exception.InvalidAtom:
					v_split.append(str(x.evaluate_conditionals(raw_use)))
				deps = ' '.join(v_split)
	def exists_specific(self, cpv):
		"""Return the dbapi matches for the exact =cpv atom (truthy when
		the specific package version exists in the tree)."""
		if not self.populated:
		return self.dbapi.match(
			dep_expand("="+cpv, mydb=self.dbapi, settings=self.settings))
	def dep_bestmatch(self, mydep):
		"compatibility method -- all matches, not just visible ones"
		if not self.populated:
		# The writemsg(..., 1) calls below are verbose debug output.
		writemsg("mydep: %s\n" % mydep, 1)
		mydep = dep_expand(mydep, mydb=self.dbapi, settings=self.settings)
		writemsg("mydep: %s\n" % mydep, 1)
		mykey = dep_getkey(mydep)
		writemsg("mykey: %s\n" % mykey, 1)
		# Best (highest) version among all matches for the expanded dep.
		mymatch = best(match_from_list(mydep,self.dbapi.cp_list(mykey)))
		writemsg("mymatch: %s\n" % mymatch, 1)
	def getname(self, pkgname):
		"""Returns a file location for this package.  The default location is
		${PKGDIR}/All/${PF}.tbz2, but will be ${PKGDIR}/${CATEGORY}/${PF}.tbz2
		in the rare event of a collision.  The prevent_collision() method can
		be called to ensure that ${PKGDIR}/All/${PF}.tbz2 is available for a
		specific cpv."""
		if not self.populated:
		mypath = self._pkg_paths.get(mycpv, None)
			return os.path.join(self.pkgdir, mypath)
		mycat, mypkg = catsplit(mycpv)
		if self._all_directory:
			mypath = os.path.join("All", mypkg + ".tbz2")
			# Another cpv already occupies this All/ path: fall back
			# to the category subdirectory.
			if mypath in self._pkg_paths.values():
				mypath = os.path.join(mycat, mypkg + ".tbz2")
			mypath = os.path.join(mycat, mypkg + ".tbz2")
		self._pkg_paths[mycpv] = mypath # cache for future lookups
		return os.path.join(self.pkgdir, mypath)
	def isremote(self, pkgname):
		"""Returns true if the package is kept remotely and it has not been
		downloaded (or it is only partially downloaded)."""
		if self._remotepkgs is None or pkgname not in self._remotepkgs:
		# Presence in self._remotepkgs implies that it's remote. When a
		# package is downloaded, state is updated by self.inject().
	def gettbz2(self, pkgname):
		"""Fetches the package from a remote site, if necessary.  Attempts to
		resume if the file appears to be partially downloaded."""
		tbz2_path = self.getname(pkgname)
		tbz2name = os.path.basename(tbz2_path)
		if os.path.exists(tbz2_path):
			if (tbz2name not in self.invalids):
				writemsg(_("Resuming download of this tbz2, but it is possible that it is corrupt.\n"),
		mydest = os.path.dirname(self.getname(pkgname))
		self._ensure_dir(mydest)
			from urllib.parse import urlparse
			from urlparse import urlparse
		# urljoin doesn't work correctly with unrecognized protocols like sftp
		if self._remote_has_index:
			# Prefer the index's PATH (handles non-default locations).
			rel_url = self._remotepkgs[pkgname].get("PATH")
				rel_url = pkgname+".tbz2"
			url = self._remote_base_uri.rstrip("/") + "/" + rel_url.lstrip("/")
			url = self.settings["PORTAGE_BINHOST"].rstrip("/") + "/" + tbz2name
		protocol = urlparse(url)[0]
		# Choose fetch vs resume command, preferring the
		# protocol-specific variable over the generic one.
		fcmd_prefix = "FETCHCOMMAND"
			fcmd_prefix = "RESUMECOMMAND"
		fcmd = self.settings.get(fcmd_prefix + "_" + protocol.upper())
			fcmd = self.settings.get(fcmd_prefix)
		success = portage.getbinpkg.file_get(url, mydest, fcmd=fcmd)
			os.unlink(self.getname(pkgname))
			raise portage.exception.FileNotFound(mydest)
		# Register the freshly downloaded package.
		self.inject(pkgname)
	def _load_pkgindex(self):
		"""Read $PKGDIR/Packages into a fresh PackageIndex; a missing or
		unreadable file yields the empty index."""
		pkgindex = self._new_pkgindex()
			f = codecs.open(_unicode_encode(self._pkgindex_file,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
		except EnvironmentError:
	def digestCheck(self, pkg):
		"""
		Verify digests for the given package and raise DigestException
		if verification fails.
		@rtype: bool
		@returns: True if digests could be located, False otherwise.
		"""
		# pkg may be a cpv string or a package object exposing .cpv.
		if not isinstance(cpv, basestring):
		pkg_path = self.getname(cpv)
		# Locate digest metadata: local Packages index for local
		# packages, otherwise the remote index entry.
		if self._remotepkgs is None or cpv not in self._remotepkgs:
			for d in self._load_pkgindex().packages:
			metadata = self._remotepkgs[cpv]
		if metadata is None:
		from portage.checksum import hashfunc_map, verify_all
		for k in hashfunc_map:
		if "SIZE" in metadata:
				digests["size"] = int(metadata["SIZE"])
				writemsg(_("!!! Malformed SIZE attribute in remote " \
					"metadata for '%s'\n") % cpv)
		eout.quiet = self.settings.get("PORTAGE_QUIET") == "1"
		# Quick check first; fall back to a full digest verification.
		ok, st = _check_distfile(pkg_path, digests, eout, show_errors=0)
			ok, reason = verify_all(pkg_path, digests)
				raise portage.exception.DigestException(
					(pkg_path,) + tuple(reason))
1268 def getslot(self, mycatpkg):
1269 "Get a slot for a catpkg; assume it exists."
1272 myslot = self.dbapi.aux_get(mycatpkg,["SLOT"])[0]
1273 except SystemExit as e:
1275 except Exception as e: