1 # Copyright 1998-2010 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
5 "close_portdbapi_caches", "FetchlistDict", "portagetree", "portdbapi"
9 portage.proxy.lazyimport.lazyimport(globals(),
11 'portage.data:portage_gid,secpass',
12 'portage.dbapi.dep_expand:dep_expand',
13 'portage.dep:dep_getkey,match_from_list,use_reduce',
14 'portage.env.loaders:KeyValuePairFileLoader',
15 'portage.package.ebuild.doebuild:doebuild',
16 'portage.util:ensure_dirs,shlex_split,writemsg,writemsg_level',
17 'portage.util.listdir:listdir',
18 'portage.versions:best,catpkgsplit,_pkgsplit@pkgsplit,ver_regexp',
21 from portage.cache.cache_errors import CacheError
22 from portage.cache.mappings import Mapping
23 from portage.const import REPO_NAME_LOC
24 from portage.dbapi import dbapi
25 from portage.exception import PortageException, \
26 FileNotFound, InvalidDependString, InvalidPackageName
27 from portage.localization import _
28 from portage.manifest import Manifest
30 from portage import eclass_cache, auxdbkeys, \
31 eapi_is_supported, dep_check, \
33 from portage import os
34 from portage import _encodings
35 from portage import _unicode_encode
36 from portage import OrderedDict
37 from _emerge.EbuildMetadataPhase import EbuildMetadataPhase
38 from _emerge.PollScheduler import PollScheduler
47 if sys.hexversion >= 0x3000000:
# Lightweight immutable record describing one configured repository: its
# name, filesystem path, and the (possibly chained) eclass database built
# for it, plus derived PORTDIR/PORTDIR_OVERLAY strings.
# NOTE(review): original lines 54-55 (presumably self.name/self.path
# assignments) are missing from this excerpt -- confirm against full file.
51 class _repo_info(object):
52 __slots__ = ('name', 'path', 'eclass_db', 'portdir', 'portdir_overlay')
53 def __init__(self, name, path, eclass_db):
56 self.eclass_db = eclass_db
# eclass_db.porttrees[0] is treated as the master tree; the rest are
# overlays, flattened into a space-separated string.
57 self.portdir = eclass_db.porttrees[0]
58 self.portdir_overlay = ' '.join(eclass_db.porttrees[1:])
# dbapi subclass backed by an on-disk portage tree (PORTDIR plus overlays).
60 class portdbapi(dbapi):
61 """this tree will scan a portage directory located at root (passed to init)"""
# Module-global registry of live instances, used by close_portdbapi_caches().
62 portdbapi_instances = []
# Accessor for the configured category list.
# NOTE(review): the @property decorator line (original 65) is missing from
# this excerpt -- presumably this is a read-only property; confirm.
66 def _categories(self):
67 return self.settings.categories
# Build the repository maps (treemap / _repository_map), resolve duplicate
# repo names, chain eclass databases per-repo, and initialize the metadata
# cache backends (auxdb, pregen auxdb). Many original lines are missing in
# this excerpt; comments below are hedged accordingly.
69 def __init__(self, _unused_param=None, mysettings=None):
71 @param _unused_param: deprecated, use mysettings['PORTDIR'] instead
72 @type _unused_param: None
73 @param mysettings: an immutable config instance
74 @type mysettings: portage.config
76 portdbapi.portdbapi_instances.append(self)
78 from portage import config
80 self.settings = mysettings
82 from portage import settings
# No settings given: fall back to a clone of the global config.
83 self.settings = config(clone=settings)
85 porttree_root = self.settings['PORTDIR']
# Legacy positional argument is ignored; warn if a caller still passes
# a value that disagrees with PORTDIR.
87 if _unused_param is not None and _unused_param != porttree_root:
88 warnings.warn("The first parameter of the " + \
89 "portage.dbapi.porttree.portdbapi" + \
90 " constructor is now unused. " + \
91 "mysettings['PORTDIR'] will be used instead.",
92 DeprecationWarning, stacklevel=2)
94 # This is strictly for use in aux_get() doebuild calls when metadata
95 # is generated by the depend phase. It's safest to use a clone for
96 # this purpose because doebuild makes many changes to the config
97 # instance that is passed in.
98 self.doebuild_settings = config(clone=self.settings)
99 self.depcachedir = os.path.realpath(self.settings.depcachedir)
101 if os.environ.get("SANDBOX_ON") == "1":
102 # Make api consumers exempt from sandbox violations
103 # when doing metadata cache updates.
104 sandbox_write = os.environ.get("SANDBOX_WRITE", "").split(":")
105 if self.depcachedir not in sandbox_write:
106 sandbox_write.append(self.depcachedir)
107 os.environ["SANDBOX_WRITE"] = \
108 ":".join(filter(None, sandbox_write))
# PORTDIR first, then each overlay, all realpath-normalized.
110 porttrees = [os.path.realpath(porttree_root)]
111 porttrees.extend(os.path.realpath(x) for x in \
112 shlex_split(self.settings.get('PORTDIR_OVERLAY', '')))
115 self.treemap = treemap
116 self._repository_map = repository_map
117 identically_named_paths = {}
118 for path in porttrees:
119 if path in repository_map:
# Read the repo name from profiles/repo_name (REPO_NAME_LOC).
121 repo_name_path = os.path.join(path, REPO_NAME_LOC)
123 repo_name = codecs.open(
124 _unicode_encode(repo_name_path,
125 encoding=_encodings['fs'], errors='strict'),
126 mode='r', encoding=_encodings['repo.content'],
127 errors='replace').readline().strip()
128 except EnvironmentError:
129 # warn about missing repo_name at some other time, since we
130 # don't want to see a warning every time the portage module is
134 identically_named_path = treemap.get(repo_name)
135 if identically_named_path is not None:
136 # The earlier one is discarded.
137 del repository_map[identically_named_path]
138 identically_named_paths[identically_named_path] = repo_name
139 if identically_named_path == porttrees[0]:
140 # Found another repo with the same name as
141 # $PORTDIR, so update porttrees[0] to match.
143 treemap[repo_name] = path
144 repository_map[path] = repo_name
146 # Ensure that each repo_name is unique. Later paths override
147 # earlier ones that correspond to the same name.
148 porttrees = [x for x in porttrees if x not in identically_named_paths]
150 for path, repo_name in identically_named_paths.items():
151 ignored_map.setdefault(repo_name, []).append(path)
# Record the discarded (shadowed) repos for later diagnostics.
152 self._ignored_repos = tuple((repo_name, tuple(paths)) \
153 for repo_name, paths in ignored_map.items())
155 self.porttrees = porttrees
156 porttree_root = porttrees[0]
157 self.porttree_root = porttree_root
159 self.eclassdb = eclass_cache.cache(porttree_root)
161 # This is used as sanity check for aux_get(). If there is no
162 # root eclass dir, we assume that PORTDIR is invalid or
163 # missing. This check allows aux_get() to detect a missing
164 # portage tree and return early by raising a KeyError.
165 self._have_root_eclass_dir = os.path.isdir(
166 os.path.join(self.porttree_root, "eclass"))
168 self.metadbmodule = self.settings.load_best_module("portdbapi.metadbmodule")
170 #if the portdbapi is "frozen", then we assume that we can cache everything (that no updates to it are happening)
# Per-repo eclass databases, seeded with the root tree's cache.
175 eclass_dbs = {porttree_root : self.eclassdb}
176 local_repo_configs = self.settings._local_repo_configs
177 default_loc_repo_config = None
179 if local_repo_configs is not None:
180 default_loc_repo_config = local_repo_configs.get('DEFAULT')
# Build the alias -> canonical repo name map, warning on collisions.
181 for repo_name, loc_repo_conf in local_repo_configs.items():
182 if loc_repo_conf.aliases is not None:
183 for alias in loc_repo_conf.aliases:
184 overridden_alias = repo_aliases.get(alias)
185 if overridden_alias is not None:
186 writemsg_level(_("!!! Alias '%s' " \
187 "created for '%s' overrides " \
189 "'%s'\n") % (alias, repo_name,
191 self.settings._local_repo_conf_path),
192 level=logging.WARNING, noiselevel=-1)
193 repo_aliases[alias] = repo_name
# Resolve each tree's master list and eclass-override chain into a
# per-repo _repo_info entry with a chained eclass database.
195 for path in self.porttrees:
196 if path in self._repo_info:
199 repo_name = self._repository_map.get(path)
202 if local_repo_configs is not None:
203 if repo_name is not None:
204 loc_repo_conf = local_repo_configs.get(repo_name)
205 if loc_repo_conf is None:
206 loc_repo_conf = default_loc_repo_config
# layout.conf supplies 'masters' when repos.conf does not.
208 layout_filename = os.path.join(path, "metadata/layout.conf")
209 layout_file = KeyValuePairFileLoader(layout_filename, None, None)
210 layout_data, layout_errors = layout_file.load()
214 if loc_repo_conf is not None and \
215 loc_repo_conf.masters is not None:
216 masters = loc_repo_conf.masters
218 masters = layout_data.get('masters', '').split()
220 for master_name in masters:
221 master_name = repo_aliases.get(master_name, master_name)
222 master_path = self.treemap.get(master_name)
223 if master_path is None:
224 writemsg_level(_("Unavailable repository '%s' " \
225 "referenced by masters entry in '%s'\n") % \
226 (master_name, layout_filename),
227 level=logging.ERROR, noiselevel=-1)
229 porttrees.append(master_path)
231 if not porttrees and path != porttree_root:
232 # Make PORTDIR the default master, but only if our
233 # heuristics suggest that it's necessary.
234 profiles_desc = os.path.join(path, 'profiles', 'profiles.desc')
235 eclass_dir = os.path.join(path, 'eclass')
236 if not os.path.isfile(profiles_desc) or \
237 not os.path.isdir(eclass_dir):
238 porttrees.append(porttree_root)
240 porttrees.append(path)
242 if loc_repo_conf is not None and \
243 loc_repo_conf.eclass_overrides is not None:
244 for other_name in loc_repo_conf.eclass_overrides:
245 other_path = self.treemap.get(other_name)
246 if other_path is None:
247 writemsg_level(_("Unavailable repository '%s' " \
248 "referenced by eclass-overrides entry in " \
249 "'%s'\n") % (other_name,
250 self.settings._local_repo_conf_path),
251 level=logging.ERROR, noiselevel=-1)
253 porttrees.append(other_path)
# Chain the eclass caches of all contributing trees in order.
256 for porttree in porttrees:
257 tree_db = eclass_dbs.get(porttree)
259 tree_db = eclass_cache.cache(porttree)
260 eclass_dbs[porttree] = tree_db
261 if eclass_db is None:
262 eclass_db = tree_db.copy()
264 eclass_db.append(tree_db)
266 self._repo_info[path] = _repo_info(repo_name, path, eclass_db)
268 #Keep a list of repo names, sorted by priority (highest priority first).
269 self._ordered_repo_name_list = tuple(self._repo_info[path].name for path in reversed(self.porttrees))
271 self.auxdbmodule = self.settings.load_best_module("portdbapi.auxdbmodule")
273 self._pregen_auxdb = {}
274 self._init_cache_dirs()
275 depcachedir_w_ok = os.access(self.depcachedir, os.W_OK)
282 # portage_gid is irrelevant, so just obey umask
283 cache_kwargs['gid'] = -1
284 cache_kwargs['perms'] = -1
286 # XXX: REMOVE THIS ONCE UNUSED_0 IS YANKED FROM auxdbkeys
288 filtered_auxdbkeys = [x for x in auxdbkeys if not x.startswith("UNUSED_0")]
289 filtered_auxdbkeys.sort()
290 from portage.cache import metadata_overlay, volatile
# Read-only cache dir: overlay a volatile (in-memory) writable layer
# on top of the read-only on-disk cache for each tree.
291 if not depcachedir_w_ok:
292 for x in self.porttrees:
293 db_ro = self.auxdbmodule(self.depcachedir, x,
294 filtered_auxdbkeys, gid=portage_gid, readonly=True)
295 self.auxdb[x] = metadata_overlay.database(
296 self.depcachedir, x, filtered_auxdbkeys,
297 gid=portage_gid, db_rw=volatile.database,
300 for x in self.porttrees:
303 # location, label, auxdbkeys
304 self.auxdb[x] = self.auxdbmodule(
305 self.depcachedir, x, filtered_auxdbkeys, **cache_kwargs)
306 if self.auxdbmodule is metadata_overlay.database:
307 self.auxdb[x].db_ro.ec = self._repo_info[x].eclass_db
# Without metadata-transfer, use the rsync-distributed
# metadata/cache directly as a read-only pregenerated cache.
308 if "metadata-transfer" not in self.settings.features:
309 for x in self.porttrees:
310 if x in self._pregen_auxdb:
312 if os.path.isdir(os.path.join(x, "metadata", "cache")):
313 self._pregen_auxdb[x] = self.metadbmodule(
314 x, "metadata/cache", filtered_auxdbkeys, readonly=True)
316 self._pregen_auxdb[x].ec = self._repo_info[x].eclass_db
317 except AttributeError:
319 # Selectively cache metadata in order to optimize dep matching.
320 self._aux_cache_keys = set(
321 ["DEPEND", "EAPI", "INHERITED", "IUSE", "KEYWORDS", "LICENSE",
322 "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND", "repository",
323 "RESTRICT", "SLOT", "DEFINED_PHASES", "REQUIRED_USE"])
# Ebuilds whose metadata generation failed; aux_get() short-circuits these.
326 self._broken_ebuilds = set()
# Create the dependency cache directory with the permissions portage
# expects; failures are reported (handler lines missing from excerpt).
328 def _init_cache_dirs(self):
329 """Create /var/cache/edb/dep and adjust permissions for the portage
337 ensure_dirs(self.depcachedir, gid=portage_gid,
338 mode=dirmode, mask=modemask)
339 except PortageException as e:
# Close all metadata cache databases; tolerates a partially-constructed
# instance (constructor may have raised before auxdb was assigned).
342 def close_caches(self):
343 if not hasattr(self, "auxdb"):
344 # unhandled exception thrown from constructor
# Flush pending writes in every per-tree auxdb cache backend.
350 def flush_cache(self):
351 for x in self.auxdb.values():
# Search all trees for licenses/<license_name> and return the first
# readable path (return statements missing from this excerpt).
354 def findLicensePath(self, license_name):
355 mytrees = self.porttrees[:]
358 license_path = os.path.join(x, "licenses", license_name)
359 if os.access(license_path, os.R_OK):
def findname(self, mycpv, mytree=None, myrepo=None):
	"""Locate the ebuild file for the given cpv.

	Thin convenience wrapper around findname2() that discards the
	tree component of its (path, tree) result.
	"""
	path_and_tree = self.findname2(mycpv, mytree, myrepo)
	return path_and_tree[0]
# Map a repository ID to its filesystem path via self.treemap
# (GLEP 42 requirement); fallthrough for unknown IDs is not visible here.
366 def getRepositoryPath(self, repository_id):
368 This function is required for GLEP 42 compliance; given a valid repository ID
369 it must return a path to the repository
370 TreeMap = { id:path }
372 if repository_id in self.treemap:
373 return self.treemap[repository_id]
# Reverse lookup: canonical repository path -> repo_name (or None).
376 def getRepositoryName(self, canonical_repo_path):
378 This is the inverse of getRepositoryPath().
379 @param canonical_repo_path: the canonical path of a repository, as
380 resolved by os.path.realpath()
381 @type canonical_repo_path: String
382 @returns: The repo_name for the corresponding repository, or None
383 if the path does not correspond a known repository
384 @rtype: String or None
386 return self._repository_map.get(canonical_repo_path)
# Return repo names ordered by priority, highest first (GLEP 42).
388 def getRepositories(self):
390 This function is required for GLEP 42 compliance; it will return a list of
394 return self._ordered_repo_name_list
# Resolve a cpv to (ebuild_path, tree); overlays are searched before
# PORTDIR. Several lines (try/except, loop headers, returns) are missing
# from this excerpt.
396 def findname2(self, mycpv, mytree=None, myrepo = None):
398 Returns the location of the CPV, and what overlay it was in.
399 Searches overlays first, then PORTDIR; this allows us to return the first
400 matching file. As opposed to starting in portdir and then doing overlays
401 second, we would have to exhaustively search the overlays until we found
403 If myrepo is not None it will find packages from this repository(overlay)
409 if myrepo in self.treemap:
410 mytree = self.treemap[myrepo]
# Validate the cpv shape: exactly one '/' and a parsable pkg part.
414 mysplit = mycpv.split("/")
415 psplit = pkgsplit(mysplit[1])
416 if psplit is None or len(mysplit) != 2:
417 raise InvalidPackageName(mycpv)
419 # For optimal performance in this hot spot, we do manual unicode
420 # handling here instead of using the wrapped os module.
421 encoding = _encodings['fs']
427 mytrees = self.porttrees[:]
# cat/pkg/pkg-ver.ebuild relative to each tree root.
430 relative_path = mysplit[0] + _os.sep + psplit[0] + _os.sep + \
431 mysplit[1] + ".ebuild"
434 filename = x + _os.sep + relative_path
435 if _os.access(_unicode_encode(filename,
436 encoding=encoding, errors=errors), _os.R_OK):
440 def _metadata_process(self, cpv, ebuild_path, repo_path):
442 Create an EbuildMetadataPhase instance to generate metadata for the
444 @rtype: EbuildMetadataPhase
445 @returns: A new EbuildMetadataPhase instance, or None if the
446 metadata cache is already valid.
# Reuse cached metadata when valid; only spawn a phase on a miss.
448 metadata, st, emtime = self._pull_valid_cache(cpv, ebuild_path, repo_path)
449 if metadata is not None:
452 process = EbuildMetadataPhase(cpv=cpv, ebuild_path=ebuild_path,
453 ebuild_mtime=emtime, metadata_callback=self._metadata_callback,
454 portdb=self, repo_path=repo_path, settings=self.doebuild_settings)
# Normalize freshly-generated metadata (eclass data, mtime, EAPI
# sanitization) and store it into the writable cache for repo_path.
457 def _metadata_callback(self, cpv, ebuild_path, repo_path, metadata, mtime):
460 if hasattr(metadata, "items"):
461 i = iter(metadata.items())
# Expand INHERITED into full per-eclass data from the repo's eclass db.
464 if metadata.get("INHERITED", False):
465 metadata["_eclasses_"] = self._repo_info[repo_path
466 ].eclass_db.get_eclass_data(metadata["INHERITED"].split())
468 metadata["_eclasses_"] = {}
470 metadata.pop("INHERITED", None)
471 metadata["_mtime_"] = mtime
473 eapi = metadata.get("EAPI")
474 if not eapi or not eapi.strip():
476 metadata["EAPI"] = eapi
# Unsupported EAPI: strip all keys except cache bookkeeping and mark
# the EAPI with a leading '-' so the cache entry records the failure.
477 if not eapi_is_supported(eapi):
478 for k in set(metadata).difference(("_mtime_", "_eclasses_")):
480 metadata["EAPI"] = "-" + eapi.lstrip("-")
482 self.auxdb[repo_path][cpv] = metadata
# Return (metadata, stat_result, mtime) for cpv if a valid cache entry
# exists, consulting the pregenerated metadata/cache before the writable
# cache; validity requires matching mtime and current eclass data.
485 def _pull_valid_cache(self, cpv, ebuild_path, repo_path):
487 # Don't use unicode-wrapped os module, for better performance.
488 st = _os.stat(_unicode_encode(ebuild_path,
489 encoding=_encodings['fs'], errors='strict'))
490 emtime = st[stat.ST_MTIME]
492 writemsg(_("!!! aux_get(): ebuild for " \
493 "'%s' does not exist at:\n") % (cpv,), noiselevel=-1)
494 writemsg("!!! %s\n" % ebuild_path, noiselevel=-1)
497 # Pull pre-generated metadata from the metadata/cache/
498 # directory if it exists and is valid, otherwise fall
499 # back to the normal writable cache.
501 pregen_auxdb = self._pregen_auxdb.get(repo_path)
502 if pregen_auxdb is not None:
503 auxdbs.append(pregen_auxdb)
504 auxdbs.append(self.auxdb[repo_path])
505 eclass_db = self._repo_info[repo_path].eclass_db
510 metadata = auxdb[cpv]
514 if auxdb is not pregen_auxdb:
522 eapi = metadata.get('EAPI', '').strip()
# A '-'-prefixed EAPI marks a previously-detected unsupported EAPI;
# such entries are only reusable while mtime and eclasses still match.
525 if not (eapi[:1] == '-' and eapi_is_supported(eapi[1:])) and \
526 emtime == metadata['_mtime_'] and \
527 eclass_db.is_eclass_data_valid(metadata['_eclasses_']):
536 return (metadata, st, emtime)
# Fetch auxiliary metadata values (SLOT, DEPEND, ...) for a cpv,
# regenerating via an EbuildMetadataPhase on cache miss. Raises KeyError
# when the package/repo cannot be resolved or metadata generation fails.
538 def aux_get(self, mycpv, mylist, mytree=None, myrepo=None):
539 "stub code for returning auxilliary db information, such as SLOT, DEPEND, etc."
540 'input: "sys-apps/foo-1.0",["SLOT","DEPEND","HOMEPAGE"]'
541 'return: ["0",">=sys-libs/bar-1.0","http://www.foo.com"] or raise KeyError if error'
544 if myrepo in self.treemap:
545 mytree = self.treemap[myrepo]
547 raise KeyError(myrepo)
# Fast path: every requested key is in the in-memory aux cache.
551 if not mytree and not self._known_keys.intersection(
552 mylist).difference(self._aux_cache_keys):
553 aux_cache = self._aux_cache.get(mycpv)
554 if aux_cache is not None:
555 return [aux_cache.get(x, "") for x in mylist]
557 global auxdbkeys, auxdbkeylen
559 cat, pkg = mycpv.split("/", 1)
561 # Missing slash. Can't find ebuild so raise KeyError.
562 raise KeyError(mycpv)
564 myebuild, mylocation = self.findname2(mycpv, mytree)
# NOTE(review): noiselevel=1 differs from the noiselevel=-1 used in
# every other writemsg call in this file -- likely a typo; confirm.
567 writemsg("!!! aux_get(): %s\n" % \
568 _("ebuild not found for '%s'") % mycpv, noiselevel=1)
569 raise KeyError(mycpv)
571 mydata, st, emtime = self._pull_valid_cache(mycpv, myebuild, mylocation)
572 doregen = mydata is None
# Known-broken ebuilds and missing root eclass dir short-circuit.
575 if myebuild in self._broken_ebuilds:
576 raise KeyError(mycpv)
577 if not self._have_root_eclass_dir:
578 raise KeyError(mycpv)
580 self.doebuild_settings.setcpv(mycpv)
# Optionally sniff EAPI from the ebuild head before spawning doebuild.
583 if eapi is None and \
584 'parse-eapi-ebuild-head' in self.doebuild_settings.features:
585 eapi = portage._parse_eapi_ebuild_head(codecs.open(
586 _unicode_encode(myebuild,
587 encoding=_encodings['fs'], errors='strict'),
588 mode='r', encoding=_encodings['repo.content'],
592 self.doebuild_settings.configdict['pkg']['EAPI'] = eapi
# Unsupported EAPI: record a stub cache entry instead of spawning.
594 if eapi is not None and not portage.eapi_is_supported(eapi):
595 mydata = self._metadata_callback(
596 mycpv, myebuild, mylocation, {'EAPI':eapi}, emtime)
598 proc = EbuildMetadataPhase(cpv=mycpv, ebuild_path=myebuild,
600 metadata_callback=self._metadata_callback, portdb=self,
601 repo_path=mylocation,
602 scheduler=PollScheduler().sched_iface,
603 settings=self.doebuild_settings)
608 if proc.returncode != os.EX_OK:
609 self._broken_ebuilds.add(myebuild)
610 raise KeyError(mycpv)
612 mydata = proc.metadata
614 # do we have a origin repository name for the current package
615 mydata["repository"] = self._repository_map.get(mylocation, "")
617 mydata["INHERITED"] = ' '.join(mydata.get("_eclasses_", []))
618 mydata["_mtime_"] = st[stat.ST_MTIME]
620 eapi = mydata.get("EAPI")
623 mydata["EAPI"] = eapi
624 if not eapi_is_supported(eapi):
625 for k in set(mydata).difference(("_mtime_", "_eclasses_")):
627 mydata["EAPI"] = "-" + eapi.lstrip("-")
629 #finally, we look at our internal cache entry and return the requested data.
630 returnme = [mydata.get(x, "") for x in mylist]
# Refresh the in-memory aux cache with the dep-matching key subset.
634 for x in self._aux_cache_keys:
635 aux_cache[x] = mydata.get(x, "")
636 self._aux_cache[mycpv] = aux_cache
# Build {distfile: alternative-URI set} from SRC_URI, raising
# InvalidDependString on aux_get failure or unsupported EAPI.
640 def getFetchMap(self, mypkg, useflags=None, mytree=None):
642 Get the SRC_URI metadata as a dict which maps each file name to a
643 set of alternative URIs.
645 @param mypkg: cpv for an ebuild
647 @param useflags: a collection of enabled USE flags, for evaluation of
649 @type useflags: set, or None to enable all conditionals
650 @param mytree: The canonical path of the tree in which the ebuild
651 is located, or None for automatic lookup
653 @returns: A dict which maps each file name to a set of alternative
659 eapi, myuris = self.aux_get(mypkg,
660 ["EAPI", "SRC_URI"], mytree=mytree)
662 # Convert this to an InvalidDependString exception since callers
664 raise portage.exception.InvalidDependString(
665 "getFetchMap(): aux_get() error reading "+mypkg+"; aborting.")
667 if not eapi_is_supported(eapi):
668 # Convert this to an InvalidDependString exception
669 # since callers already handle it.
670 raise portage.exception.InvalidDependString(
671 "getFetchMap(): '%s' has unsupported EAPI: '%s'" % \
672 (mypkg, eapi.lstrip("-")))
# Delegates SRC_URI parsing to the module-level helper.
674 return _parse_uri_map(mypkg, {'EAPI':eapi,'SRC_URI':myuris},
677 def getfetchsizes(self, mypkg, useflags=None, debug=0, myrepo=None):
678 # returns a filename:size dictionary of remaining downloads
679 myebuild = self.findname(mypkg, myrepo=myrepo)
681 raise AssertionError(_("ebuild not found for '%s'") % mypkg)
682 pkgdir = os.path.dirname(myebuild)
# Digest sizes come from the package's Manifest.
683 mf = Manifest(pkgdir, self.settings["DISTDIR"])
684 checksums = mf.getDigests()
687 writemsg(_("[empty/missing/bad digest]: %s\n") % (mypkg,))
690 myfiles = self.getFetchMap(mypkg, useflags=useflags)
691 #XXX: maybe this should be improved: take partial downloads
692 # into account? check checksums?
693 for myfile in myfiles:
695 fetch_size = int(checksums[myfile]["size"])
696 except (KeyError, ValueError):
698 writemsg(_("[bad digest]: missing %(file)s for %(pkg)s\n") % {"file":myfile, "pkg":mypkg})
700 file_path = os.path.join(self.settings["DISTDIR"], myfile)
703 mystat = os.stat(file_path)
# Fall back to read-only distdirs when DISTDIR lacks the file.
708 ro_distdirs = self.settings.get("PORTAGE_RO_DISTDIRS")
709 if ro_distdirs is not None:
710 for x in shlex_split(ro_distdirs):
712 mystat = os.stat(os.path.join(x, myfile))
716 if mystat.st_size == fetch_size:
717 existing_size = fetch_size
720 existing_size = mystat.st_size
721 remaining_size = fetch_size - existing_size
722 if remaining_size > 0:
723 # Assume the download is resumable.
724 filesdict[myfile] = remaining_size
725 elif remaining_size < 0:
726 # The existing file is too large and therefore corrupt.
727 filesdict[myfile] = int(checksums[myfile]["size"])
# Verify that distfiles for mypkg are present and pass Manifest digest
# checks; several branch/return lines are missing from this excerpt.
730 def fetch_check(self, mypkg, useflags=None, mysettings=None, all=False, myrepo=None):
733 elif useflags is None:
735 useflags = mysettings["USE"].split()
737 if myrepo in self.treemap:
738 mytree = self.treemap[myrepo]
744 myfiles = self.getFetchMap(mypkg, useflags=useflags, mytree=mytree)
745 myebuild = self.findname(mypkg, myrepo=myrepo)
747 raise AssertionError(_("ebuild not found for '%s'") % mypkg)
748 pkgdir = os.path.dirname(myebuild)
749 mf = Manifest(pkgdir, self.settings["DISTDIR"])
750 mysums = mf.getDigests()
754 if not mysums or x not in mysums:
756 reason = _("digest missing")
759 ok, reason = portage.checksum.verify_all(
760 os.path.join(self.settings["DISTDIR"], x), mysums[x])
761 except FileNotFound as e:
763 reason = _("File Not Found: '%s'") % (e,)
# True if an ebuild file for mykey exists on disk (ignores masking).
770 def cpv_exists(self, mykey, myrepo=None):
771 "Tells us whether an actual ebuild exists on disk (no masking)"
772 cps2 = mykey.split("/")
773 cps = catpkgsplit(mykey, silent=0)
777 if self.findname(cps[0] + "/" + cps2[1], myrepo=myrepo):
782 def cp_all(self, categories=None, trees=None):
784 This returns a list of all keys in our tree or trees
785 @param categories: optional list of categories to search or
786 defaults to self.settings.categories
787 @param trees: optional list of trees to search the categories in or
788 defaults to self.porttrees
789 @rtype list of [cat/pkg,...]
792 if categories is None:
793 categories = self.settings.categories
795 trees = self.porttrees
# Scan each category dir, keeping only valid package directory names.
798 for y in listdir(oroot+"/"+x, EmptyOnError=1, ignorecvs=1, dirsonly=1):
799 if not self._pkg_dir_name_re.match(y) or \
# List all cpvs for a cat/pkg across the requested trees, with results
# cached in xcache when the dbapi is frozen.
807 def cp_list(self, mycp, use_cache=1, mytree=None):
808 if self.frozen and mytree is None:
809 cachelist = self.xcache["cp-list"].get(mycp)
810 if cachelist is not None:
811 # Try to propagate this to the match-all cache here for
812 # repoman since he uses separate match-all caches for each
813 # profile (due to old-style virtuals). Do not propagate
814 # old-style virtuals since cp_list() doesn't expand them.
815 if not (not cachelist and mycp.startswith("virtual/")):
816 self.xcache["match-all"][mycp] = cachelist
818 mysplit = mycp.split("/")
819 invalid_category = mysplit[0] not in self._categories
# mytree may be a single tree path or an iterable of them.
821 if mytree is not None:
822 if isinstance(mytree, basestring):
825 # assume it's iterable
828 mytrees = self.porttrees
829 for oroot in mytrees:
831 file_list = os.listdir(os.path.join(oroot, mycp))
836 if x[-7:] == '.ebuild':
842 writemsg(_("\nInvalid ebuild name: %s\n") % \
843 os.path.join(oroot, mycp, x), noiselevel=-1)
# The file's package name must match the directory's package name.
845 if ps[0] != mysplit[1]:
846 writemsg(_("\nInvalid ebuild name: %s\n") % \
847 os.path.join(oroot, mycp, x), noiselevel=-1)
849 ver_match = ver_regexp.match("-".join(ps[1:]))
850 if ver_match is None or not ver_match.groups():
851 writemsg(_("\nInvalid ebuild version: %s\n") % \
852 os.path.join(oroot, mycp, x), noiselevel=-1)
853 d[mysplit[0]+"/"+pf] = None
855 if invalid_category and d:
856 writemsg(_("\n!!! '%s' has a category that is not listed in " \
857 "%setc/portage/categories\n") % \
858 (mycp, self.settings["PORTAGE_CONFIGROOT"]), noiselevel=-1)
862 # Always sort in ascending order here since it's handy
863 # and the result can be easily cached and reused.
864 self._cpv_sort_ascending(mylist)
865 if self.frozen and mytree is None:
866 cachelist = mylist[:]
867 self.xcache["cp-list"][mycp] = cachelist
868 # Do not propagate old-style virtuals since
869 # cp_list() doesn't expand them.
870 if not (not cachelist and mycp.startswith("virtual/")):
871 self.xcache["match-all"][mycp] = cachelist
# Fragment of the cache-initialization loop (enclosing def line is
# missing from this excerpt -- presumably freeze(); confirm). Creates
# one xcache dict per match level.
875 for x in "bestmatch-visible", "cp-list", "list-visible", "match-all", \
876 "match-visible", "minimum-all", "minimum-visible":
# Central caching dependency-match dispatcher. 'level' selects the match
# strategy; results are memoized in self.xcache when frozen.
884 def xmatch(self,level,origdep,mydep=None,mykey=None,mylist=None):
885 "caching match function; very trick stuff"
886 #if no updates are being made to the tree, we can consult our xcache...
889 return self.xcache[level][origdep][:]
894 #this stuff only runs on first call of xmatch()
895 #create mydep, mykey from origdep
896 mydep = dep_expand(origdep, mydb=self, settings=self.settings)
899 if level == "list-visible":
900 #a list of all visible packages, not called directly (just by xmatch())
901 #myval = self.visible(self.cp_list(mykey))
903 myval = self.gvisible(self.visible(self.cp_list(mykey)))
904 elif level == "minimum-all":
905 # Find the minimum matching version. This is optimized to
906 # minimize the number of metadata accesses (improves performance
907 # especially in cases where metadata needs to be generated).
908 cpv_iter = iter(self.cp_list(mykey))
910 cpv_iter = self._iter_match(mydep, cpv_iter)
912 myval = next(cpv_iter)
913 except StopIteration:
916 elif level in ("minimum-visible", "bestmatch-visible"):
917 # Find the minimum matching visible version. This is optimized to
918 # minimize the number of metadata accesses (improves performance
919 # especially in cases where metadata needs to be generated).
921 mylist = self.cp_list(mykey)
923 mylist = match_from_list(mydep, self.cp_list(mykey, myrepo = mydep.repo))
925 settings = self.settings
926 local_config = settings.local_config
927 aux_keys = list(self._aux_cache_keys)
928 if level == "minimum-visible":
# Walk candidates in the chosen order, skipping any cpv that fails a
# visibility test; the first survivor wins.
932 for cpv in iterfunc(mylist):
934 metadata = dict(zip(aux_keys,
935 self.aux_get(cpv, aux_keys)))
937 # ebuild masked by corruption
939 if not eapi_is_supported(metadata["EAPI"]):
941 if mydep.slot and mydep.slot != metadata["SLOT"]:
943 if settings._getMissingKeywords(cpv, metadata):
945 if settings._getMaskAtom(cpv, metadata):
947 if settings._getProfileMaskAtom(cpv, metadata):
# USE-conditional LICENSE/PROPERTIES require a computed USE value.
951 if "?" in metadata["LICENSE"] or "?" in metadata["PROPERTIES"]:
952 self.doebuild_settings.setcpv(cpv, mydb=metadata)
953 metadata["USE"] = self.doebuild_settings.get("USE", "")
955 if settings._getMissingLicenses(cpv, metadata):
957 if settings._getMissingProperties(cpv, metadata):
959 except InvalidDependString:
963 for has_iuse in self._iter_match_use(mydep, [cpv]):
969 elif level == "bestmatch-list":
970 #dep match -- find best match but restrict search to sublist
971 #no point in calling xmatch again since we're not caching list deps
973 myval = best(list(self._iter_match(mydep, mylist)))
974 elif level == "match-list":
975 #dep match -- find all matches but restrict search to sublist (used in 2nd half of visible())
977 myval = list(self._iter_match(mydep, mylist))
978 elif level == "match-visible":
979 #dep match -- find all visible matches
980 #get all visible packages, then get the matching ones
981 myval = list(self._iter_match(mydep,
982 self.xmatch("list-visible", mykey, mydep=mykey, mykey=mykey), myrepo=mydep.repo))
983 elif level == "match-all":
984 #match *all* visible *and* masked packages
986 myval = self.cp_list(mykey)
988 myval = list(self._iter_match(mydep, self.cp_list(mykey), myrepo = mydep.repo))
990 raise AssertionError(
991 "Invalid level argument: '%s'" % level)
# Sublist-restricted levels are never cached.
993 if self.frozen and (level not in ["match-list", "bestmatch-list"]):
994 self.xcache[level][mydep] = myval
995 if origdep and origdep != mydep:
996 self.xcache[level][origdep] = myval
def match(self, mydep, use_cache=1):
	"""Return all visible packages matching ``mydep``.

	Delegates to the caching xmatch() dispatcher at the
	"match-visible" level; ``use_cache`` is accepted for interface
	compatibility but not consulted here.
	"""
	level = "match-visible"
	return self.xmatch(level, mydep)
# Filter a same-cat/pkg cpv list down to entries not hidden by
# package.mask or profile masks; loop/append lines missing from excerpt.
1002 def visible(self, mylist):
1003 """two functions in one. Accepts a list of cpv values and uses the package.mask *and*
1004 packages file to remove invisible entries, returning remaining items. This function assumes
1005 that all entries in mylist have the same category and package name."""
# Bind mask lookups to locals outside the per-cpv loop.
1011 getMaskAtom = self.settings._getMaskAtom
1012 getProfileMaskAtom = self.settings._getProfileMaskAtom
1015 metadata = dict(zip(db_keys, self.aux_get(cpv, db_keys)))
1017 # masked by corruption
1019 if not metadata["SLOT"]:
1021 if getMaskAtom(cpv, metadata):
1023 if getProfileMaskAtom(cpv, metadata):
# Remove cpvs masked by keywords, CHOST acceptance, licenses or
# properties; survivors are collected into newlist.
1028 def gvisible(self,mylist):
1029 "strip out group-masked (not in current group) entries"
1034 aux_keys = list(self._aux_cache_keys)
1036 local_config = self.settings.local_config
1037 chost = self.settings.get('CHOST', '')
1038 accept_chost = self.settings._accept_chost
1039 for mycpv in mylist:
1042 metadata.update(zip(aux_keys, self.aux_get(mycpv, aux_keys)))
1045 except PortageException as e:
1046 writemsg("!!! Error: aux_get('%s', %s)\n" % (mycpv, aux_keys),
1048 writemsg("!!! %s\n" % (e,), noiselevel=-1)
1051 eapi = metadata["EAPI"]
1052 if not eapi_is_supported(eapi):
1054 if _eapi_is_deprecated(eapi):
1056 if self.settings._getMissingKeywords(mycpv, metadata):
1059 metadata['CHOST'] = chost
1060 if not accept_chost(mycpv, metadata):
1062 metadata["USE"] = ""
# USE-conditional LICENSE/PROPERTIES need PORTAGE_USE computed.
1063 if "?" in metadata["LICENSE"] or "?" in metadata["PROPERTIES"]:
1064 self.doebuild_settings.setcpv(mycpv, mydb=metadata)
1065 metadata['USE'] = self.doebuild_settings['PORTAGE_USE']
1067 if self.settings._getMissingLicenses(mycpv, metadata):
1069 if self.settings._getMissingProperties(mycpv, metadata):
1071 except InvalidDependString:
1073 newlist.append(mycpv)
# Module-level helper: close caches on every live portdbapi instance,
# registered to run at exit below.
1076 def close_portdbapi_caches():
1077 for i in portdbapi.portdbapi_instances:
1080 portage.process.atexit_register(portage.portageexit)
# Legacy facade over portdbapi keyed by the global (or supplied) config.
1082 class portagetree(object):
1083 def __init__(self, root=None, virtual=None, settings=None):
1085 Constructor for a PortageTree
1087 @param root: deprecated, defaults to settings['ROOT']
1088 @type root: String/Path
1089 @param virtual: UNUSED
1090 @type virtual: No Idea
1091 @param settings: Portage Configuration object (portage.settings)
1092 @type settings: Instance of portage.config
1095 if settings is None:
1096 settings = portage.settings
1097 self.settings = settings
# The deprecated root parameter is ignored; warn on disagreement.
1099 if root is not None and root != settings['ROOT']:
1100 warnings.warn("The root parameter of the " + \
1101 "portage.dbapi.porttree.portagetree" + \
1102 " constructor is now unused. Use " + \
1103 "settings['ROOT'] instead.",
1104 DeprecationWarning, stacklevel=2)
1106 self.portroot = settings["PORTDIR"]
1107 self.virtual = virtual
1108 self.dbapi = portdbapi(mysettings=settings)
# Fragment of the deprecated 'root' accessor (the @property/def lines,
# original 1110-1111, are missing from this excerpt -- confirm). Warns,
# then forwards to settings['ROOT'].
1112 warnings.warn("The root attribute of " + \
1113 "portage.dbapi.porttree.portagetree" + \
1114 " is deprecated. Use " + \
1115 "settings['ROOT'] instead.",
1116 DeprecationWarning, stacklevel=2)
1117 return self.settings['ROOT']
# Compatibility wrapper: best visible match for a dep atom
# (post-processing/return lines missing from this excerpt).
1119 def dep_bestmatch(self,mydep):
1120 "compatibility method"
1121 mymatch = self.dbapi.xmatch("bestmatch-visible",mydep)
# Compatibility wrapper: all visible matches for a dep atom
# (post-processing/return lines missing from this excerpt).
1126 def dep_match(self,mydep):
1127 "compatibility method"
1128 mymatch = self.dbapi.xmatch("match-visible",mydep)
def exists_specific(self, cpv):
	"""Return True when an ebuild for ``cpv`` exists on disk.

	Pure delegation to portdbapi.cpv_exists().
	"""
	ebuild_exists = self.dbapi.cpv_exists(cpv)
	return ebuild_exists
def getallnodes(self):
	"""Return every *unmasked* cat/pkg node in the tree.

	Nodes listed here may or may not also have masked versions
	available; delegates to portdbapi.cp_all().
	"""
	return self.dbapi.cp_all()
# DEPRECATED: compute the ebuild path for a cpv by string assembly,
# always rooted at self.portroot (validation lines missing from excerpt).
1141 def getname(self, pkgname):
1142 "returns file location for this particular package (DEPRECATED)"
1145 mysplit = pkgname.split("/")
1146 psplit = pkgsplit(mysplit[1])
1147 return "/".join([self.portroot, mysplit[0], psplit[0], mysplit[1]])+".ebuild"
def depcheck(self, mycheck, use="yes", myusesplit=None):
	"""Run dep_check() for ``mycheck`` against this tree's dbapi."""
	check_kwargs = {"use": use, "myuse": myusesplit}
	return dep_check(mycheck, self.dbapi, **check_kwargs)
# Fetch the SLOT for a cpv; SystemExit is re-raised while other errors
# are swallowed (handler bodies missing from this excerpt).
1152 def getslot(self,mycatpkg):
1153 "Get a slot for a catpkg; assume it exists."
1156 myslot = self.dbapi.aux_get(mycatpkg, ["SLOT"])[0]
1157 except SystemExit as e:
1159 except Exception as e:
# Mapping adapter so Manifest can look up per-package fetch lists
# without talking to the dbapi directly.
1163 class FetchlistDict(Mapping):
1165 This provide a mapping interface to retrieve fetch lists. It's used
1166 to allow portage.manifest.Manifest to access fetch lists via a standard
1167 mapping interface rather than use the dbapi directly.
def __init__(self, pkgdir, settings, mydbapi):
	"""Record the ebuild directory, config and dbapi used to build
	fetch lists; pkgdir's last two path components form the cat/pkg
	key and its grandparent directory is the tree root."""
	self.pkgdir = pkgdir
	self.settings = settings
	self.portdb = mydbapi
	# cat/pkg key derived from the final two path components.
	path_parts = pkgdir.split(os.sep)
	self.cp = os.sep.join(path_parts[-2:])
	# Tree root: two directory levels above the package dir.
	tree_root = os.path.dirname(os.path.dirname(pkgdir))
	self.mytree = os.path.realpath(tree_root)
def __getitem__(self, pkg_key):
	"""Return the complete fetch list (distfile names) for ``pkg_key``."""
	fetch_map = self.portdb.getFetchMap(pkg_key, mytree=self.mytree)
	return list(fetch_map)
def __contains__(self, cpv):
	"""True when ``cpv`` is one of the packages in this directory."""
	return cpv in iter(self)
def has_key(self, pkg_key):
	"""Deprecated membership test: True if ``pkg_key`` exists within
	pkgdir. Prefer the ``in`` operator."""
	# Same deprecation text as before, byte-for-byte.
	warnings.warn("portage.dbapi.porttree.FetchlistDict.has_key() is "
		"deprecated, use the 'in' operator instead",
		DeprecationWarning, stacklevel=2)
	return pkg_key in self
# Fragments of __iter__, __len__ and keys() -- the def lines (original
# 1192, 1195, 1200) are missing from this excerpt; confirm against the
# full file. All three delegate to portdb.cp_list() for this cat/pkg.
1193 return iter(self.portdb.cp_list(self.cp, mytree=self.mytree))
1196 """This needs to be implemented in order to avoid
1197 infinite recursion in some cases."""
1198 return len(self.portdb.cp_list(self.cp, mytree=self.mytree))
1201 """Returns keys for all packages within pkgdir"""
1202 return self.portdb.cp_list(self.cp, mytree=self.mytree)
1204 if sys.hexversion >= 0x3000000:
1207 def _parse_uri_map(cpv, metadata, use=None):
1209 myuris = use_reduce(metadata.get('SRC_URI', ''),
1210 uselist=use, matchall=(use is None),
1212 eapi=metadata['EAPI'])
1214 uri_map = OrderedDict()
1219 if myuris and myuris[-1] == "->":
1220 operator = myuris.pop()
1221 distfile = myuris.pop()
1223 distfile = os.path.basename(uri)
1225 raise portage.exception.InvalidDependString(
1226 ("getFetchMap(): '%s' SRC_URI has no file " + \
1227 "name: '%s'") % (cpv, uri))
1229 uri_set = uri_map.get(distfile)
1232 uri_map[distfile] = uri_set