Merge remote-tracking branch 'overlays-gentoo-org/master' into prefix
authorFabian Groffen <grobian@gentoo.org>
Fri, 27 May 2011 17:39:37 +0000 (19:39 +0200)
committerFabian Groffen <grobian@gentoo.org>
Fri, 27 May 2011 17:39:37 +0000 (19:39 +0200)
Ported changes to LinkageMapELF to the other LinkageMaps

Conflicts:
bin/etc-update
bin/glsa-check
bin/regenworld
pym/portage/dbapi/vartree.py

22 files changed:
1  2 
bin/ebuild.sh
bin/etc-update
bin/glsa-check
bin/regenworld
bin/repoman
man/emerge.1
pym/_emerge/Binpkg.py
pym/_emerge/EbuildBuild.py
pym/_emerge/EbuildBuildDir.py
pym/_emerge/Package.py
pym/_emerge/actions.py
pym/_emerge/depgraph.py
pym/_emerge/main.py
pym/portage/const.py
pym/portage/dbapi/vartree.py
pym/portage/output.py
pym/portage/package/ebuild/doebuild.py
pym/portage/util/__init__.py
pym/portage/util/_dyn_libs/LinkageMapELF.py
pym/portage/util/_dyn_libs/LinkageMapMachO.py
pym/portage/util/_dyn_libs/LinkageMapPeCoff.py
pym/portage/util/_dyn_libs/LinkageMapXCoff.py

diff --cc bin/ebuild.sh
Simple merge
diff --cc bin/etc-update
index 5fbd34582a667c22cca7e1fd4a8b336166cadefb,2369f04f49bb7d26f95bf76a990b509037a2ff1e..205438943f712e8751eaa836612017bba0869529
@@@ -1,5 -1,5 +1,5 @@@
 -#!/bin/bash
 +#!@PORTAGE_BASH@
- # Copyright 1999-2007 Gentoo Foundation
+ # Copyright 1999-2011 Gentoo Foundation
  # Distributed under the terms of the GNU General Public License v2
  
  # Author Brandon Low <lostlogic@gentoo.org>
diff --cc bin/glsa-check
index 64209ab825b99704ca40e753e2ecd90175206ef0,2f2d55523d9fabe6954f788dbd25ae2527d3e502..4f50a1ffb31d85cb0288cba66b52de6650021c29
@@@ -1,5 -1,5 +1,5 @@@
 -#!/usr/bin/python
 +#!@PREFIX_PORTAGE_PYTHON@
- # Copyright 2008-2009 Gentoo Foundation
+ # Copyright 2008-2011 Gentoo Foundation
  # Distributed under the terms of the GNU General Public License v2
  
  from __future__ import print_function
diff --cc bin/regenworld
index e0e9774c41e209a2d65c5253439fba5507fa1dd2,6b5af4ca547e7f885ec1fcb34ee2b8fefb5b0e89..9e0e2916c52415ed5dfcb5c0df333bc441eb952b
@@@ -1,5 -1,5 +1,5 @@@
 -#!/usr/bin/python
 +#!@PREFIX_PORTAGE_PYTHON@
- # Copyright 1999-2010 Gentoo Foundation
+ # Copyright 1999-2011 Gentoo Foundation
  # Distributed under the terms of the GNU General Public License v2
  
  from __future__ import print_function
diff --cc bin/repoman
Simple merge
diff --cc man/emerge.1
Simple merge
Simple merge
Simple merge
Simple merge
Simple merge
Simple merge
Simple merge
Simple merge
index 60575200031cb3d4943536e69a05e0744b78309c,e91c009890ef065ad3abedb2871c4adbac524472..00a53e446372ef44bbff004edd215c6adc51fe0c
@@@ -132,10 -88,9 +132,10 @@@ EBUILD_PHASES            = ("pretend", 
  SUPPORTED_FEATURES       = frozenset([
                             "assume-digests", "binpkg-logs", "buildpkg", "buildsyspkg", "candy",
                             "ccache", "chflags", "collision-protect", "compress-build-logs",
-                            "digest", "distcc", "distlocks", "ebuild-locks", "fakeroot",
+                            "digest", "distcc", "distcc-pump", "distlocks", "ebuild-locks", "fakeroot",
                             "fail-clean", "fixpackages", "force-mirror", "getbinpkg",
                             "installsources", "keeptemp", "keepwork", "fixlafiles", "lmirror",
 +                            "macossandbox", "macosprefixsandbox", "macosusersandbox",
                             "metadata-transfer", "mirror", "multilib-strict", "news",
                             "noauto", "noclean", "nodoc", "noinfo", "noman",
                             "nostrip", "notitles", "parallel-fetch", "parallel-install",
index 581300fc5fc7be01bfc9f9bf2897fbb5cd144260,e742358dc782ab07aa2321133d8c4f94627c051b..e0f0856ef674ec7b62cf14fbd4791a2e9c391ca5
@@@ -2347,7 -2386,7 +2407,7 @@@ class dblink(object)
                def path_to_node(path):
                        node = path_node_map.get(path)
                        if node is None:
-                               node = linkmap._LibGraphNode(path, root)
 -                              node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
++                              node = linkmap._LibGraphNode(linkmap._obj_key(path))
                                alt_path_node = lib_graph.get(node)
                                if alt_path_node is not None:
                                        node = alt_path_node
                def path_to_node(path):
                        node = path_node_map.get(path)
                        if node is None:
 -                              node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
 +                              chost = self.settings.get('CHOST')
 +                              if chost.find('darwin') >= 0:
-                                       node = LinkageMapMachO._LibGraphNode(path, root)
++                                      node = LinkageMapMachO._LibGraphNode(linkmap._obj_key(path))
 +                              elif chost.find('interix') >= 0 or chost.find('winnt') >= 0:
-                                       node = LinkageMapPeCoff._LibGraphNode(path, root)
++                                      node = LinkageMapPeCoff._LibGraphNode(linkmap._obj_key(path))
 +                              elif chost.find('aix') >= 0:
-                                       node = LinkageMapXCoff._LibGraphNode(path, root)
++                                      node = LinkageMapXCoff._LibGraphNode(linkmap._obj_key(path))
 +                              else:
-                                       node = LinkageMap._LibGraphNode(path, root)
++                                      node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
                                alt_path_node = lib_graph.get(node)
                                if alt_path_node is not None:
                                        node = alt_path_node
Simple merge
Simple merge
index cbdf6c23b5a2b79cbee68bc85a1a5c387900ee28,0000000000000000000000000000000000000000..7ed004a728eecebbf1619f9100f9883d83743232
mode 100644,000000..100644
--- /dev/null
@@@ -1,641 -1,0 +1,648 @@@
-               __slots__ = ("__weakref__", "_key")
 +# Copyright 1998-2011 Gentoo Foundation
 +# Distributed under the terms of the GNU General Public License v2
 +
 +import errno
 +import logging
 +import subprocess
 +
 +import portage
 +from portage import _encodings
 +from portage import _os_merge
 +from portage import _unicode_decode
 +from portage import _unicode_encode
 +from portage.cache.mappings import slot_dict_class
 +from portage.exception import CommandNotFound
 +from portage.localization import _
 +from portage.util import getlibpaths
 +from portage.util import grabfile
 +from portage.util import normalize_path
 +from portage.util import writemsg_level
 +from portage.const import EPREFIX
 +
 +class LinkageMapMachO(object):
 +
 +      """Models dynamic linker dependencies."""
 +
 +      _needed_aux_key = "NEEDED.MACHO.3"
 +      _installname_map_class = slot_dict_class(
 +              ("consumers", "providers"), prefix="")
 +
 +      def __init__(self, vardbapi):
 +              self._dbapi = vardbapi
 +              self._root = self._dbapi.settings['ROOT']
 +              self._libs = {}
 +              self._obj_properties = {}
 +              self._obj_key_cache = {}
 +              self._path_key_cache = {}
 +
 +      def _clear_cache(self):
 +              self._libs.clear()
 +              self._obj_properties.clear()
 +              self._obj_key_cache.clear()
 +              self._path_key_cache.clear()
 +
 +      def _path_key(self, path):
 +              key = self._path_key_cache.get(path)
 +              if key is None:
 +                      key = self._ObjectKey(path, self._root)
 +                      self._path_key_cache[path] = key
 +              return key
 +
 +      def _obj_key(self, path):
 +              key = self._obj_key_cache.get(path)
 +              if key is None:
 +                      key = self._ObjectKey(path, self._root)
 +                      self._obj_key_cache[path] = key
 +              return key
 +
 +      class _ObjectKey(object):
 +
 +              """Helper class used as _obj_properties keys for objects."""
 +
-               def __init__(self, obj, root):
-                       LinkageMapMachO._ObjectKey.__init__(self, obj, root)
++              __slots__ = ("_key")
 +
 +              def __init__(self, obj, root):
 +                      """
 +                      This takes a path to an object.
 +
 +                      @param object: path to a file
 +                      @type object: string (example: '/usr/bin/bar')
 +
 +                      """
 +                      self._key = self._generate_object_key(obj, root)
 +
 +              def __hash__(self):
 +                      return hash(self._key)
 +
 +              def __eq__(self, other):
 +                      return self._key == other._key
 +
 +              def _generate_object_key(self, obj, root):
 +                      """
 +                      Generate object key for a given object.
 +
 +                      @param object: path to a file
 +                      @type object: string (example: '/usr/bin/bar')
 +                      @rtype: 2-tuple of types (long, int) if object exists. string if
 +                              object does not exist.
 +                      @return:
 +                              1. 2-tuple of object's inode and device from a stat call, if object
 +                                      exists.
 +                              2. realpath of object if object does not exist.
 +
 +                      """
 +
 +                      os = _os_merge
 +
 +                      try:
 +                              _unicode_encode(obj,
 +                                      encoding=_encodings['merge'], errors='strict')
 +                      except UnicodeEncodeError:
 +                              # The package appears to have been merged with a 
 +                              # different value of sys.getfilesystemencoding(),
 +                              # so fall back to utf_8 if appropriate.
 +                              try:
 +                                      _unicode_encode(obj,
 +                                              encoding=_encodings['fs'], errors='strict')
 +                              except UnicodeEncodeError:
 +                                      pass
 +                              else:
 +                                      os = portage.os
 +
 +                      abs_path = os.path.join(root, obj.lstrip(os.sep))
 +                      try:
 +                              object_stat = os.stat(abs_path)
 +                      except OSError:
 +                              # Use the realpath as the key if the file does not exists on the
 +                              # filesystem.
 +                              return os.path.realpath(abs_path)
 +                      # Return a tuple of the device and inode.
 +                      return (object_stat.st_dev, object_stat.st_ino)
 +
 +              def file_exists(self):
 +                      """
 +                      Determine if the file for this key exists on the filesystem.
 +
 +                      @rtype: Boolean
 +                      @return:
 +                              1. True if the file exists.
 +                              2. False if the file does not exist or is a broken symlink.
 +
 +                      """
 +                      return isinstance(self._key, tuple)
 +
 +      class _LibGraphNode(_ObjectKey):
 +              __slots__ = ("alt_paths",)
 +
++              def __init__(self, key):
++                      """
++                      Create a _LibGraphNode from an existing _ObjectKey.
++                      This re-uses the _key attribute in order to avoid repeating
++                      any previous stat calls, which helps to avoid potential race
++                      conditions due to inconsistent stat results when the
++                      file system is being modified concurrently.
++                      """
++                      self._key = key._key
 +                      self.alt_paths = set()
 +
 +              def __str__(self):
 +                      return str(sorted(self.alt_paths))
 +
 +      def rebuild(self, exclude_pkgs=None, include_file=None,
 +              preserve_paths=None):
 +              """
 +              Raises CommandNotFound if there are preserved libs
 +              and the scanmacho binary is not available.
 +
 +              @param exclude_pkgs: A set of packages that should be excluded from
 +                      the LinkageMap, since they are being unmerged and their NEEDED
 +                      entries are therefore irrelevant and would only serve to corrupt
 +                      the LinkageMap.
 +              @type exclude_pkgs: set
 +              @param include_file: The path of a file containing NEEDED entries for
 +                      a package which does not exist in the vardbapi yet because it is
 +                      currently being merged.
 +              @type include_file: String
 +              @param preserve_paths: Libraries preserved by a package instance that
 +                      is currently being merged. They need to be explicitly passed to the
 +                      LinkageMap, since they are not registered in the
 +                      PreservedLibsRegistry yet.
 +              @type preserve_paths: set
 +              """
 +
 +              os = _os_merge
 +              root = self._root
 +              root_len = len(root) - 1
 +              self._clear_cache()
 +              libs = self._libs
 +              obj_properties = self._obj_properties
 +
 +              lines = []
 +
 +              # Data from include_file is processed first so that it
 +              # overrides any data from previously installed files.
 +              if include_file is not None:
 +                      for line in grabfile(include_file):
 +                              lines.append((include_file, line))
 +
 +              aux_keys = [self._needed_aux_key]
 +              can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
 +              if can_lock:
 +                      self._dbapi.lock()
 +              try:
 +                      for cpv in self._dbapi.cpv_all():
 +                              if exclude_pkgs is not None and cpv in exclude_pkgs:
 +                                      continue
 +                              needed_file = self._dbapi.getpath(cpv,
 +                                      filename=self._needed_aux_key)
 +                              for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
 +                                      lines.append((needed_file, line))
 +              finally:
 +                      if can_lock:
 +                              self._dbapi.unlock()
 +
 +              # have to call scanmacho for preserved libs here as they aren't 
 +              # registered in NEEDED.MACHO.3 files
 +              plibs = set()
 +              if preserve_paths is not None:
 +                      plibs.update(preserve_paths)
 +              if self._dbapi._plib_registry and \
 +                      self._dbapi._plib_registry.hasEntries():
 +                      for cpv, items in \
 +                              self._dbapi._plib_registry.getPreservedLibs().items():
 +                              if exclude_pkgs is not None and cpv in exclude_pkgs:
 +                                      # These preserved libs will either be unmerged,
 +                                      # rendering them irrelevant, or they will be
 +                                      # preserved in the replacement package and are
 +                                      # already represented via the preserve_paths
 +                                      # parameter.
 +                                      continue
 +                              plibs.update(items)
 +              if plibs:
 +                      args = [EPREFIX+"/usr/bin/scanmacho", "-qF", "%a;%F;%S;%n"]
 +                      args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
 +                              for x in plibs)
 +                      try:
 +                              proc = subprocess.Popen(args, stdout=subprocess.PIPE)
 +                      except EnvironmentError as e:
 +                              if e.errno != errno.ENOENT:
 +                                      raise
 +                              raise CommandNotFound(args[0])
 +                      else:
 +                              for l in proc.stdout:
 +                                      try:
 +                                              l = _unicode_decode(l,
 +                                                      encoding=_encodings['content'], errors='strict')
 +                                      except UnicodeDecodeError:
 +                                              l = _unicode_decode(l,
 +                                                      encoding=_encodings['content'], errors='replace')
 +                                              writemsg_level(_("\nError decoding characters " \
 +                                                      "returned from scanmacho: %s\n\n") % (l,),
 +                                                      level=logging.ERROR, noiselevel=-1)
 +                                      l = l[3:].rstrip("\n")
 +                                      if not l:
 +                                              continue
 +                                      fields = l.split(";")
 +                                      if len(fields) < 4:
 +                                              writemsg_level("\nWrong number of fields " + \
 +                                                      "returned from scanmacho: %s\n\n" % (l,),
 +                                                      level=logging.ERROR, noiselevel=-1)
 +                                              continue
 +                                      fields[1] = fields[1][root_len:]
 +                                      plibs.discard(fields[1])
 +                                      lines.append(("scanmacho", ";".join(fields)))
 +                              proc.wait()
 +
 +              if plibs:
 +                      # Preserved libraries that did not appear in the scanmacho
 +                      # output.  This is known to happen with statically linked
 +                      # libraries.  Generate dummy lines for these, so we can
 +                      # assume that every preserved library has an entry in
 +                      # self._obj_properties.  This is important in order to
 +                      # prevent findConsumers from raising an unwanted KeyError.
 +                      for x in plibs:
 +                              lines.append(("plibs", ";".join(['', x, '', '', ''])))
 +
 +              for location, l in lines:
 +                      l = l.rstrip("\n")
 +                      if not l:
 +                              continue
 +                      fields = l.split(";")
 +                      if len(fields) < 4:
 +                              writemsg_level("\nWrong number of fields " + \
 +                                      "in %s: %s\n\n") % (location, l),
 +                                      level=logging.ERROR, noiselevel=-1)
 +                              continue
 +                      arch = fields[0]
 +                      obj = fields[1]
 +                      install_name = os.path.normpath(fields[2])
 +                      needed = filter(None, fields[3].split(","))
 +
 +                      obj_key = self._obj_key(obj)
 +                      indexed = True
 +                      myprops = obj_properties.get(obj_key)
 +                      if myprops is None:
 +                              indexed = False
 +                              myprops = (arch, needed, install_name, set())
 +                              obj_properties[obj_key] = myprops
 +                      # All object paths are added into the obj_properties tuple.
 +                      myprops[3].add(obj)
 +
 +                      # Don't index the same file more that once since only one
 +                      # set of data can be correct and therefore mixing data
 +                      # may corrupt the index (include_file overrides previously
 +                      # installed).
 +                      if indexed:
 +                              continue
 +
 +                      arch_map = libs.get(arch)
 +                      if arch_map is None:
 +                              arch_map = {}
 +                              libs[arch] = arch_map
 +                      if install_name:
 +                              installname_map = arch_map.get(install_name)
 +                              if installname_map is None:
 +                                      installname_map = self._installname_map_class(
 +                                              providers=set(), consumers=set())
 +                                      arch_map[install_name] = installname_map
 +                              installname_map.providers.add(obj_key)
 +                      for needed_installname in needed:
 +                              installname_map = arch_map.get(needed_installname)
 +                              if installname_map is None:
 +                                      installname_map = self._installname_map_class(
 +                                              providers=set(), consumers=set())
 +                                      arch_map[needed_installname] = installname_map
 +                              installname_map.consumers.add(obj_key)
 +
 +      def listBrokenBinaries(self, debug=False):
 +              """
 +              Find binaries and their needed install_names, which have no providers.
 +
 +              @param debug: Boolean to enable debug output
 +              @type debug: Boolean
 +              @rtype: dict (example: {'/usr/bin/foo': set(['/usr/lib/libbar.dylib'])})
 +              @return: The return value is an object -> set-of-install_names
 +                      mapping, where object is a broken binary and the set
 +                      consists of install_names needed by object that have no
 +                      corresponding libraries to fulfill the dependency.
 +
 +              """
 +
 +              os = _os_merge
 +
 +              class _LibraryCache(object):
 +
 +                      """
 +                      Caches properties associated with paths.
 +
 +                      The purpose of this class is to prevent multiple instances of
 +                      _ObjectKey for the same paths.
 +
 +                      """
 +
 +                      def __init__(cache_self):
 +                              cache_self.cache = {}
 +
 +                      def get(cache_self, obj):
 +                              """
 +                              Caches and returns properties associated with an object.
 +
 +                              @param obj: absolute path (can be symlink)
 +                              @type obj: string (example: '/usr/lib/libfoo.dylib')
 +                              @rtype: 4-tuple with types
 +                                      (string or None, string or None, 2-tuple, Boolean)
 +                              @return: 4-tuple with the following components:
 +                                      1. arch as a string or None if it does not exist,
 +                                      2. soname as a string or None if it does not exist,
 +                                      3. obj_key as 2-tuple,
 +                                      4. Boolean representing whether the object exists.
 +                                      (example: ('libfoo.1.dylib', (123L, 456L), True))
 +
 +                              """
 +                              if obj in cache_self.cache:
 +                                      return cache_self.cache[obj]
 +                              else:
 +                                      obj_key = self._obj_key(obj)
 +                                      # Check that the library exists on the filesystem.
 +                                      if obj_key.file_exists():
 +                                              # Get the install_name from LinkageMapMachO._obj_properties if
 +                                              # it exists. Otherwise, None.
 +                                              arch = self._obj_properties.get(obj_key, (None,)*4)[0]
 +                                              install_name = self._obj_properties.get(obj_key, (None,)*4)[2]
 +                                              return cache_self.cache.setdefault(obj, \
 +                                                              (arch, install_name, obj_key, True))
 +                                      else:
 +                                              return cache_self.cache.setdefault(obj, \
 +                                                              (None, None, obj_key, False))
 +
 +              rValue = {}
 +              cache = _LibraryCache()
 +              providers = self.listProviders()
 +
 +              # Iterate over all obj_keys and their providers.
 +              for obj_key, install_names in providers.items():
 +                      arch = self._obj_properties[obj_key][0]
 +                      objs = self._obj_properties[obj_key][3]
 +                      # Iterate over each needed install_name and the set of
 +                      # library paths that fulfill the install_name to determine
 +                      # if the dependency is broken.
 +                      for install_name, libraries in install_names.items():
 +                              # validLibraries is used to store libraries, which
 +                              # satisfy install_name, so if no valid libraries are
 +                              # found, the install_name is not satisfied for obj_key.
 +                              # If unsatisfied, objects associated with obj_key must
 +                              # be emerged.
 +                              validLibrary = set() # for compat with LinkageMap
 +                              cachedArch, cachedInstallname, cachedKey, cachedExists = \
 +                                              cache.get(install_name)
 +                              # Check that the this library provides the needed soname.  Doing
 +                              # this, however, will cause consumers of libraries missing
 +                              # sonames to be unnecessarily emerged. (eg libmix.so)
 +                              if cachedInstallname == install_name and cachedArch == arch:
 +                                      validLibrary.add(cachedKey)
 +                                      if debug and cachedKey not in \
 +                                                      set(map(self._obj_key_cache.get, libraries)):
 +                                              # XXX This is most often due to soname symlinks not in
 +                                              # a library's directory.  We could catalog symlinks in
 +                                              # LinkageMap to avoid checking for this edge case here.
 +                                              print(_("Found provider outside of findProviders:"), \
 +                                                              install_name, "->", cachedRealpath)
 +                              if debug and cachedArch == arch and \
 +                                              cachedKey in self._obj_properties:
 +                                      print(_("Broken symlink or missing/bad install_name:"), \
 +                                                      install_name, '->', cachedRealpath, \
 +                                                      "with install_name", cachedInstallname, "but expecting", install_name)
 +                              # This conditional checks if there are no libraries to
 +                              # satisfy the install_name (empty set).
 +                              if not validLibrary:
 +                                      for obj in objs:
 +                                              rValue.setdefault(obj, set()).add(install_name)
 +                                      # If no valid libraries have been found by this
 +                                      # point, then the install_name does not exist in the
 +                                      # filesystem, but if there are libraries (from the
 +                                      # providers mapping), it is likely that soname
 +                                      # symlinks or the actual libraries are missing or
 +                                      # broken.  Thus those libraries are added to rValue
 +                                      # in order to emerge corrupt library packages.
 +                                      for lib in libraries:
 +                                              rValue.setdefault(lib, set()).add(install_name)
 +                                              if debug:
 +                                                      if not os.path.isfile(lib):
 +                                                              writemsg_level(_("Missing library:") + " %s\n" % (lib,),
 +                                                                      level=logging.DEBUG,
 +                                                                      noiselevel=-1)
 +                                                      else:
 +                                                              writemsg_level(_("Possibly missing symlink:") + \
 +                                                                      "%s\n" % (os.path.join(os.path.dirname(lib), soname)),
 +                                                                      level=logging.DEBUG,
 +                                                                      noiselevel=-1)
 +              return rValue
 +
 +      def listProviders(self):
 +              """
 +              Find the providers for all object keys in LinkageMap.
 +
 +              @rtype: dict (example:
 +                      {(123L, 456L): {'libbar.dylib': set(['/lib/libbar.1.5.dylib'])}})
 +              @return: The return value is an object -> providers mapping, where
 +                      providers is a mapping of install_name ->
 +                      set-of-library-paths returned from the findProviders method.
 +
 +              """
 +              rValue = {}
 +              if not self._libs:
 +                      self.rebuild()
 +              # Iterate over all object keys within LinkageMapMachO.
 +              for obj_key in self._obj_properties:
 +                      rValue.setdefault(obj_key, self.findProviders(obj_key))
 +              return rValue
 +
 +      def isMasterLink(self, obj):
 +              """
 +              Determine whether an object is a master link.
 +
 +              @param obj: absolute path to an object
 +              @type obj: string (example: '/usr/bin/foo')
 +              @rtype: Boolean
 +              @return:
 +                      1. True if obj is a master link
 +                      2. False if obj is not a master link
 +
 +              """
 +              os = _os_merge
 +              basename = os.path.basename(obj)
 +              obj_key = self._obj_key(obj)
 +              if obj_key not in self._obj_properties:
 +                      raise KeyError("%s (%s) not in object list" % (obj_key, obj))
 +              install_name = self._obj_properties[obj_key][2]
 +              return (len(basename) < len(os.path.basename(install_name)))
 +
 +      def listLibraryObjects(self):
 +              """
 +              Return a list of library objects.
 +
 +              Known limitation: library objects lacking an soname are not included.
 +
 +              @rtype: list of strings
 +              @return: list of paths to all providers
 +
 +              """
 +              rValue = []
 +              if not self._libs:
 +                      self.rebuild()
 +              for arch_map in self._libs.itervalues():
 +                      for soname_map in arch_map.itervalues():
 +                              for obj_key in soname_map.providers:
 +                                      rValue.extend(self._obj_properties[obj_key][3])
 +              return rValue
 +
 +      def getSoname(self, obj):
 +              """
 +              Return the soname associated with an object.
 +
 +              @param obj: absolute path to an object
 +              @type obj: string (example: '/usr/bin/bar')
 +              @rtype: string
 +              @return: soname as a string
 +
 +              """
 +              if not self._libs:
 +                      self.rebuild()
 +              if isinstance(obj, self._ObjectKey):
 +                      obj_key = obj
 +                      if obj_key not in self._obj_properties:
 +                              raise KeyError("%s not in object list" % obj_key)
 +                      return self._obj_properties[obj_key][2]
 +              if obj not in self._obj_key_cache:
 +                      raise KeyError("%s not in object list" % obj)
 +              return self._obj_properties[self._obj_key_cache[obj]][2]
 +
 +      def findProviders(self, obj):
 +              """
 +              Find providers for an object or object key.
 +
 +              This method may be called with a key from _obj_properties.
 +
 +              In some cases, not all valid libraries are returned.  This may occur when
 +              an soname symlink referencing a library is in an object's runpath while
 +              the actual library is not.  We should consider cataloging symlinks within
 +              LinkageMap as this would avoid those cases and would be a better model of
 +              library dependencies (since the dynamic linker actually searches for
 +              files named with the soname in the runpaths).
 +
 +              @param obj: absolute path to an object or a key from _obj_properties
 +              @type obj: string (example: '/usr/bin/bar') or _ObjectKey
 +              @rtype: dict (example: {'libbar.dylib': set(['/lib/libbar.1.5.dylib'])})
 +              @return: The return value is a install_name -> set-of-library-paths, where
 +              set-of-library-paths satisfy install_name.
 +
 +              """
 +
 +              os = _os_merge
 +
 +              rValue = {}
 +
 +              if not self._libs:
 +                      self.rebuild()
 +
 +              # Determine the obj_key from the arguments.
 +              if isinstance(obj, self._ObjectKey):
 +                      obj_key = obj
 +                      if obj_key not in self._obj_properties:
 +                              raise KeyError("%s not in object list" % obj_key)
 +              else:
 +                      obj_key = self._obj_key(obj)
 +                      if obj_key not in self._obj_properties:
 +                              raise KeyError("%s (%s) not in object list" % (obj_key, obj))
 +
 +              arch, needed, install_name, _ = self._obj_properties[obj_key]
 +              for install_name in needed:
 +                      rValue[install_name] = set()
 +                      if arch not in self._libs or install_name not in self._libs[arch]:
 +                              continue
 +                      # For each potential provider of the install_name, add it to
 +                      # rValue if it exists.  (Should be one)
 +                      for provider_key in self._libs[arch][install_name].providers:
 +                              providers = self._obj_properties[provider_key][3]
 +                              for provider in providers:
 +                                      if os.path.exists(provider):
 +                                              rValue[install_name].add(provider)
 +              return rValue
 +
 +      def findConsumers(self, obj):
 +              """
 +              Find consumers of an object or object key.
 +
 +              This method may be called with a key from _obj_properties.  If this
 +              method is going to be called with an object key, to avoid not catching
 +              shadowed libraries, do not pass new _ObjectKey instances to this method.
 +              Instead pass the obj as a string.
 +
 +              In some cases, not all consumers are returned.  This may occur when
 +              an soname symlink referencing a library is in an object's runpath while
 +              the actual library is not. For example, this problem is noticeable for
 +                                      binutils since its libraries are added to the path via symlinks that
 +                                      are generated in the /usr/$CHOST/lib/ directory by binutils-config.
 +              Failure to recognize consumers of these symlinks makes preserve-libs
 +              fail to preserve binutils libs that are needed by these unrecognized
 +              consumers.
 +
 +              Note that library consumption via dlopen (common for kde plugins) is
 +              currently undetected. However, it is possible to use the
 +              corresponding libtool archive (*.la) files to detect such consumers
 +              (revdep-rebuild is able to detect them).
 +
 +              @param obj: absolute path to an object or a key from _obj_properties
 +              @type obj: string (example: '/usr/bin/bar') or _ObjectKey
 +              @rtype: set of strings (example: set(['/bin/foo', '/usr/bin/bar']))
 +              @return: The return value is a set of paths to all consumers of the
 +              given object or object key.
 +
 +              """
 +
 +              os = _os_merge
 +
 +              rValue = set()
 +
 +              if not self._libs:
 +                      self.rebuild()
 +
 +              # Determine the obj_key and the set of objects matching the arguments.
 +              if isinstance(obj, self._ObjectKey):
 +                      obj_key = obj
 +                      if obj_key not in self._obj_properties:
 +                              raise KeyError("%s not in object list" % obj_key)
 +                      objs = self._obj_properties[obj_key][3]
 +              else:
 +                      objs = set([obj])
 +                      obj_key = self._obj_key(obj)
 +                      if obj_key not in self._obj_properties:
 +                              raise KeyError("%s (%s) not in object list" % (obj_key, obj))
 +
 +              # If there is another version of this lib with the
 +              # same soname and the master link points to that
 +              # other version, this lib will be shadowed and won't
 +              # have any consumers.
 +              if not isinstance(obj, self._ObjectKey):
 +                      install_name = self._obj_properties[obj_key][2]
 +                      master_link = os.path.join(self._root,
 +                                      install_name.lstrip(os.path.sep))
 +                      try:
 +                              master_st = os.stat(master_link)
 +                              obj_st = os.stat(obj)
 +                      except OSError:
 +                              pass
 +                      else:
 +                              if (obj_st.st_dev, obj_st.st_ino) != \
 +                                      (master_st.st_dev, master_st.st_ino):
 +                                      return set()
 +
 +              arch = self._obj_properties[obj_key][0]
 +              install_name = self._obj_properties[obj_key][2]
 +              if arch in self._libs and install_name in self._libs[arch]:
 +                      # For each potential consumer, add it to rValue if an object from the
 +                      # arguments resides in the consumer's runpath.
 +                      for consumer_key in self._libs[arch][install_name].consumers:
 +                              consumer_objs = self._obj_properties[consumer_key][3]
 +                              rValue.update(consumer_objs)
 +              return rValue
index c90947ee7d0566f6714765aae01db3a98af7df2d,0000000000000000000000000000000000000000..25e8a45c279e12450b955a8dda4de99ea0bd672b
mode 100644,000000..100644
--- /dev/null
@@@ -1,267 -1,0 +1,274 @@@
-               def __init__(self, obj, root):
-                       LinkageMapPeCoff._ObjectKey.__init__(self, obj, root)
 +# Copyright 1998-2011 Gentoo Foundation
 +# Distributed under the terms of the GNU General Public License v2
 +
 +import errno
 +import logging
 +import subprocess
 +
 +import portage
 +from portage import _encodings
 +from portage import _os_merge
 +from portage import _unicode_decode
 +from portage import _unicode_encode
 +from portage.cache.mappings import slot_dict_class
 +from portage.exception import CommandNotFound
 +from portage.localization import _
 +from portage.util import getlibpaths
 +from portage.util import grabfile
 +from portage.util import normalize_path
 +from portage.util import writemsg_level
 +from portage.const import EPREFIX
 +from portage.util._dyn_libs.LinkageMapELF import LinkageMapELF
 +
 +class LinkageMapPeCoff(LinkageMapELF):
 +
 +      """Models dynamic linker dependencies."""
 +
 +      # NEEDED.PECOFF.1 has effectively the _same_ format as NEEDED.ELF.2,
 +      # but we keep up the relation "scanelf" -> "NEEDED.ELF", "readpecoff" ->
 +      # "NEEDED.PECOFF", "scanmacho" -> "NEEDED.MACHO", etc. others will follow.
 +      _needed_aux_key = "NEEDED.PECOFF.1"
 +
 +      class _ObjectKey(LinkageMapELF._ObjectKey):
 +
 +              """Helper class used as _obj_properties keys for objects."""
 +
 +              def _generate_object_key(self, obj, root):
 +                      """
 +                      Generate object key for a given object. This is different from the
 +                      Linux implementation, since some systems (e.g. interix) don't have
 +                      "inodes", thus the inode field is always zero, or a random value,
 +                      making it inappropriate for identifying a file... :)
 +
 +                      @param obj: path to a file
 +                      @type obj: string (example: '/usr/bin/bar')
 +                      @rtype: 2-tuple of types (bool, string)
 +                      @return:
 +                              2-tuple of boolean indicating existence, and absolute path
 +                      """
 +
 +                      os = _os_merge
 +
 +                      try:
 +                              _unicode_encode(obj,
 +                                      encoding=_encodings['merge'], errors='strict')
 +                      except UnicodeEncodeError:
 +                              # The package appears to have been merged with a 
 +                              # different value of sys.getfilesystemencoding(),
 +                              # so fall back to utf_8 if appropriate.
 +                              try:
 +                                      _unicode_encode(obj,
 +                                              encoding=_encodings['fs'], errors='strict')
 +                              except UnicodeEncodeError:
 +                                      pass
 +                              else:
 +                                      os = portage.os
 +
 +                      abs_path = os.path.join(root, obj.lstrip(os.sep))
 +                      try:
 +                              object_stat = os.stat(abs_path)
 +                      except OSError:
 +                              return (False, os.path.realpath(abs_path))
 +                      # On Interix, the inode field may always be zero, since the
 +                      # filesystem (NTFS) has no inodes ...
 +                      return (True, os.path.realpath(abs_path))
 +
 +              def file_exists(self):
 +                      """
 +                      Determine if the file for this key exists on the filesystem.
 +
 +                      @rtype: Boolean
 +                      @return:
 +                              1. True if the file exists.
 +                              2. False if the file does not exist or is a broken symlink.
 +
 +                      """
 +                      return self._key[0]
 +
 +      class _LibGraphNode(_ObjectKey):
 +              __slots__ = ("alt_paths",)
 +
++              def __init__(self, key):
++                      """
++                      Create a _LibGraphNode from an existing _ObjectKey.
++                      This re-uses the _key attribute in order to avoid repeating
++                      any previous stat calls, which helps to avoid potential race
++                      conditions due to inconsistent stat results when the
++                      file system is being modified concurrently.
++                      """
++                      self._key = key._key
 +                      self.alt_paths = set()
 +
 +              def __str__(self):
 +                      return str(sorted(self.alt_paths))
 +
 +      def rebuild(self, exclude_pkgs=None, include_file=None,
 +              preserve_paths=None):
 +              """
 +              Raises CommandNotFound if there are preserved libs
 +              and the readpecoff binary is not available.
 +
 +              @param exclude_pkgs: A set of packages that should be excluded from
 +                      the LinkageMap, since they are being unmerged and their NEEDED
 +                      entries are therefore irrelevant and would only serve to corrupt
 +                      the LinkageMap.
 +              @type exclude_pkgs: set
 +              @param include_file: The path of a file containing NEEDED entries for
 +                      a package which does not exist in the vardbapi yet because it is
 +                      currently being merged.
 +              @type include_file: String
 +              @param preserve_paths: Libraries preserved by a package instance that
 +                      is currently being merged. They need to be explicitly passed to the
 +                      LinkageMap, since they are not registered in the
 +                      PreservedLibsRegistry yet.
 +              @type preserve_paths: set
 +              """
 +
 +              os = _os_merge
 +              root = self._root
 +              root_len = len(root) - 1
 +              self._clear_cache()
 +              self._defpath.update(getlibpaths(self._root))
 +              libs = self._libs
 +              obj_properties = self._obj_properties
 +
 +              lines = []
 +
 +              # Data from include_file is processed first so that it
 +              # overrides any data from previously installed files.
 +              if include_file is not None:
 +                      for line in grabfile(include_file):
 +                              lines.append((include_file, line))
 +
 +              aux_keys = [self._needed_aux_key]
 +              can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
 +              if can_lock:
 +                      self._dbapi.lock()
 +              try:
 +                      for cpv in self._dbapi.cpv_all():
 +                              if exclude_pkgs is not None and cpv in exclude_pkgs:
 +                                      continue
 +                              needed_file = self._dbapi.getpath(cpv,
 +                                      filename=self._needed_aux_key)
 +                              for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
 +                                      lines.append((needed_file, line))
 +              finally:
 +                      if can_lock:
 +                              self._dbapi.unlock()
 +
 +              # have to call readpecoff for preserved libs here as they aren't 
 +              # registered in NEEDED.PECOFF.1 files
 +              plibs = set()
 +              if preserve_paths is not None:
 +                      plibs.update(preserve_paths)
 +              if self._dbapi._plib_registry and \
 +                      self._dbapi._plib_registry.hasEntries():
 +                      for cpv, items in \
 +                              self._dbapi._plib_registry.getPreservedLibs().items():
 +                              if exclude_pkgs is not None and cpv in exclude_pkgs:
 +                                      # These preserved libs will either be unmerged,
 +                                      # rendering them irrelevant, or they will be
 +                                      # preserved in the replacement package and are
 +                                      # already represented via the preserve_paths
 +                                      # parameter.
 +                                      continue
 +                              plibs.update(items)
 +              if plibs:
 +                      args = ["readpecoff", self._dbapi.settings.get('CHOST')]
 +                      args.extend(os.path.join(root, x.lstrip("." + os.sep)) \
 +                              for x in plibs)
 +                      try:
 +                              proc = subprocess.Popen(args, stdout=subprocess.PIPE)
 +                      except EnvironmentError as e:
 +                              if e.errno != errno.ENOENT:
 +                                      raise
 +                              raise CommandNotFound(args[0])
 +                      else:
 +                              for l in proc.stdout:
 +                                      try:
 +                                              l = _unicode_decode(l,
 +                                                      encoding=_encodings['content'], errors='strict')
 +                                      except UnicodeDecodeError:
 +                                              l = _unicode_decode(l,
 +                                                      encoding=_encodings['content'], errors='replace')
 +                                              writemsg_level(_("\nError decoding characters " \
 +                                                      "returned from readpecoff: %s\n\n") % (l,),
 +                                                      level=logging.ERROR, noiselevel=-1)
 +                                      l = l[3:].rstrip("\n")
 +                                      if not l:
 +                                              continue
 +                                      fields = l.split(";")
 +                                      if len(fields) < 5:
 +                                              writemsg_level(_("\nWrong number of fields " \
 +                                                      "returned from readpecoff: %s\n\n") % (l,),
 +                                                      level=logging.ERROR, noiselevel=-1)
 +                                              continue
 +                                      fields[1] = fields[1][root_len:]
 +                                      plibs.discard(fields[1])
 +                                      lines.append(("readpecoff", ";".join(fields)))
 +                              proc.wait()
 +
 +              if plibs:
 +                      # Preserved libraries that did not appear in the readpecoff output.
 +                      # This is known to happen with statically linked libraries.
 +                      # Generate dummy lines for these, so we can assume that every
 +                      # preserved library has an entry in self._obj_properties. This
 +                      # is important in order to prevent findConsumers from raising
 +                      # an unwanted KeyError.
 +                      for x in plibs:
 +                              lines.append(("plibs", ";".join(['', x, '', '', ''])))
 +
 +              for location, l in lines:
 +                      l = l.rstrip("\n")
 +                      if not l:
 +                              continue
 +                      fields = l.split(";")
 +                      if len(fields) < 5:
 +                              writemsg_level(_("\nWrong number of fields " \
 +                                      "in %s: %s\n\n") % (location, l),
 +                                      level=logging.ERROR, noiselevel=-1)
 +                              continue
 +                      arch = fields[0]
 +                      obj = fields[1]
 +                      soname = fields[2]
 +                      path = set([normalize_path(x) \
 +                              for x in filter(None, fields[3].replace(
 +                              "${ORIGIN}", os.path.dirname(obj)).replace(
 +                              "$ORIGIN", os.path.dirname(obj)).split(":"))])
 +                      needed = [x for x in fields[4].split(",") if x]
 +
 +                      obj_key = self._obj_key(obj)
 +                      indexed = True
 +                      myprops = obj_properties.get(obj_key)
 +                      if myprops is None:
 +                              indexed = False
 +                              myprops = (arch, needed, path, soname, set())
 +                              obj_properties[obj_key] = myprops
 +                      # All object paths are added into the obj_properties tuple.
 +                      myprops[4].add(obj)
 +
 +                      # Don't index the same file more that once since only one
 +                      # set of data can be correct and therefore mixing data
 +                      # may corrupt the index (include_file overrides previously
 +                      # installed).
 +                      if indexed:
 +                              continue
 +
 +                      arch_map = libs.get(arch)
 +                      if arch_map is None:
 +                              arch_map = {}
 +                              libs[arch] = arch_map
 +                      if soname:
 +                              soname_map = arch_map.get(soname)
 +                              if soname_map is None:
 +                                      soname_map = self._soname_map_class(
 +                                              providers=set(), consumers=set())
 +                                      arch_map[soname] = soname_map
 +                              soname_map.providers.add(obj_key)
 +                      for needed_soname in needed:
 +                              soname_map = arch_map.get(needed_soname)
 +                              if soname_map is None:
 +                                      soname_map = self._soname_map_class(
 +                                              providers=set(), consumers=set())
 +                                      arch_map[needed_soname] = soname_map
 +                              soname_map.consumers.add(obj_key)
index 0e930fee676423f9a960cd13bfa661a42cddfb0f,0000000000000000000000000000000000000000..782cc542d7362a8148550d9145b6227bde73ab1a
mode 100644,000000..100644
--- /dev/null
@@@ -1,319 -1,0 +1,326 @@@
-               def __init__(self, obj, root):
-                       LinkageMapXCoff._ObjectKey.__init__(self, obj, root)
 +# Copyright 1998-2011 Gentoo Foundation
 +# Distributed under the terms of the GNU General Public License v2
 +
 +import errno
 +import logging
 +import subprocess
 +
 +import portage
 +from portage import _encodings
 +from portage import _os_merge
 +from portage import _unicode_decode
 +from portage import _unicode_encode
 +from portage.cache.mappings import slot_dict_class
 +from portage.exception import CommandNotFound
 +from portage.localization import _
 +from portage.util import getlibpaths
 +from portage.util import grabfile
 +from portage.util import normalize_path
 +from portage.util import writemsg_level
 +from portage.const import EPREFIX, BASH_BINARY
 +from portage.util._dyn_libs.LinkageMapELF import LinkageMapELF
 +
 +class LinkageMapXCoff(LinkageMapELF):
 +
 +      """Models dynamic linker dependencies."""
 +
 +      _needed_aux_key = "NEEDED.XCOFF.1"
 +
 +      class _ObjectKey(LinkageMapELF._ObjectKey):
 +
 +              def __init__(self, obj, root):
 +                      LinkageMapELF._ObjectKey.__init__(self, obj, root)
 +
 +              def _generate_object_key(self, obj, root):
 +                      """
 +                      Generate object key for a given object.
 +
 +                      @param obj: path to a file
 +                      @type obj: string (example: '/usr/bin/bar')
 +                      @rtype: 2-tuple of types (long, int) if object exists. string if
 +                              object does not exist.
 +                      @return:
 +                              1. 2-tuple of object's inode and device from a stat call, if object
 +                                      exists.
 +                              2. realpath of object if object does not exist.
 +
 +                      """
 +
 +                      os = _os_merge
 +
 +                      try:
 +                              _unicode_encode(obj,
 +                                      encoding=_encodings['merge'], errors='strict')
 +                      except UnicodeEncodeError:
 +                              # The package appears to have been merged with a 
 +                              # different value of sys.getfilesystemencoding(),
 +                              # so fall back to utf_8 if appropriate.
 +                              try:
 +                                      _unicode_encode(obj,
 +                                              encoding=_encodings['fs'], errors='strict')
 +                              except UnicodeEncodeError:
 +                                      pass
 +                              else:
 +                                      os = portage.os
 +
 +                      abs_path = os.path.join(root, obj.lstrip(os.sep))
 +                      try:
 +                              object_stat = os.stat(abs_path)
 +                      except OSError:
 +                              # Use the realpath as the key if the file does not exists on the
 +                              # filesystem.
 +                              return os.path.realpath(abs_path)
 +                      # Return a tuple of the device and inode, as well as the basename,
 +                      # because of hardlinks the device and inode might be identical.
 +                      return (object_stat.st_dev, object_stat.st_ino, os.path.basename(abs_path.rstrip(os.sep)))
 +
 +              def file_exists(self):
 +                      """
 +                      Determine if the file for this key exists on the filesystem.
 +
 +                      @rtype: Boolean
 +                      @return:
 +                              1. True if the file exists.
 +                              2. False if the file does not exist or is a broken symlink.
 +
 +                      """
 +                      return isinstance(self._key, tuple)
 +
 +      class _LibGraphNode(_ObjectKey):
 +              __slots__ = ("alt_paths",)
 +
++              def __init__(self, key):
++                      """
++                      Create a _LibGraphNode from an existing _ObjectKey.
++                      This re-uses the _key attribute in order to avoid repeating
++                      any previous stat calls, which helps to avoid potential race
++                      conditions due to inconsistent stat results when the
++                      file system is being modified concurrently.
++                      """
++                      self._key = key._key
 +                      self.alt_paths = set()
 +
 +              def __str__(self):
 +                      return str(sorted(self.alt_paths))
 +
 +      def rebuild(self, exclude_pkgs=None, include_file=None,
 +              preserve_paths=None):
 +              """
 +              Raises CommandNotFound if there are preserved libs
 +              and the bash/aixdll-query tooling is not available.
 +
 +              @param exclude_pkgs: A set of packages that should be excluded from
 +                      the LinkageMap, since they are being unmerged and their NEEDED
 +                      entries are therefore irrelevant and would only serve to corrupt
 +                      the LinkageMap.
 +              @type exclude_pkgs: set
 +              @param include_file: The path of a file containing NEEDED entries for
 +                      a package which does not exist in the vardbapi yet because it is
 +                      currently being merged.
 +              @type include_file: String
 +              @param preserve_paths: Libraries preserved by a package instance that
 +                      is currently being merged. They need to be explicitly passed to the
 +                      LinkageMap, since they are not registered in the
 +                      PreservedLibsRegistry yet.
 +              @type preserve_paths: set
 +              """
 +
 +              os = _os_merge
 +              root = self._root
 +              root_len = len(root) - 1
 +              self._clear_cache()
 +              self._defpath.update(getlibpaths(self._root))
 +              libs = self._libs
 +              obj_properties = self._obj_properties
 +
 +              lines = []
 +
 +              # Data from include_file is processed first so that it
 +              # overrides any data from previously installed files.
 +              if include_file is not None:
 +                      for line in grabfile(include_file):
 +                              lines.append((include_file, line))
 +
 +              aux_keys = [self._needed_aux_key]
 +              can_lock = os.access(os.path.dirname(self._dbapi._dbroot), os.W_OK)
 +              if can_lock:
 +                      self._dbapi.lock()
 +              try:
 +                      for cpv in self._dbapi.cpv_all():
 +                              if exclude_pkgs is not None and cpv in exclude_pkgs:
 +                                      continue
 +                              needed_file = self._dbapi.getpath(cpv,
 +                                      filename=self._needed_aux_key)
 +                              for line in self._dbapi.aux_get(cpv, aux_keys)[0].splitlines():
 +                                      lines.append((needed_file, line))
 +              finally:
 +                      if can_lock:
 +                              self._dbapi.unlock()
 +
 +              # have to call aixdll-query for preserved libs here as they aren't 
 +              # registered in NEEDED.XCOFF.1 files
 +              plibs = set()
 +              if preserve_paths is not None:
 +                      plibs.update(preserve_paths)
 +              if self._dbapi._plib_registry and \
 +                      self._dbapi._plib_registry.hasEntries():
 +                      for cpv, items in \
 +                              self._dbapi._plib_registry.getPreservedLibs().items():
 +                              if exclude_pkgs is not None and cpv in exclude_pkgs:
 +                                      # These preserved libs will either be unmerged,
 +                                      # rendering them irrelevant, or they will be
 +                                      # preserved in the replacement package and are
 +                                      # already represented via the preserve_paths
 +                                      # parameter.
 +                                      continue
 +                              plibs.update(items)
 +              if plibs:
 +                      for x in plibs:
 +                              args = [BASH_BINARY, "-c", ':'
 +                                      + '; member="' + x + '"'
 +                                      + '; archive=${member}'
 +                                      + '; if [[ ${member##*/} == .*"["*"]" ]]'
 +                                      + '; then member=${member%/.*}/${member##*/.}'
 +                                               + '; archive=${member%[*}'
 +                                      + '; fi'
 +                                      + '; member=${member#${archive}}'
 +                                      + '; [[ -r ${archive} ]] || chmod a+r "${archive}"'
 +                                      + '; eval $(aixdll-query "${archive}${member}" FILE MEMBER FLAGS FORMAT RUNPATH DEPLIBS)'
 +                                      + '; [[ -n ${member} ]] && needed=${FILE##*/} || needed='
 +                                      + '; for deplib in ${DEPLIBS}'
 +                                      + '; do eval deplib=${deplib}'
 +                                         + '; if [[ ${deplib} != "." && ${deplib} != ".." ]]'
 +                                         + '; then needed="${needed}${needed:+,}${deplib}"'
 +                                         + '; fi'
 +                                      + '; done'
 +                                      + '; [[ -n ${MEMBER} ]] && MEMBER="[${MEMBER}]"'
 +                                      + '; [[ " ${FLAGS} " == *" SHROBJ "* ]] && soname=${FILE##*/}${MEMBER} || soname='
 +                                      + '; echo "${FORMAT##* }${FORMAT%%-*};${FILE#${ROOT%/}}${MEMBER};${soname};${RUNPATH};${needed}"'
 +                                      + '; [[ -z ${member} && -n ${MEMBER} ]] && echo "${FORMAT##* }${FORMAT%%-*};${FILE#${ROOT%/}};${FILE##*/};;"'
 +                              ]
 +                      try:
 +                              proc = subprocess.Popen(args, stdout=subprocess.PIPE)
 +                      except EnvironmentError as e:
 +                              if e.errno != errno.ENOENT:
 +                                      raise
 +                              raise CommandNotFound(args[0])
 +                      else:
 +                              for l in proc.stdout:
 +                                      try:
 +                                              l = _unicode_decode(l,
 +                                                      encoding=_encodings['content'], errors='strict')
 +                                      except UnicodeDecodeError:
 +                                              l = _unicode_decode(l,
 +                                                      encoding=_encodings['content'], errors='replace')
 +                                              writemsg_level(_("\nError decoding characters " \
 +                                                      "returned from aixdll-query: %s\n\n") % (l,),
 +                                                      level=logging.ERROR, noiselevel=-1)
 +                                      l = l.rstrip("\n")
 +                                      if not l:
 +                                              continue
 +                                      fields = l.split(";")
 +                                      if len(fields) < 5:
 +                                              writemsg_level(_("\nWrong number of fields " \
 +                                                      "returned from aixdll-query: %s\n\n") % (l,),
 +                                                      level=logging.ERROR, noiselevel=-1)
 +                                              continue
 +                                      fields[1] = fields[1][root_len:]
 +                                      plibs.discard(fields[1])
 +                                      lines.append(("aixdll-query", ";".join(fields)))
 +                              proc.wait()
 +
 +              if plibs:
 +                      # Preserved libraries that did not appear in the bash
 +                      # aixdll-query code output.  This is known to happen with
 +                      # statically linked libraries.  Generate dummy lines for
 +                      # these, so we can assume that every preserved library has
 +                      # an entry in self._obj_properties.  This is important in
 +                      # order to prevent findConsumers from raising an unwanted
 +                      # KeyError.
 +                      for x in plibs:
 +                              lines.append(("plibs", ";".join(['', x, '', '', ''])))
 +
 +              for location, l in lines:
 +                      l = l.rstrip("\n")
 +                      if not l:
 +                              continue
 +                      fields = l.split(";")
 +                      if len(fields) < 5:
 +                              writemsg_level(_("\nWrong number of fields " \
 +                                      "in %s: %s\n\n") % (location, l),
 +                                      level=logging.ERROR, noiselevel=-1)
 +                              continue
 +                      arch = fields[0]
 +
 +                      def as_contentmember(obj):
 +                              if obj.endswith("]"):
 +                                      if obj.find("/") >= 0:
 +                                              return obj[:obj.rfind("/")] + "/." + obj[obj.rfind("/")+1:]
 +                                      return "." + obj
 +                              return obj
 +
 +                      obj = as_contentmember(fields[1])
 +                      soname = as_contentmember(fields[2])
 +                      path = set([normalize_path(x) \
 +                              for x in filter(None, fields[3].replace(
 +                              "${ORIGIN}", os.path.dirname(obj)).replace(
 +                              "$ORIGIN", os.path.dirname(obj)).split(":"))])
 +                      needed = [as_contentmember(x) for x in fields[4].split(",") if x]
 +
 +                      obj_key = self._obj_key(obj)
 +                      indexed = True
 +                      myprops = obj_properties.get(obj_key)
 +                      if myprops is None:
 +                              indexed = False
 +                              myprops = (arch, needed, path, soname, set())
 +                              obj_properties[obj_key] = myprops
 +                      # All object paths are added into the obj_properties tuple.
 +                      myprops[4].add(obj)
 +
 +                      # Don't index the same file more that once since only one
 +                      # set of data can be correct and therefore mixing data
 +                      # may corrupt the index (include_file overrides previously
 +                      # installed).
 +                      if indexed:
 +                              continue
 +
 +                      arch_map = libs.get(arch)
 +                      if arch_map is None:
 +                              arch_map = {}
 +                              libs[arch] = arch_map
 +                      if soname:
 +                              soname_map = arch_map.get(soname)
 +                              if soname_map is None:
 +                                      soname_map = self._soname_map_class(
 +                                              providers=set(), consumers=set())
 +                                      arch_map[soname] = soname_map
 +                              soname_map.providers.add(obj_key)
 +                      for needed_soname in needed:
 +                              soname_map = arch_map.get(needed_soname)
 +                              if soname_map is None:
 +                                      soname_map = self._soname_map_class(
 +                                              providers=set(), consumers=set())
 +                                      arch_map[needed_soname] = soname_map
 +                              soname_map.consumers.add(obj_key)
 +
 +      def getSoname(self, obj):
 +              """
 +              Return the soname associated with an object.
 +
 +              @param obj: absolute path to an object
 +              @type obj: string (example: '/usr/bin/bar')
 +              @rtype: string
 +              @return: soname as a string
 +
 +              """
 +              if not self._libs:
 +                      self.rebuild()
 +              if isinstance(obj, self._ObjectKey):
 +                      obj_key = obj
 +                      if obj_key not in self._obj_properties:
 +                              raise KeyError("%s not in object list" % obj_key)
 +                      return self._obj_properties[obj_key][3]
 +              if obj not in self._obj_key_cache:
 +                      raise KeyError("%s not in object list" % obj)
 +              return self._obj_properties[self._obj_key_cache[obj]][3]
 +