Use nanosecond precision in portage.util.movefile.movefile().
[portage.git] / pym / portage / dbapi / vartree.py
index ba2cc0886e789455481e064d7c02d27dfa49bf45..08580e8078e887c275d11eff4c505543f766e919 100644 (file)
@@ -1,4 +1,4 @@
-# Copyright 1998-2011 Gentoo Foundation
+# Copyright 1998-2012 Gentoo Foundation
 # Distributed under the terms of the GNU General Public License v2
 
 __all__ = [
@@ -11,8 +11,9 @@ portage.proxy.lazyimport.lazyimport(globals(),
        'portage.data:portage_gid,portage_uid,secpass',
        'portage.dbapi.dep_expand:dep_expand',
        'portage.dbapi._MergeProcess:MergeProcess',
-       'portage.dep:dep_getkey,isjustname,match_from_list,' + \
-               'use_reduce,_slot_re',
+       'portage.dep:dep_getkey,isjustname,isvalidatom,match_from_list,' + \
+               'use_reduce,_get_slot_re',
+       'portage.eapi:_get_eapi_attrs',
        'portage.elog:collect_ebuild_messages,collect_messages,' + \
                'elog_process,_merge_logentries',
        'portage.locks:lockdir,unlockdir,lockfile,unlockfile',
@@ -20,6 +21,7 @@ portage.proxy.lazyimport.lazyimport(globals(),
        'portage.package.ebuild.doebuild:doebuild_environment,' + \
                '_merge_unicode_error', '_spawn_phase',
        'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
+       'portage.package.ebuild._ipc.QueryCommand:QueryCommand',
        'portage.update:fixdbentries',
        'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \
                'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \
@@ -27,10 +29,13 @@ portage.proxy.lazyimport.lazyimport(globals(),
        'portage.util.digraph:digraph',
        'portage.util.env_update:env_update',
        'portage.util.listdir:dircache,listdir',
+       'portage.util.movefile:movefile',
        'portage.util._dyn_libs.PreservedLibsRegistry:PreservedLibsRegistry',
        'portage.util._dyn_libs.LinkageMapELF:LinkageMapELF@LinkageMap',
-       'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,pkgcmp,' + \
-               '_pkgsplit@pkgsplit',
+       'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,vercmp,' + \
+               '_pkgsplit@pkgsplit,_pkg_str',
+       'subprocess',
+       'tarfile',
 )
 
 from portage.const import CACHE_PATH, CONFIG_MEMORY_FILE, \
@@ -41,12 +46,12 @@ from portage.exception import CommandNotFound, \
        InvalidData, InvalidLocation, InvalidPackageName, \
        FileNotFound, PermissionDenied, UnsupportedAPIException
 from portage.localization import _
-from portage.util.movefile import movefile
 
 from portage import abssymlink, _movefile, bsd_chflags
 
 # This is a special version of the os module, wrapped for unicode support.
 from portage import os
+from portage import shutil
 from portage import _encodings
 from portage import _os_merge
 from portage import _selinux_merge
@@ -58,15 +63,18 @@ from _emerge.EbuildPhase import EbuildPhase
 from _emerge.emergelog import emergelog
 from _emerge.PollScheduler import PollScheduler
 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
+from _emerge.SpawnProcess import SpawnProcess
 
 import errno
+import fnmatch
 import gc
+import grp
 import io
 from itertools import chain
 import logging
 import os as _os
+import pwd
 import re
-import shutil
 import stat
 import sys
 import tempfile
@@ -82,6 +90,9 @@ except ImportError:
 if sys.hexversion >= 0x3000000:
        basestring = str
        long = int
+       _unicode = str
+else:
+       _unicode = unicode
 
 class vardbapi(dbapi):
 
@@ -129,12 +140,11 @@ class vardbapi(dbapi):
                if settings is None:
                        settings = portage.settings
                self.settings = settings
-               self.root = settings['ROOT']
 
-               if _unused_param is not None and _unused_param != self.root:
-                       warnings.warn("The first parameter of the " + \
-                               "portage.dbapi.vartree.vardbapi" + \
-                               " constructor is now unused. Use " + \
+               if _unused_param is not None and _unused_param != settings['ROOT']:
+                       warnings.warn("The first parameter of the "
+                               "portage.dbapi.vartree.vardbapi"
+                               " constructor is now unused. Use "
                                "settings['ROOT'] instead.",
                                DeprecationWarning, stacklevel=2)
 
@@ -148,14 +158,14 @@ class vardbapi(dbapi):
                self._fs_lock_count = 0
 
                if vartree is None:
-                       vartree = portage.db[self.root]["vartree"]
+                       vartree = portage.db[settings['EROOT']]['vartree']
                self.vartree = vartree
                self._aux_cache_keys = set(
                        ["BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "DESCRIPTION",
                        "EAPI", "HOMEPAGE", "IUSE", "KEYWORDS",
                        "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND",
                        "repository", "RESTRICT" , "SLOT", "USE", "DEFINED_PHASES",
-                       "REQUIRED_USE"])
+                       ])
                self._aux_cache_obj = None
                self._aux_cache_filename = os.path.join(self._eroot,
                        CACHE_PATH, "vdb_metadata.pickle")
@@ -164,7 +174,7 @@ class vardbapi(dbapi):
 
                self._plib_registry = None
                if _ENABLE_PRESERVE_LIBS:
-                       self._plib_registry = PreservedLibsRegistry(self.root,
+                       self._plib_registry = PreservedLibsRegistry(settings["ROOT"],
                                os.path.join(self._eroot, PRIVATE_PATH,
                                "preserved_libs_registry"))
 
@@ -175,6 +185,15 @@ class vardbapi(dbapi):
 
                self._cached_counter = None
 
+       @property
+       def root(self):
+               warnings.warn("The root attribute of "
+                       "portage.dbapi.vartree.vardbapi"
+                       " is deprecated. Use "
+                       "settings['ROOT'] instead.",
+                       DeprecationWarning, stacklevel=3)
+               return self.settings['ROOT']
+
        def getpath(self, mykey, filename=None):
                # This is an optimized hotspot, so don't use unicode-wrapped
                # os module and don't use os.path.join().
@@ -302,14 +321,24 @@ class vardbapi(dbapi):
                if not origmatches:
                        return moves
                for mycpv in origmatches:
+                       try:
+                               mycpv = self._pkg_str(mycpv, None)
+                       except (KeyError, InvalidData):
+                               continue
                        mycpv_cp = cpv_getkey(mycpv)
                        if mycpv_cp != origcp:
                                # Ignore PROVIDE virtual match.
                                continue
                        if repo_match is not None \
-                               and not repo_match(self.aux_get(mycpv, ['repository'])[0]):
+                               and not repo_match(mycpv.repo):
                                continue
-                       mynewcpv = mycpv.replace(mycpv_cp, str(newcp), 1)
+
+                       # Use isvalidatom() to check if this move is valid for the
+                       # EAPI (characters allowed in package names may vary).
+                       if not isvalidatom(newcp, eapi=mycpv.eapi):
+                               continue
+
+                       mynewcpv = mycpv.replace(mycpv_cp, _unicode(newcp), 1)
                        mynewcat = catsplit(newcp)[0]
                        origpath = self.getpath(mycpv)
                        if not os.path.exists(origpath):
@@ -339,7 +368,7 @@ class vardbapi(dbapi):
                                        del e
                        write_atomic(os.path.join(newpath, "PF"), new_pf+"\n")
                        write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat+"\n")
-                       fixdbentries([mylist], newpath)
+                       fixdbentries([mylist], newpath, eapi=mycpv.eapi)
                return moves
 
        def cp_list(self, mycp, use_cache=1):
@@ -373,7 +402,7 @@ class vardbapi(dbapi):
                                continue
                        if len(mysplit) > 1:
                                if ps[0] == mysplit[1]:
-                                       returnme.append(mysplit[0]+"/"+x)
+                                       returnme.append(_pkg_str(mysplit[0]+"/"+x))
                self._cpv_sort_ascending(returnme)
                if use_cache:
                        self.cpcache[mycp] = [mystat, returnme[:]]
@@ -472,6 +501,7 @@ class vardbapi(dbapi):
                "caching match function"
                mydep = dep_expand(
                        origdep, mydb=self, use_cache=use_cache, settings=self.settings)
+               cache_key = (mydep, mydep.unevaluated_atom)
                mykey = dep_getkey(mydep)
                mycat = catsplit(mykey)[0]
                if not use_cache:
@@ -493,8 +523,8 @@ class vardbapi(dbapi):
                if mydep not in self.matchcache[mycat]:
                        mymatch = list(self._iter_match(mydep,
                                self.cp_list(mydep.cp, use_cache=use_cache)))
-                       self.matchcache[mycat][mydep] = mymatch
-               return self.matchcache[mycat][mydep][:]
+                       self.matchcache[mycat][cache_key] = mymatch
+               return self.matchcache[mycat][cache_key][:]
 
        def findname(self, mycpv, myrepo=None):
                return self.getpath(str(mycpv), filename=catsplit(mycpv)[1]+".ebuild")
@@ -555,8 +585,11 @@ class vardbapi(dbapi):
                        aux_cache = mypickle.load()
                        f.close()
                        del f
-               except (IOError, OSError, EOFError, ValueError, pickle.UnpicklingError) as e:
-                       if isinstance(e, pickle.UnpicklingError):
+               except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError) as e:
+                       if isinstance(e, EnvironmentError) and \
+                               getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES):
+                               pass
+                       else:
                                writemsg(_unicode_decode(_("!!! Error loading '%s': %s\n")) % \
                                        (self._aux_cache_filename, e), noiselevel=-1)
                        del e
@@ -610,7 +643,8 @@ class vardbapi(dbapi):
                                cache_these_wants.add(x)
 
                if not cache_these_wants:
-                       return self._aux_get(mycpv, wants)
+                       mydata = self._aux_get(mycpv, wants)
+                       return [mydata[x] for x in wants]
 
                cache_these = set(self._aux_cache_keys)
                cache_these.update(cache_these_wants)
@@ -623,7 +657,8 @@ class vardbapi(dbapi):
                        if e.errno != errno.ENOENT:
                                raise
                        raise KeyError(mycpv)
-               mydir_mtime = mydir_stat[stat.ST_MTIME]
+               # Use float mtime when available.
+               mydir_mtime = mydir_stat.st_mtime
                pkg_data = self._aux_cache["packages"].get(mycpv)
                pull_me = cache_these.union(wants)
                mydata = {"_mtime_" : mydir_mtime}
@@ -636,13 +671,18 @@ class vardbapi(dbapi):
                                pkg_data = None
                        else:
                                cache_mtime, metadata = pkg_data
-                               if not isinstance(cache_mtime, (long, int)) or \
+                               if not isinstance(cache_mtime, (float, long, int)) or \
                                        not isinstance(metadata, dict):
                                        pkg_data = None
 
                if pkg_data:
                        cache_mtime, metadata = pkg_data
-                       cache_valid = cache_mtime == mydir_mtime
+                       if isinstance(cache_mtime, float):
+                               cache_valid = cache_mtime == mydir_stat.st_mtime
+                       else:
+                               # Cache may contain integer mtime.
+                               cache_valid = cache_mtime == mydir_stat[stat.ST_MTIME]
+
                if cache_valid:
                        # Migrate old metadata to unicode.
                        for k, v in metadata.items():
@@ -655,19 +695,19 @@ class vardbapi(dbapi):
                if pull_me:
                        # pull any needed data and cache it
                        aux_keys = list(pull_me)
-                       for k, v in zip(aux_keys,
-                               self._aux_get(mycpv, aux_keys, st=mydir_stat)):
-                               mydata[k] = v
+                       mydata.update(self._aux_get(mycpv, aux_keys, st=mydir_stat))
                        if not cache_valid or cache_these.difference(metadata):
                                cache_data = {}
                                if cache_valid and metadata:
                                        cache_data.update(metadata)
                                for aux_key in cache_these:
                                        cache_data[aux_key] = mydata[aux_key]
-                               self._aux_cache["packages"][mycpv] = (mydir_mtime, cache_data)
+                               self._aux_cache["packages"][_unicode(mycpv)] = \
+                                       (mydir_mtime, cache_data)
                                self._aux_cache["modified"].add(mycpv)
 
-               if _slot_re.match(mydata['SLOT']) is None:
+               eapi_attrs = _get_eapi_attrs(mydata['EAPI'])
+               if _get_slot_re(eapi_attrs).match(mydata['SLOT']) is None:
                        # Empty or invalid slot triggers InvalidAtom exceptions when
                        # generating slot atoms for packages, so translate it to '0' here.
                        mydata['SLOT'] = _unicode_decode('0')
@@ -688,10 +728,11 @@ class vardbapi(dbapi):
                                        raise
                if not stat.S_ISDIR(st.st_mode):
                        raise KeyError(mycpv)
-               results = []
+               results = {}
+               env_keys = []
                for x in wants:
                        if x == "_mtime_":
-                               results.append(st[stat.ST_MTIME])
+                               results[x] = st[stat.ST_MTIME]
                                continue
                        try:
                                myf = io.open(
@@ -703,16 +744,103 @@ class vardbapi(dbapi):
                                        myd = myf.read()
                                finally:
                                        myf.close()
-                               # Preserve \n for metadata that is known to
-                               # contain multiple lines.
-                               if self._aux_multi_line_re.match(x) is None:
-                                       myd = " ".join(myd.split())
                        except IOError:
+                               if x not in self._aux_cache_keys and \
+                                       self._aux_cache_keys_re.match(x) is None:
+                                       env_keys.append(x)
+                                       continue
                                myd = _unicode_decode('')
-                       if x == "EAPI" and not myd:
-                               results.append(_unicode_decode('0'))
-                       else:
-                               results.append(myd)
+
+                       # Preserve \n for metadata that is known to
+                       # contain multiple lines.
+                       if self._aux_multi_line_re.match(x) is None:
+                               myd = " ".join(myd.split())
+
+                       results[x] = myd
+
+               if env_keys:
+                       env_results = self._aux_env_search(mycpv, env_keys)
+                       for k in env_keys:
+                               v = env_results.get(k)
+                               if v is None:
+                                       v = _unicode_decode('')
+                               if self._aux_multi_line_re.match(k) is None:
+                                       v = " ".join(v.split())
+                               results[k] = v
+
+               if results.get("EAPI") == "":
+                       results[_unicode_decode("EAPI")] = _unicode_decode('0')
+
+               return results
+
+       def _aux_env_search(self, cpv, variables):
+               """
+               Search environment.bz2 for the specified variables. Returns
+               a dict mapping variables to values, and any variables not
+               found in the environment will not be included in the dict.
+               This is useful for querying variables like ${SRC_URI} and
+               ${A}, which are not saved in separate files but are available
+               in environment.bz2 (see bug #395463).
+               """
+               env_file = self.getpath(cpv, filename="environment.bz2")
+               if not os.path.isfile(env_file):
+                       return {}
+               bunzip2_cmd = portage.util.shlex_split(
+                       self.settings.get("PORTAGE_BUNZIP2_COMMAND", ""))
+               if not bunzip2_cmd:
+                       bunzip2_cmd = portage.util.shlex_split(
+                               self.settings["PORTAGE_BZIP2_COMMAND"])
+                       bunzip2_cmd.append("-d")
+               args = bunzip2_cmd + ["-c", env_file]
+               try:
+                       proc = subprocess.Popen(args, stdout=subprocess.PIPE)
+               except EnvironmentError as e:
+                       if e.errno != errno.ENOENT:
+                               raise
+                       raise portage.exception.CommandNotFound(args[0])
+
+               # Parts of the following code are borrowed from
+               # filter-bash-environment.py (keep them in sync).
+               var_assign_re = re.compile(r'(^|^declare\s+-\S+\s+|^declare\s+|^export\s+)([^=\s]+)=("|\')?(.*)$')
+               close_quote_re = re.compile(r'(\\"|"|\')\s*$')
+               def have_end_quote(quote, line):
+                       close_quote_match = close_quote_re.search(line)
+                       return close_quote_match is not None and \
+                               close_quote_match.group(1) == quote
+
+               variables = frozenset(variables)
+               results = {}
+               for line in proc.stdout:
+                       line = _unicode_decode(line,
+                               encoding=_encodings['content'], errors='replace')
+                       var_assign_match = var_assign_re.match(line)
+                       if var_assign_match is not None:
+                               key = var_assign_match.group(2)
+                               quote = var_assign_match.group(3)
+                               if quote is not None:
+                                       if have_end_quote(quote,
+                                               line[var_assign_match.end(2)+2:]):
+                                               value = var_assign_match.group(4)
+                                       else:
+                                               value = [var_assign_match.group(4)]
+                                               for line in proc.stdout:
+                                                       line = _unicode_decode(line,
+                                                               encoding=_encodings['content'],
+                                                               errors='replace')
+                                                       value.append(line)
+                                                       if have_end_quote(quote, line):
+                                                               break
+                                               value = ''.join(value)
+                                       # remove trailing quote and whitespace
+                                       value = value.rstrip()[:-1]
+                               else:
+                                       value = var_assign_match.group(4).rstrip()
+
+                               if key in variables:
+                                       results[key] = value
+
+               proc.wait()
+               proc.stdout.close()
                return results
 
        def aux_update(self, cpv, values):
@@ -758,8 +886,7 @@ class vardbapi(dbapi):
 
                @param myroot: ignored, self._eroot is used instead
                """
-               myroot = None
-               new_vdb = False
+               del myroot
                counter = -1
                try:
                        cfile = io.open(
@@ -768,8 +895,9 @@ class vardbapi(dbapi):
                                mode='r', encoding=_encodings['repo.content'],
                                errors='replace')
                except EnvironmentError as e:
-                       new_vdb = not bool(self.cpv_all())
-                       if not new_vdb:
+                       # Silently allow ENOENT since files under
+                       # /var/cache/ are allowed to disappear.
+                       if e.errno != errno.ENOENT:
                                writemsg(_("!!! Unable to read COUNTER file: '%s'\n") % \
                                        self._counter_path, noiselevel=-1)
                                writemsg("!!! %s\n" % str(e), noiselevel=-1)
@@ -806,10 +934,6 @@ class vardbapi(dbapi):
                                if pkg_counter > max_counter:
                                        max_counter = pkg_counter
 
-               if counter < 0 and not new_vdb:
-                       writemsg(_("!!! Initializing COUNTER to " \
-                               "value of %d\n") % max_counter, noiselevel=-1)
-
                return max_counter + 1
 
        def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None):
@@ -823,7 +947,7 @@ class vardbapi(dbapi):
                @param myroot: ignored, self._eroot is used instead
                @param mycpv: ignored
                @rtype: int
-               @returns: new counter value
+               @return: new counter value
                """
                myroot = None
                mycpv = None
@@ -959,7 +1083,7 @@ class vardbapi(dbapi):
                                counter = int(counter)
                        except ValueError:
                                counter = 0
-                       return (cpv, counter, mtime)
+                       return (_unicode(cpv), counter, mtime)
 
        class _owners_db(object):
 
@@ -1149,24 +1273,38 @@ class vardbapi(dbapi):
 
 class vartree(object):
        "this tree will scan a var/db/pkg database located at root (passed to init)"
-       def __init__(self, root=None, virtual=None, categories=None,
+       def __init__(self, root=None, virtual=DeprecationWarning, categories=None,
                settings=None):
 
                if settings is None:
                        settings = portage.settings
-               self.root = settings['ROOT']
 
-               if root is not None and root != self.root:
-                       warnings.warn("The 'root' parameter of the " + \
-                               "portage.dbapi.vartree.vartree" + \
-                               " constructor is now unused. Use " + \
+               if root is not None and root != settings['ROOT']:
+                       warnings.warn("The 'root' parameter of the "
+                               "portage.dbapi.vartree.vartree"
+                               " constructor is now unused. Use "
                                "settings['ROOT'] instead.",
                                DeprecationWarning, stacklevel=2)
 
+               if virtual is not DeprecationWarning:
+                       warnings.warn("The 'virtual' parameter of the "
+                               "portage.dbapi.vartree.vartree"
+                               " constructor is unused",
+                               DeprecationWarning, stacklevel=2)
+
                self.settings = settings
                self.dbapi = vardbapi(settings=settings, vartree=self)
                self.populated = 1
 
+       @property
+       def root(self):
+               warnings.warn("The root attribute of "
+                       "portage.dbapi.vartree.vartree"
+                       " is deprecated. Use "
+                       "settings['ROOT'] instead.",
+                       DeprecationWarning, stacklevel=3)
+               return self.settings['ROOT']
+
        def getpath(self, mykey, filename=None):
                return self.dbapi.getpath(mykey, filename=filename)
 
@@ -1276,6 +1414,20 @@ class dblink(object):
                r')$'
        )
 
+       # These files are generated by emerge, so we need to remove
+       # them when they are the only thing left in a directory.
+       _infodir_cleanup = frozenset(["dir", "dir.old"])
+
+       _ignored_unlink_errnos = (
+               errno.EBUSY, errno.ENOENT,
+               errno.ENOTDIR, errno.EISDIR)
+
+       _ignored_rmdir_errnos = (
+               errno.EEXIST, errno.ENOTEMPTY,
+               errno.EBUSY, errno.ENOENT,
+               errno.ENOTDIR, errno.EISDIR,
+               errno.EPERM)
+
        def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None,
                vartree=None, blockers=None, scheduler=None, pipe=None):
                """
@@ -1300,22 +1452,23 @@ class dblink(object):
                        raise TypeError("settings argument is required")
 
                mysettings = settings
-               myroot = settings['ROOT']
+               self._eroot = mysettings['EROOT']
                self.cat = cat
                self.pkg = pkg
                self.mycpv = self.cat + "/" + self.pkg
-               self.mysplit = list(catpkgsplit(self.mycpv)[1:])
-               self.mysplit[0] = "%s/%s" % (self.cat, self.mysplit[0])
+               if self.mycpv == settings.mycpv and \
+                       isinstance(settings.mycpv, _pkg_str):
+                       self.mycpv = settings.mycpv
+               else:
+                       self.mycpv = _pkg_str(self.mycpv)
+               self.mysplit = list(self.mycpv.cpv_split[1:])
+               self.mysplit[0] = self.mycpv.cp
                self.treetype = treetype
                if vartree is None:
-                       vartree = portage.db[myroot]["vartree"]
+                       vartree = portage.db[self._eroot]["vartree"]
                self.vartree = vartree
                self._blockers = blockers
                self._scheduler = scheduler
-
-               # WARNING: EROOT support is experimental and may be incomplete
-               # for cases in which EPREFIX is non-empty.
-               self._eroot = mysettings['EROOT']
                self.dbroot = normalize_path(os.path.join(self._eroot, VDB_PATH))
                self.dbcatdir = self.dbroot+"/"+cat
                self.dbpkgdir = self.dbcatdir+"/"+pkg
@@ -1324,14 +1477,14 @@ class dblink(object):
                self.settings = mysettings
                self._verbose = self.settings.get("PORTAGE_VERBOSE") == "1"
 
-               self.myroot=myroot
+               self.myroot = self.settings['ROOT']
                self._installed_instance = None
                self.contentscache = None
                self._contents_inodes = None
                self._contents_basenames = None
                self._linkmap_broken = False
-               self._md5_merge_map = {}
-               self._hash_key = (self.myroot, self.mycpv)
+               self._hardlink_merge_map = {}
+               self._hash_key = (self._eroot, self.mycpv)
                self._protect_obj = None
                self._pipe = pipe
 
@@ -1610,7 +1763,7 @@ class dblink(object):
                        PreservedLibsRegistry yet.
                @type preserve_paths: set
                @rtype: Integer
-               @returns:
+               @return:
                1. os.EX_OK if everything went well.
                2. return code of the failed phase (for prerm, postrm, cleanrm)
                """
@@ -1642,6 +1795,11 @@ class dblink(object):
                showMessage = self._display_merge
                if self.vartree.dbapi._categories is not None:
                        self.vartree.dbapi._categories = None
+
+               # When others_in_slot is not None, the backup has already been
+               # handled by the caller.
+               caller_handles_backup = others_in_slot is not None
+
                # When others_in_slot is supplied, the security check has already been
                # done for this slot, so it shouldn't be repeated until the next
                # replacement or unmerge operation.
@@ -1690,9 +1848,6 @@ class dblink(object):
                except UnsupportedAPIException as e:
                        eapi_unsupported = e
 
-               self._prune_plib_registry(unmerge=True, needed=needed,
-                       preserve_paths=preserve_paths)
-
                builddir_lock = None
                scheduler = self._scheduler
                retval = os.EX_OK
@@ -1707,6 +1862,19 @@ class dblink(object):
                                prepare_build_dirs(settings=self.settings, cleanup=True)
                                log_path = self.settings.get("PORTAGE_LOG_FILE")
 
+                       # Do this before the following _prune_plib_registry call, since
+                       # that removes preserved libraries from our CONTENTS, and we
+                       # may want to backup those libraries first.
+                       if not caller_handles_backup:
+                               retval = self._pre_unmerge_backup(background)
+                               if retval != os.EX_OK:
+                                       showMessage(_("!!! FAILED prerm: quickpkg: %s\n") % retval,
+                                               level=logging.ERROR, noiselevel=-1)
+                                       return retval
+
+                       self._prune_plib_registry(unmerge=True, needed=needed,
+                               preserve_paths=preserve_paths)
+
                        # Log the error after PORTAGE_LOG_FILE is initialized
                        # by prepare_build_dirs above.
                        if eapi_unsupported:
@@ -1839,16 +2007,19 @@ class dblink(object):
                else:
                        self.settings.pop("PORTAGE_LOG_FILE", None)
 
-               # Lock the config memory file to prevent symlink creation
-               # in merge_contents from overlapping with env-update.
-               self.vartree.dbapi._fs_lock()
-               try:
-                       env_update(target_root=self.settings['ROOT'],
-                               prev_mtimes=ldpath_mtimes,
-                               contents=contents, env=self.settings,
-                               writemsg_level=self._display_merge)
-               finally:
-                       self.vartree.dbapi._fs_unlock()
+               env_update(target_root=self.settings['ROOT'],
+                       prev_mtimes=ldpath_mtimes,
+                       contents=contents, env=self.settings,
+                       writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi)
+
+               unmerge_with_replacement = preserve_paths is not None
+               if not unmerge_with_replacement:
+                       # When there's a replacement package which calls us via treewalk,
+                       # treewalk will automatically call _prune_plib_registry for us.
+                       # Otherwise, we need to call _prune_plib_registry ourselves.
+                       # Don't pass in the "unmerge=True" flag here, since that flag
+                       # is intended to be used _prior_ to unmerge, not after.
+                       self._prune_plib_registry()
 
                return os.EX_OK
 
@@ -1871,6 +2042,10 @@ class dblink(object):
                                        log_path=log_path, background=background,
                                        level=level, noiselevel=noiselevel)
 
+       def _show_unmerge(self, zing, desc, file_type, file_name):
+               self._display_merge("%s %s %s %s\n" % \
+                       (zing, desc.ljust(8), file_type, file_name))
+
        def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
                """
                
@@ -1887,6 +2062,9 @@ class dblink(object):
                os = _os_merge
                perf_md5 = perform_md5
                showMessage = self._display_merge
+               show_unmerge = self._show_unmerge
+               ignored_unlink_errnos = self._ignored_unlink_errnos
+               ignored_rmdir_errnos = self._ignored_rmdir_errnos
 
                if not pkgfiles:
                        showMessage(_("No package files given... Grabbing a set.\n"))
@@ -1904,9 +2082,6 @@ class dblink(object):
                                        settings=self.settings,
                                        vartree=self.vartree, treetype="vartree", pipe=self._pipe))
 
-               dest_root = self._eroot
-               dest_root_len = len(dest_root) - 1
-
                cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file)
                stale_confmem = []
                protected_symlinks = {}
@@ -1922,15 +2097,9 @@ class dblink(object):
 
                        #process symlinks second-to-last, directories last.
                        mydirs = set()
-                       ignored_unlink_errnos = (
-                               errno.EBUSY, errno.ENOENT,
-                               errno.ENOTDIR, errno.EISDIR)
-                       ignored_rmdir_errnos = (
-                               errno.EEXIST, errno.ENOTEMPTY,
-                               errno.EBUSY, errno.ENOENT,
-                               errno.ENOTDIR, errno.EISDIR,
-                               errno.EPERM)
-                       modprotect = os.path.join(self._eroot, "lib/modules/")
+
+                       uninstall_ignore = portage.util.shlex_split(
+                               self.settings.get("UNINSTALL_IGNORE", ""))
 
                        def unlink(file_name, lstatobj):
                                if bsd_chflags:
@@ -1965,10 +2134,6 @@ class dblink(object):
                                                # Restore the parent flags we saved before unlinking
                                                bsd_chflags.chflags(parent_name, pflags)
 
-                       def show_unmerge(zing, desc, file_type, file_name):
-                                       showMessage("%s %s %s %s\n" % \
-                                               (zing, desc.ljust(8), file_type, file_name))
-
                        unmerge_desc = {}
                        unmerge_desc["cfgpro"] = _("cfgpro")
                        unmerge_desc["replaced"] = _("replaced")
@@ -1986,9 +2151,6 @@ class dblink(object):
                        real_root_len = len(real_root) - 1
                        eroot = self.settings["EROOT"]
 
-                       # These files are generated by emerge, so we need to remove
-                       # them when they are the only thing left in a directory.
-                       infodir_cleanup = frozenset(["dir", "dir.old"])
                        infodirs = frozenset(infodir for infodir in chain(
                                self.settings.get("INFOPATH", "").split(":"),
                                self.settings.get("INFODIR", "").split(":")) if infodir)
@@ -2044,6 +2206,24 @@ class dblink(object):
                                if lstatobj is None:
                                                show_unmerge("---", unmerge_desc["!found"], file_type, obj)
                                                continue
+
+                               f_match = obj[len(eroot)-1:]
+                               ignore = False
+                               for pattern in uninstall_ignore:
+                                       if fnmatch.fnmatch(f_match, pattern):
+                                               ignore = True
+                                               break
+
+                               if not ignore:
+                                       if islink and f_match in \
+                                               ("/lib", "/usr/lib", "/usr/local/lib"):
+                                               # Ignore libdir symlinks for bug #423127.
+                                               ignore = True
+
+                               if ignore:
+                                       show_unmerge("---", unmerge_desc["cfgpro"], file_type, obj)
+                                       continue
+
                                # don't use EROOT, CONTENTS entries already contain EPREFIX
                                if obj.startswith(real_root):
                                        relative_path = obj[real_root_len:]
@@ -2053,8 +2233,9 @@ class dblink(object):
                                                        is_owned = True
                                                        break
 
-                                       if file_type == "sym" and is_owned and \
-                                               (islink and statobj and stat.S_ISDIR(statobj.st_mode)):
+                                       if is_owned and islink and \
+                                               file_type in ("sym", "dir") and \
+                                               statobj and stat.S_ISDIR(statobj.st_mode):
                                                # A new instance of this package claims the file, so
                                                # don't unmerge it. If the file is symlink to a
                                                # directory and the unmerging package installed it as
@@ -2086,18 +2267,6 @@ class dblink(object):
                                                continue
                                        elif relative_path in cfgfiledict:
                                                stale_confmem.append(relative_path)
-                               # next line includes a tweak to protect modules from being unmerged,
-                               # but we don't protect modules from being overwritten if they are
-                               # upgraded. We effectively only want one half of the config protection
-                               # functionality for /lib/modules. For portage-ng both capabilities
-                               # should be able to be independently specified.
-                               # TODO: For rebuilds, re-parent previous modules to the new
-                               # installed instance (so they are not orphans). For normal
-                               # uninstall (not rebuild/reinstall), remove the modules along
-                               # with all other files (leave no orphans).
-                               if obj.startswith(modprotect):
-                                       show_unmerge("---", unmerge_desc["cfgpro"], file_type, obj)
-                                       continue
 
                                # Don't unlink symlinks to directories here since that can
                                # remove /lib and /usr/lib symlinks.
@@ -2119,12 +2288,12 @@ class dblink(object):
                                        show_unmerge("---", unmerge_desc["!mtime"], file_type, obj)
                                        continue
 
-                               if pkgfiles[objkey][0] == "dir":
+                               if file_type == "dir" and not islink:
                                        if lstatobj is None or not stat.S_ISDIR(lstatobj.st_mode):
                                                show_unmerge("---", unmerge_desc["!dir"], file_type, obj)
                                                continue
                                        mydirs.add((obj, (lstatobj.st_dev, lstatobj.st_ino)))
-                               elif pkgfiles[objkey][0] == "sym":
+                               elif file_type == "sym" or (file_type == "dir" and islink):
                                        if not islink:
                                                show_unmerge("---", unmerge_desc["!sym"], file_type, obj)
                                                continue
@@ -2223,83 +2392,22 @@ class dblink(object):
                                elif pkgfiles[objkey][0] == "dev":
                                        show_unmerge("---", "", file_type, obj)
 
-                       mydirs = sorted(mydirs)
-                       mydirs.reverse()
+                       self._unmerge_dirs(mydirs, infodirs_inodes,
+                               protected_symlinks, unmerge_desc, unlink, os)
+                       mydirs.clear()
 
-                       for obj, inode_key in mydirs:
-                               # Treat any directory named "info" as a candidate here,
-                               # since it might have been in INFOPATH previously even
-                               # though it may not be there now.
-                               if inode_key in infodirs_inodes or \
-                                       os.path.basename(obj) == "info":
-                                       try:
-                                               remaining = os.listdir(obj)
-                                       except OSError:
-                                               pass
-                                       else:
-                                               cleanup_info_dir = ()
-                                               if remaining and \
-                                                       len(remaining) <= len(infodir_cleanup):
-                                                       if not set(remaining).difference(infodir_cleanup):
-                                                               cleanup_info_dir = remaining
-
-                                               for child in cleanup_info_dir:
-                                                       child = os.path.join(obj, child)
-                                                       try:
-                                                               lstatobj = os.lstat(child)
-                                                               if stat.S_ISREG(lstatobj.st_mode):
-                                                                       unlink(child, lstatobj)
-                                                                       show_unmerge("<<<", "", "obj", child)
-                                                       except EnvironmentError as e:
-                                                               if e.errno not in ignored_unlink_errnos:
-                                                                       raise
-                                                               del e
-                                                               show_unmerge("!!!", "", "obj", child)
-                               try:
-                                       if bsd_chflags:
-                                               lstatobj = os.lstat(obj)
-                                               if lstatobj.st_flags != 0:
-                                                       bsd_chflags.lchflags(obj, 0)
-                                               parent_name = os.path.dirname(obj)
-                                               # Use normal stat/chflags for the parent since we want to
-                                               # follow any symlinks to the real parent directory.
-                                               pflags = os.stat(parent_name).st_flags
-                                               if pflags != 0:
-                                                       bsd_chflags.chflags(parent_name, 0)
-                                       try:
-                                               os.rmdir(obj)
-                                       finally:
-                                               if bsd_chflags and pflags != 0:
-                                                       # Restore the parent flags we saved before unlinking
-                                                       bsd_chflags.chflags(parent_name, pflags)
-                                       show_unmerge("<<<", "", "dir", obj)
-                               except EnvironmentError as e:
-                                       if e.errno not in ignored_rmdir_errnos:
-                                               raise
-                                       if e.errno != errno.ENOENT:
-                                               show_unmerge("---", unmerge_desc["!empty"], "dir", obj)
-                                       del e
-                               else:
-                                       # When a directory is successfully removed, there's
-                                       # no need to protect symlinks that point to it.
-                                       unmerge_syms = protected_symlinks.pop(inode_key, None)
-                                       if unmerge_syms is not None:
-                                               for relative_path in unmerge_syms:
-                                                       obj = os.path.join(real_root,
-                                                               relative_path.lstrip(os.sep))
-                                                       try:
-                                                               unlink(obj, os.lstat(obj))
-                                                               show_unmerge("<<<", "", "sym", obj)
-                                                       except (OSError, IOError) as e:
-                                                               if e.errno not in ignored_unlink_errnos:
-                                                                       raise
-                                                               del e
-                                                               show_unmerge("!!!", "", "sym", obj)
+               if protected_symlinks:
+                       self._unmerge_protected_symlinks(others_in_slot, infodirs_inodes,
+                               protected_symlinks, unmerge_desc, unlink, os)
 
                if protected_symlinks:
                        msg = "One or more symlinks to directories have been " + \
                                "preserved in order to ensure that files installed " + \
-                               "via these symlinks remain accessible:"
+                               "via these symlinks remain accessible. " + \
+                               "This indicates that the mentioned symlink(s) may " + \
+                               "be obsolete remnants of an old install, and it " + \
+                               "may be appropriate to replace a given symlink " + \
+                               "with the directory that it points to."
                        lines = textwrap.wrap(msg, 72)
                        lines.append("")
                        flat_list = set()
@@ -2309,7 +2417,7 @@ class dblink(object):
                                lines.append("\t%s" % (os.path.join(real_root,
                                        f.lstrip(os.sep))))
                        lines.append("")
-                       self._elog("eerror", "postrm", lines)
+                       self._elog("elog", "postrm", lines)
 
                # Remove stale entries from config memory.
                if stale_confmem:
@@ -2320,6 +2428,168 @@ class dblink(object):
                #remove self from vartree database so that our own virtual gets zapped if we're the last node
                self.vartree.zap(self.mycpv)
 
+       def _unmerge_protected_symlinks(self, others_in_slot, infodirs_inodes,
+               protected_symlinks, unmerge_desc, unlink, os):
+
+               real_root = self.settings['ROOT']
+               show_unmerge = self._show_unmerge
+               ignored_unlink_errnos = self._ignored_unlink_errnos
+
+               flat_list = set()
+               flat_list.update(*protected_symlinks.values())
+               flat_list = sorted(flat_list)
+
+               for f in flat_list:
+                       for dblnk in others_in_slot:
+                               if dblnk.isowner(f):
+                                       # If another package in the same slot installed
+                                       # a file via a protected symlink, return early
+                                       # and don't bother searching for any other owners.
+                                       return
+
+               msg = []
+               msg.append("")
+               msg.append(_("Directory symlink(s) may need protection:"))
+               msg.append("")
+
+               for f in flat_list:
+                       msg.append("\t%s" % \
+                               os.path.join(real_root, f.lstrip(os.path.sep)))
+
+               msg.append("")
+               msg.append(_("Searching all installed"
+                       " packages for files installed via above symlink(s)..."))
+               msg.append("")
+               self._elog("elog", "postrm", msg)
+
+               self.lockdb()
+               try:
+                       owners = self.vartree.dbapi._owners.get_owners(flat_list)
+                       self.vartree.dbapi.flush_cache()
+               finally:
+                       self.unlockdb()
+
+               for owner in list(owners):
+                       if owner.mycpv == self.mycpv:
+                               owners.pop(owner, None)
+
+               if not owners:
+                       msg = []
+                       msg.append(_("The above directory symlink(s) are all "
+                               "safe to remove. Removing them now..."))
+                       msg.append("")
+                       self._elog("elog", "postrm", msg)
+                       dirs = set()
+                       for unmerge_syms in protected_symlinks.values():
+                               for relative_path in unmerge_syms:
+                                       obj = os.path.join(real_root,
+                                               relative_path.lstrip(os.sep))
+                                       parent = os.path.dirname(obj)
+                                       while len(parent) > len(self._eroot):
+                                               try:
+                                                       lstatobj = os.lstat(parent)
+                                               except OSError:
+                                                       break
+                                               else:
+                                                       dirs.add((parent,
+                                                               (lstatobj.st_dev, lstatobj.st_ino)))
+                                                       parent = os.path.dirname(parent)
+                                       try:
+                                               unlink(obj, os.lstat(obj))
+                                               show_unmerge("<<<", "", "sym", obj)
+                                       except (OSError, IOError) as e:
+                                               if e.errno not in ignored_unlink_errnos:
+                                                       raise
+                                               del e
+                                               show_unmerge("!!!", "", "sym", obj)
+
+                       protected_symlinks.clear()
+                       self._unmerge_dirs(dirs, infodirs_inodes,
+                               protected_symlinks, unmerge_desc, unlink, os)
+                       dirs.clear()
+
+       def _unmerge_dirs(self, dirs, infodirs_inodes,
+               protected_symlinks, unmerge_desc, unlink, os):
+
+               show_unmerge = self._show_unmerge
+               infodir_cleanup = self._infodir_cleanup
+               ignored_unlink_errnos = self._ignored_unlink_errnos
+               ignored_rmdir_errnos = self._ignored_rmdir_errnos
+               real_root = self.settings['ROOT']
+
+               dirs = sorted(dirs)
+               dirs.reverse()
+
+               for obj, inode_key in dirs:
+                       # Treat any directory named "info" as a candidate here,
+                       # since it might have been in INFOPATH previously even
+                       # though it may not be there now.
+                       if inode_key in infodirs_inodes or \
+                               os.path.basename(obj) == "info":
+                               try:
+                                       remaining = os.listdir(obj)
+                               except OSError:
+                                       pass
+                               else:
+                                       cleanup_info_dir = ()
+                                       if remaining and \
+                                               len(remaining) <= len(infodir_cleanup):
+                                               if not set(remaining).difference(infodir_cleanup):
+                                                       cleanup_info_dir = remaining
+
+                                       for child in cleanup_info_dir:
+                                               child = os.path.join(obj, child)
+                                               try:
+                                                       lstatobj = os.lstat(child)
+                                                       if stat.S_ISREG(lstatobj.st_mode):
+                                                               unlink(child, lstatobj)
+                                                               show_unmerge("<<<", "", "obj", child)
+                                               except EnvironmentError as e:
+                                                       if e.errno not in ignored_unlink_errnos:
+                                                               raise
+                                                       del e
+                                                       show_unmerge("!!!", "", "obj", child)
+                       try:
+                               if bsd_chflags:
+                                       lstatobj = os.lstat(obj)
+                                       if lstatobj.st_flags != 0:
+                                               bsd_chflags.lchflags(obj, 0)
+                                       parent_name = os.path.dirname(obj)
+                                       # Use normal stat/chflags for the parent since we want to
+                                       # follow any symlinks to the real parent directory.
+                                       pflags = os.stat(parent_name).st_flags
+                                       if pflags != 0:
+                                               bsd_chflags.chflags(parent_name, 0)
+                               try:
+                                       os.rmdir(obj)
+                               finally:
+                                       if bsd_chflags and pflags != 0:
+                                               # Restore the parent flags we saved before unlinking
+                                               bsd_chflags.chflags(parent_name, pflags)
+                               show_unmerge("<<<", "", "dir", obj)
+                       except EnvironmentError as e:
+                               if e.errno not in ignored_rmdir_errnos:
+                                       raise
+                               if e.errno != errno.ENOENT:
+                                       show_unmerge("---", unmerge_desc["!empty"], "dir", obj)
+                               del e
+                       else:
+                               # When a directory is successfully removed, there's
+                               # no need to protect symlinks that point to it.
+                               unmerge_syms = protected_symlinks.pop(inode_key, None)
+                               if unmerge_syms is not None:
+                                       for relative_path in unmerge_syms:
+                                               obj = os.path.join(real_root,
+                                                       relative_path.lstrip(os.sep))
+                                               try:
+                                                       unlink(obj, os.lstat(obj))
+                                                       show_unmerge("<<<", "", "sym", obj)
+                                               except (OSError, IOError) as e:
+                                                       if e.errno not in ignored_unlink_errnos:
+                                                               raise
+                                                       del e
+                                                       show_unmerge("!!!", "", "sym", obj)
+
        def isowner(self, filename, destroot=None):
                """ 
                Check if a file belongs to this package. This may
@@ -2335,7 +2605,7 @@ class dblink(object):
                @param destroot:
                @type destroot:
                @rtype: Boolean
-               @returns:
+               @return:
                1. True if this package owns the file.
                2. False if this package does not own the file.
                """
@@ -2864,9 +3134,13 @@ class dblink(object):
 
                        os = _os_merge
 
-                       collision_ignore = set([normalize_path(myignore) for myignore in \
-                               portage.util.shlex_split(
-                               self.settings.get("COLLISION_IGNORE", ""))])
+                       collision_ignore = []
+                       for x in portage.util.shlex_split(
+                               self.settings.get("COLLISION_IGNORE", "")):
+                               if os.path.isdir(os.path.join(self._eroot, x.lstrip(os.sep))):
+                                       x = normalize_path(x)
+                                       x += "/*"
+                               collision_ignore.append(x)
 
                        # For collisions with preserved libraries, the current package
                        # will assume ownership and the libraries will be unregistered.
@@ -2967,15 +3241,12 @@ class dblink(object):
                                if not isowned and self.isprotected(full_path):
                                        isowned = True
                                if not isowned:
+                                       f_match = full_path[len(self._eroot)-1:]
                                        stopmerge = True
-                                       if collision_ignore:
-                                               if f in collision_ignore:
+                                       for pattern in collision_ignore:
+                                               if fnmatch.fnmatch(f_match, pattern):
                                                        stopmerge = False
-                                               else:
-                                                       for myignore in collision_ignore:
-                                                               if f.startswith(myignore + os.path.sep):
-                                                                       stopmerge = False
-                                                                       break
+                                                       break
                                        if stopmerge:
                                                collisions.append(f)
                        return collisions, symlink_collisions, plib_collisions
@@ -3128,9 +3399,10 @@ class dblink(object):
                                        if isinstance(lines, basestring):
                                                lines = [lines]
                                        for line in lines:
-                                               fields = (funcname, phase, cpv, line.rstrip('\n'))
-                                               str_buffer.append(' '.join(fields))
-                                               str_buffer.append('\n')
+                                               for line in line.split('\n'):
+                                                       fields = (funcname, phase, cpv, line)
+                                                       str_buffer.append(' '.join(fields))
+                                                       str_buffer.append('\n')
                        if str_buffer:
                                os.write(self._pipe, _unicode_encode(''.join(str_buffer)))
 
@@ -3164,7 +3436,7 @@ class dblink(object):
                @param prev_mtimes: { Filename:mtime } mapping for env_update
                @type prev_mtimes: Dictionary
                @rtype: Boolean
-               @returns:
+               @return:
                1. 0 on success
                2. 1 on failure
                
@@ -3238,6 +3510,10 @@ class dblink(object):
                if not os.path.exists(self.dbcatdir):
                        ensure_dirs(self.dbcatdir)
 
+               # NOTE: We use SLOT obtained from the inforoot
+               #       directory, in order to support USE=multislot.
+               # Use _pkg_str to discard the sub-slot part if necessary.
+               slot = _pkg_str(self.mycpv, slot=slot).slot
                cp = self.mysplit[0]
                slot_atom = "%s:%s" % (cp, slot)
 
@@ -3278,22 +3554,49 @@ class dblink(object):
                                        max_dblnk = dblnk
                        self._installed_instance = max_dblnk
 
+               if self.settings.get("INSTALL_MASK") or \
+                       "nodoc" in self.settings.features or \
+                       "noinfo" in self.settings.features or \
+                       "noman" in self.settings.features:
+                       # Apply INSTALL_MASK before collision-protect, since it may
+                       # be useful to avoid collisions in some scenarios.
+                       phase = MiscFunctionsProcess(background=False,
+                               commands=["preinst_mask"], phase="preinst",
+                               scheduler=self._scheduler, settings=self.settings)
+                       phase.start()
+                       phase.wait()
+
                # We check for unicode encoding issues after src_install. However,
                # the check must be repeated here for binary packages (it's
                # inexpensive since we call os.walk() here anyway).
                unicode_errors = []
+               line_ending_re = re.compile('[\n\r]')
+               srcroot_len = len(srcroot)
+               ed_len = len(self.settings["ED"])
 
                while True:
 
                        unicode_error = False
+                       eagain_error = False
 
                        myfilelist = []
                        mylinklist = []
                        paths_with_newlines = []
-                       srcroot_len = len(srcroot)
                        def onerror(e):
                                raise
-                       for parent, dirs, files in os.walk(srcroot, onerror=onerror):
+                       walk_iter = os.walk(srcroot, onerror=onerror)
+                       while True:
+                               try:
+                                       parent, dirs, files = next(walk_iter)
+                               except StopIteration:
+                                       break
+                               except OSError as e:
+                                       if e.errno != errno.EAGAIN:
+                                               raise
+                                       # Observed with PyPy 1.8.
+                                       eagain_error = True
+                                       break
+
                                try:
                                        parent = _unicode_decode(parent,
                                                encoding=_encodings['merge'], errors='strict')
@@ -3301,12 +3604,12 @@ class dblink(object):
                                        new_parent = _unicode_decode(parent,
                                                encoding=_encodings['merge'], errors='replace')
                                        new_parent = _unicode_encode(new_parent,
-                                               encoding=_encodings['merge'], errors='backslashreplace')
+                                               encoding='ascii', errors='backslashreplace')
                                        new_parent = _unicode_decode(new_parent,
                                                encoding=_encodings['merge'], errors='replace')
                                        os.rename(parent, new_parent)
                                        unicode_error = True
-                                       unicode_errors.append(new_parent[srcroot_len:])
+                                       unicode_errors.append(new_parent[ed_len:])
                                        break
 
                                for fname in files:
@@ -3319,13 +3622,13 @@ class dblink(object):
                                                new_fname = _unicode_decode(fname,
                                                        encoding=_encodings['merge'], errors='replace')
                                                new_fname = _unicode_encode(new_fname,
-                                                       encoding=_encodings['merge'], errors='backslashreplace')
+                                                       encoding='ascii', errors='backslashreplace')
                                                new_fname = _unicode_decode(new_fname,
                                                        encoding=_encodings['merge'], errors='replace')
                                                new_fpath = os.path.join(parent, new_fname)
                                                os.rename(fpath, new_fpath)
                                                unicode_error = True
-                                               unicode_errors.append(new_fpath[srcroot_len:])
+                                               unicode_errors.append(new_fpath[ed_len:])
                                                fname = new_fname
                                                fpath = new_fpath
                                        else:
@@ -3333,7 +3636,7 @@ class dblink(object):
 
                                        relative_path = fpath[srcroot_len:]
 
-                                       if "\n" in relative_path:
+                                       if line_ending_re.search(relative_path) is not None:
                                                paths_with_newlines.append(relative_path)
 
                                        file_mode = os.lstat(fpath).st_mode
@@ -3348,19 +3651,20 @@ class dblink(object):
                                if unicode_error:
                                        break
 
-                       if not unicode_error:
+                       if not (unicode_error or eagain_error):
                                break
 
                if unicode_errors:
-                       eerror(_merge_unicode_error(unicode_errors))
+                       self._elog("eqawarn", "preinst",
+                               _merge_unicode_error(unicode_errors))
 
                if paths_with_newlines:
                        msg = []
-                       msg.append(_("This package installs one or more files containing a newline (\\n) character:"))
+                       msg.append(_("This package installs one or more files containing line ending characters:"))
                        msg.append("")
                        paths_with_newlines.sort()
                        for f in paths_with_newlines:
-                               msg.append("\t/%s" % (f.replace("\n", "\\n")))
+                               msg.append("\t/%s" % (f.replace("\n", "\\n").replace("\r", "\\r")))
                        msg.append("")
                        msg.append(_("package %s NOT merged") % self.mycpv)
                        msg.append("")
@@ -3402,14 +3706,6 @@ class dblink(object):
                        if installed_files:
                                return 1
 
-               # check for package collisions
-               blockers = self._blockers
-               if blockers is None:
-                       blockers = []
-               collisions, symlink_collisions, plib_collisions = \
-                       self._collision_protect(srcroot, destroot,
-                       others_in_slot + blockers, myfilelist, mylinklist)
-
                # Make sure the ebuild environment is initialized and that ${T}/elog
                # exists for logging of collision-protect eerror messages.
                if myebuild is None:
@@ -3421,6 +3717,29 @@ class dblink(object):
                        for other in others_in_slot])
                prepare_build_dirs(settings=self.settings, cleanup=cleanup)
 
+               # check for package collisions
+               blockers = self._blockers
+               if blockers is None:
+                       blockers = []
+               collisions, symlink_collisions, plib_collisions = \
+                       self._collision_protect(srcroot, destroot,
+                       others_in_slot + blockers, myfilelist, mylinklist)
+
+               if symlink_collisions:
+                       # Symlink collisions need to be distinguished from other types
+                       # of collisions, in order to avoid confusion (see bug #409359).
+                       msg = _("Package '%s' has one or more collisions "
+                               "between symlinks and directories, which is explicitly "
+                               "forbidden by PMS section 13.4 (see bug #326685):") % \
+                               (self.settings.mycpv,)
+                       msg = textwrap.wrap(msg, 70)
+                       msg.append("")
+                       for f in symlink_collisions:
+                               msg.append("\t%s" % os.path.join(destroot,
+                                       f.lstrip(os.path.sep)))
+                       msg.append("")
+                       self._elog("eerror", "preinst", msg)
+
                if collisions:
                        collision_protect = "collision-protect" in self.settings.features
                        protect_owned = "protect-owned" in self.settings.features
@@ -3502,12 +3821,20 @@ class dblink(object):
                                        eerror([_("None of the installed"
                                                " packages claim the file(s)."), ""])
 
+                       symlink_abort_msg =_("Package '%s' NOT merged since it has "
+                               "one or more collisions between symlinks and directories, "
+                               "which is explicitly forbidden by PMS section 13.4 "
+                               "(see bug #326685).")
+
                        # The explanation about the collision and how to solve
                        # it may not be visible via a scrollback buffer, especially
                        # if the number of file collisions is large. Therefore,
                        # show a summary at the end.
                        abort = False
-                       if collision_protect:
+                       if symlink_collisions:
+                               abort = True
+                               msg = symlink_abort_msg % (self.settings.mycpv,)
+                       elif collision_protect:
                                abort = True
                                msg = _("Package '%s' NOT merged due to file collisions.") % \
                                        self.settings.mycpv
@@ -3515,12 +3842,6 @@ class dblink(object):
                                abort = True
                                msg = _("Package '%s' NOT merged due to file collisions.") % \
                                        self.settings.mycpv
-                       elif symlink_collisions:
-                               abort = True
-                               msg = _("Package '%s' NOT merged due to collision " + \
-                               "between a symlink and a directory which is explicitly " + \
-                               "forbidden by PMS (see bug #326685).") % \
-                               (self.settings.mycpv,)
                        else:
                                msg = _("Package '%s' merged despite file collisions.") % \
                                        self.settings.mycpv
@@ -3545,6 +3866,20 @@ class dblink(object):
                self.delete()
                ensure_dirs(self.dbtmpdir)
 
+               downgrade = False
+               if self._installed_instance is not None and \
+                       vercmp(self.mycpv.version,
+                       self._installed_instance.mycpv.version) < 0:
+                       downgrade = True
+
+               if self._installed_instance is not None:
+                       rval = self._pre_merge_backup(self._installed_instance, downgrade)
+                       if rval != os.EX_OK:
+                               showMessage(_("!!! FAILED preinst: ") +
+                                       "quickpkg: %s\n" % rval,
+                                       level=logging.ERROR, noiselevel=-1)
+                               return rval
+
                # run preinst script
                showMessage(_(">>> Merging %(cpv)s to %(destroot)s\n") % \
                        {"cpv":self.mycpv, "destroot":destroot})
@@ -3578,21 +3913,15 @@ class dblink(object):
                #if we have a file containing previously-merged config file md5sums, grab it.
                self.vartree.dbapi._fs_lock()
                try:
+                       # Always behave like --noconfmem is enabled for downgrades
+                       # so that people who don't know about this option are less
+                       # likely to get confused when doing upgrade/downgrade cycles.
                        cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file)
-                       if "NOCONFMEM" in self.settings:
+                       if "NOCONFMEM" in self.settings or downgrade:
                                cfgfiledict["IGNORE"]=1
                        else:
                                cfgfiledict["IGNORE"]=0
 
-                       # Always behave like --noconfmem is enabled for downgrades
-                       # so that people who don't know about this option are less
-                       # likely to get confused when doing upgrade/downgrade cycles.
-                       pv_split = catpkgsplit(self.mycpv)[1:]
-                       for other in others_in_slot:
-                               if pkgcmp(pv_split, catpkgsplit(other.mycpv)[1:]) < 0:
-                                       cfgfiledict["IGNORE"] = 1
-                                       break
-
                        rval = self._merge_contents(srcroot, destroot, cfgfiledict)
                        if rval != os.EX_OK:
                                return rval
@@ -3808,17 +4137,11 @@ class dblink(object):
                        showMessage(_("!!! FAILED postinst: ")+str(a)+"\n",
                                level=logging.ERROR, noiselevel=-1)
 
-               # Lock the config memory file to prevent symlink creation
-               # in merge_contents from overlapping with env-update.
-               self.vartree.dbapi._fs_lock()
-               try:
-                       #update environment settings, library paths. DO NOT change symlinks.
-                       env_update(
-                               target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes,
-                               contents=contents, env=self.settings,
-                               writemsg_level=self._display_merge)
-               finally:
-                       self.vartree.dbapi._fs_unlock()
+               #update environment settings, library paths. DO NOT change symlinks.
+               env_update(
+                       target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes,
+                       contents=contents, env=self.settings,
+                       writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi)
 
                # For gcc upgrades, preserved libs have to be removed after the
                # the library path has been updated.
@@ -3939,10 +4262,11 @@ class dblink(object):
                @type stufftomerge: String or List
                @param cfgfiledict: { File:mtime } mapping for config_protected files
                @type cfgfiledict: Dictionary
-               @param thismtime: The current time (typically long(time.time())
-               @type thismtime: Long
+               @param thismtime: None or new mtime for merged files (expressed in seconds
+               in Python <3.3 and nanoseconds in Python >=3.3)
+               @type thismtime: None or Int
                @rtype: None or Boolean
-               @returns:
+               @return:
                1. True on failure
                2. None otherwise
                
@@ -3958,6 +4282,10 @@ class dblink(object):
                destroot = normalize_path(destroot).rstrip(sep) + sep
                calc_prelink = "prelink-checksums" in self.settings.features
 
+               protect_if_modified = \
+                       "config-protect-if-modified" in self.settings.features and \
+                       self._installed_instance is not None
+
                # this is supposed to merge a list of files.  There will be 2 forms of argument passing.
                if isinstance(stufftomerge, basestring):
                        #A directory is specified.  Figure out protection paths, listdir() it and process it.
@@ -3991,14 +4319,37 @@ class dblink(object):
 
                        if stat.S_ISLNK(mymode):
                                # we are merging a symbolic link
-                               myabsto = abssymlink(mysrc)
+                               # The file name of mysrc and the actual file that it points to
+                               # will have earlier been forcefully converted to the 'merge'
+                               # encoding if necessary, but the content of the symbolic link
+                               # may need to be forcefully converted here.
+                               myto = _os.readlink(_unicode_encode(mysrc,
+                                       encoding=_encodings['merge'], errors='strict'))
+                               try:
+                                       myto = _unicode_decode(myto,
+                                               encoding=_encodings['merge'], errors='strict')
+                               except UnicodeDecodeError:
+                                       myto = _unicode_decode(myto, encoding=_encodings['merge'],
+                                               errors='replace')
+                                       myto = _unicode_encode(myto, encoding='ascii',
+                                               errors='backslashreplace')
+                                       myto = _unicode_decode(myto, encoding=_encodings['merge'],
+                                               errors='replace')
+                                       os.unlink(mysrc)
+                                       os.symlink(myto, mysrc)
+
+                               # Pass in the symlink target in order to bypass the
+                               # os.readlink() call inside abssymlink(), since that
+                               # call is unsafe if the merge encoding is not ascii
+                               # or utf_8 (see bug #382021).
+                               myabsto = abssymlink(mysrc, target=myto)
+
                                if myabsto.startswith(srcroot):
                                        myabsto = myabsto[len(srcroot):]
                                myabsto = myabsto.lstrip(sep)
-                               myto = os.readlink(mysrc)
                                if self.settings and self.settings["D"]:
                                        if myto.startswith(self.settings["D"]):
-                                               myto = myto[len(self.settings["D"]):]
+                                               myto = myto[len(self.settings["D"])-1:]
                                # myrealto contains the path of the real file to which this symlink points.
                                # we can simply test for existence of this file to see if the target has been merged yet
                                myrealto = normalize_path(os.path.join(destroot, myabsto))
@@ -4046,7 +4397,10 @@ class dblink(object):
                                        encoding=_encodings['merge'])
                                if mymtime != None:
                                        showMessage(">>> %s -> %s\n" % (mydest, myto))
-                                       outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
+                                       if sys.hexversion >= 0x3030000:
+                                               outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime // 1000000000)+"\n")
+                                       else:
+                                               outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
                                else:
                                        showMessage(_("!!! Failed to move file.\n"),
                                                level=logging.ERROR, noiselevel=-1)
@@ -4175,10 +4529,19 @@ class dblink(object):
                                                # or by a symlink to an existing regular file;
                                                # now, config file management may come into play.
                                                # we only need to tweak mydest if cfg file management is in play.
+                                               if protected:
+                                                       destmd5 = perform_md5(mydest, calc_prelink=calc_prelink)
+                                                       if protect_if_modified:
+                                                               contents_key = \
+                                                                       self._installed_instance._match_contents(myrealdest)
+                                                               if contents_key:
+                                                                       inst_info = self._installed_instance.getcontents()[contents_key]
+                                                                       if inst_info[0] == "obj" and inst_info[2] == destmd5:
+                                                                               protected = False
+
                                                if protected:
                                                        # we have a protection path; enable config file management.
                                                        cfgprot = 0
-                                                       destmd5 = perform_md5(mydest, calc_prelink=calc_prelink)
                                                        if mymd5 == destmd5:
                                                                #file already in place; simply update mtimes of destination
                                                                moveme = 1
@@ -4191,7 +4554,10 @@ class dblink(object):
                                                                        cfgprot = cfgfiledict["IGNORE"]
                                                                        if not moveme:
                                                                                zing = "---"
-                                                                               mymtime = mystat[stat.ST_MTIME]
+                                                                               if sys.hexversion >= 0x3030000:
+                                                                                       mymtime = mystat.st_mtime_ns
+                                                                               else:
+                                                                                       mymtime = mystat[stat.ST_MTIME]
                                                                else:
                                                                        moveme = 1
                                                                        cfgprot = 1
@@ -4213,10 +4579,10 @@ class dblink(object):
                                        # as hardlinks (having identical st_dev and st_ino).
                                        hardlink_key = (mystat.st_dev, mystat.st_ino)
 
-                                       hardlink_candidates = self._md5_merge_map.get(hardlink_key)
+                                       hardlink_candidates = self._hardlink_merge_map.get(hardlink_key)
                                        if hardlink_candidates is None:
                                                hardlink_candidates = []
-                                               self._md5_merge_map[hardlink_key] = hardlink_candidates
+                                               self._hardlink_merge_map[hardlink_key] = hardlink_candidates
 
                                        mymtime = movefile(mysrc, mydest, newmtime=thismtime,
                                                sstat=mystat, mysettings=self.settings,
@@ -4224,12 +4590,14 @@ class dblink(object):
                                                encoding=_encodings['merge'])
                                        if mymtime is None:
                                                return 1
-                                       if hardlink_candidates is not None:
-                                               hardlink_candidates.append(mydest)
+                                       hardlink_candidates.append(mydest)
                                        zing = ">>>"
 
                                if mymtime != None:
-                                       outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
+                                       if sys.hexversion >= 0x3030000:
+                                               outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime // 1000000000)+"\n")
+                                       else:
+                                               outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
                                showMessage("%s %s\n" % (zing,mydest))
                        else:
                                # we are merging a fifo or device node
@@ -4366,6 +4734,66 @@ class dblink(object):
                "Is this a regular package (does it have a CATEGORY file?  A dblink can be virtual *and* regular)"
                return os.path.exists(os.path.join(self.dbdir, "CATEGORY"))
 
+       def _pre_merge_backup(self, backup_dblink, downgrade):
+
+               if ("unmerge-backup" in self.settings.features or
+                       (downgrade and "downgrade-backup" in self.settings.features)):
+                       return self._quickpkg_dblink(backup_dblink, False, None)
+
+               return os.EX_OK
+
+       def _pre_unmerge_backup(self, background):
+
+               if "unmerge-backup" in self.settings.features :
+                       logfile = None
+                       if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
+                               logfile = self.settings.get("PORTAGE_LOG_FILE")
+                       return self._quickpkg_dblink(self, background, logfile)
+
+               return os.EX_OK
+
+       def _quickpkg_dblink(self, backup_dblink, background, logfile):
+
+               trees = QueryCommand.get_db()[self.settings["EROOT"]]
+               bintree = trees["bintree"]
+               binpkg_path = bintree.getname(backup_dblink.mycpv)
+               if os.path.exists(binpkg_path) and \
+                       catsplit(backup_dblink.mycpv)[1] not in bintree.invalids:
+                       return os.EX_OK
+
+               self.lockdb()
+               try:
+
+                       if not backup_dblink.exists():
+                               # It got unmerged by a concurrent process.
+                               return os.EX_OK
+
+                       # Call quickpkg so that QUICKPKG_DEFAULT_OPTS and related
+                       # quickpkg configuration are honored.
+                       quickpkg_binary = os.path.join(self.settings["PORTAGE_BIN_PATH"],
+                               "quickpkg")
+
+                       # Let quickpkg inherit the global vartree config's env.
+                       env = dict(self.vartree.settings.items())
+                       env["__PORTAGE_INHERIT_VARDB_LOCK"] = "1"
+
+                       pythonpath = [x for x in env.get('PYTHONPATH', '').split(":") if x]
+                       if not pythonpath or \
+                               not os.path.samefile(pythonpath[0], portage._pym_path):
+                               pythonpath.insert(0, portage._pym_path)
+                       env['PYTHONPATH'] = ":".join(pythonpath)
+
+                       quickpkg_proc = SpawnProcess(
+                               args=[portage._python_interpreter, quickpkg_binary,
+                                       "=%s" % (backup_dblink.mycpv,)],
+                               background=background, env=env,
+                               scheduler=self._scheduler, logfile=logfile)
+                       quickpkg_proc.start()
+
+                       return quickpkg_proc.wait()
+
+               finally:
+                       self.unlockdb()
+
 def merge(mycat, mypkg, pkgloc, infloc,
        myroot=None, settings=None, myebuild=None,
        mytree=None, mydbapi=None, vartree=None, prev_mtimes=None, blockers=None,
@@ -4451,6 +4879,7 @@ def write_contents(contents, root, f):
 
 def tar_contents(contents, root, tar, protect=None, onProgress=None):
        os = _os_merge
+       encoding = _encodings['merge']
 
        try:
                for x in contents:
@@ -4470,7 +4899,9 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None):
                        pass
                else:
                        os = portage.os
+                       encoding = _encodings['fs']
 
+       tar.encoding = encoding
        root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
        id_strings = {}
        maxval = len(contents)
@@ -4492,7 +4923,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None):
                        continue
                contents_type = contents[path][0]
                if path.startswith(root):
-                       arcname = path[len(root):]
+                       arcname = "./" + path[len(root):]
                else:
                        raise ValueError("invalid root argument: '%s'" % root)
                live_path = path
@@ -4504,7 +4935,51 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None):
                        # recorded as a real directory in the tar file to ensure that tar
                        # can properly extract its children.
                        live_path = os.path.realpath(live_path)
-               tarinfo = tar.gettarinfo(live_path, arcname)
+                       lst = os.lstat(live_path)
+
+               # Since os.lstat() inside TarFile.gettarinfo() can trigger a
+               # UnicodeEncodeError when python has something other than utf_8
+               # return from sys.getfilesystemencoding() (as in bug #388773),
+               # we implement the needed functionality here, using the result
+               # of our successful lstat call. An alternative to this would be
+               # to pass in the fileobj argument to TarFile.gettarinfo(), so
+               # that it could use fstat instead of lstat. However, that would
+               # have the unwanted effect of dereferencing symlinks.
+
+               tarinfo = tar.tarinfo()
+               tarinfo.name = arcname
+               tarinfo.mode = lst.st_mode
+               tarinfo.uid = lst.st_uid
+               tarinfo.gid = lst.st_gid
+               tarinfo.size = 0
+               tarinfo.mtime = lst.st_mtime
+               tarinfo.linkname = ""
+               if stat.S_ISREG(lst.st_mode):
+                       inode = (lst.st_ino, lst.st_dev)
+                       if (lst.st_nlink > 1 and
+                               inode in tar.inodes and
+                               arcname != tar.inodes[inode]):
+                               tarinfo.type = tarfile.LNKTYPE
+                               tarinfo.linkname = tar.inodes[inode]
+                       else:
+                               tar.inodes[inode] = arcname
+                               tarinfo.type = tarfile.REGTYPE
+                               tarinfo.size = lst.st_size
+               elif stat.S_ISDIR(lst.st_mode):
+                       tarinfo.type = tarfile.DIRTYPE
+               elif stat.S_ISLNK(lst.st_mode):
+                       tarinfo.type = tarfile.SYMTYPE
+                       tarinfo.linkname = os.readlink(live_path)
+               else:
+                       continue
+               try:
+                       tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+               except KeyError:
+                       pass
+               try:
+                       tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+               except KeyError:
+                       pass
 
                if stat.S_ISREG(lst.st_mode):
                        if protect and protect(path):
@@ -4521,7 +4996,7 @@ def tar_contents(contents, root, tar, protect=None, onProgress=None):
                                f.close()
                        else:
                                f = open(_unicode_encode(path,
-                                       encoding=object.__getattribute__(os, '_encoding'),
+                                       encoding=encoding,
                                        errors='strict'), 'rb')
                                try:
                                        tar.addfile(tarinfo, f)