ea62f6bcc72b449b4ea6603b049c3a0e1263e2c2
[portage.git] / pym / portage / dbapi / vartree.py
1 # Copyright 1998-2012 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 __all__ = [
5         "vardbapi", "vartree", "dblink"] + \
6         ["write_contents", "tar_contents"]
7
8 import portage
9 portage.proxy.lazyimport.lazyimport(globals(),
10         'portage.checksum:_perform_md5_merge@perform_md5',
11         'portage.data:portage_gid,portage_uid,secpass',
12         'portage.dbapi.dep_expand:dep_expand',
13         'portage.dbapi._MergeProcess:MergeProcess',
14         'portage.dep:dep_getkey,isjustname,isvalidatom,match_from_list,' + \
15                 'use_reduce,_get_slot_re',
16         'portage.eapi:_get_eapi_attrs',
17         'portage.elog:collect_ebuild_messages,collect_messages,' + \
18                 'elog_process,_merge_logentries',
19         'portage.locks:lockdir,unlockdir,lockfile,unlockfile',
20         'portage.output:bold,colorize',
21         'portage.package.ebuild.doebuild:doebuild_environment,' + \
22                 '_merge_unicode_error', '_spawn_phase',
23         'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
24         'portage.package.ebuild._ipc.QueryCommand:QueryCommand',
25         'portage.update:fixdbentries',
26         'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \
27                 'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \
28                 'grabdict,normalize_path,new_protect_filename',
29         'portage.util.digraph:digraph',
30         'portage.util.env_update:env_update',
31         'portage.util.listdir:dircache,listdir',
32         'portage.util.movefile:movefile',
33         'portage.util._dyn_libs.PreservedLibsRegistry:PreservedLibsRegistry',
34         'portage.util._dyn_libs.LinkageMapELF:LinkageMapELF@LinkageMap',
35         'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,vercmp,' + \
36                 '_pkgsplit@pkgsplit,_pkg_str',
37         'subprocess',
38         'tarfile',
39 )
40
41 from portage.const import CACHE_PATH, CONFIG_MEMORY_FILE, \
42         PORTAGE_PACKAGE_ATOM, PRIVATE_PATH, VDB_PATH
43 from portage.const import _ENABLE_DYN_LINK_MAP, _ENABLE_PRESERVE_LIBS
44 from portage.dbapi import dbapi
45 from portage.exception import CommandNotFound, \
46         InvalidData, InvalidLocation, InvalidPackageName, \
47         FileNotFound, PermissionDenied, UnsupportedAPIException
48 from portage.localization import _
49
50 from portage import abssymlink, _movefile, bsd_chflags
51
52 # This is a special version of the os module, wrapped for unicode support.
53 from portage import os
54 from portage import shutil
55 from portage import _encodings
56 from portage import _os_merge
57 from portage import _selinux_merge
58 from portage import _unicode_decode
59 from portage import _unicode_encode
60
61 from _emerge.EbuildBuildDir import EbuildBuildDir
62 from _emerge.EbuildPhase import EbuildPhase
63 from _emerge.emergelog import emergelog
64 from _emerge.PollScheduler import PollScheduler
65 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
66 from _emerge.SpawnProcess import SpawnProcess
67
68 import errno
69 import fnmatch
70 import gc
71 import grp
72 import io
73 from itertools import chain
74 import logging
75 import os as _os
76 import pwd
77 import re
78 import stat
79 import sys
80 import tempfile
81 import textwrap
82 import time
83 import warnings
84
85 try:
86         import cPickle as pickle
87 except ImportError:
88         import pickle
89
90 if sys.hexversion >= 0x3000000:
91         basestring = str
92         long = int
93         _unicode = str
94 else:
95         _unicode = unicode
96
class vardbapi(dbapi):
    """Database API for installed packages, backed by the on-disk vdb
    tree rooted at EROOT + VDB_PATH."""

    # Directory names inside the vdb that never correspond to packages;
    # hidden dirs and partially-merged (-MERGING-*) entries are also
    # excluded by the compiled pattern below.
    _excluded_dirs = ["CVS", "lost+found"]
    _excluded_dirs = [re.escape(x) for x in _excluded_dirs]
    _excluded_dirs = re.compile(r'^(\..*|-MERGING-.*|' + \
        "|".join(_excluded_dirs) + r')$')

    # Version stamps for the pickled metadata / owners caches; bumping
    # either one invalidates the corresponding on-disk cache.
    _aux_cache_version        = "1"
    _owners_cache_version     = "1"

    # Number of uncached packages to trigger cache update, since
    # it's wasteful to update it for every vdb change.
    _aux_cache_threshold = 5

    # Metadata keys matching this pattern are cached in addition to
    # the fixed _aux_cache_keys set (see aux_get()).
    _aux_cache_keys_re = re.compile(r'^NEEDED\..*$')
    # Keys whose vdb files contain multiple lines of data.
    _aux_multi_line_re = re.compile(r'^(CONTENTS|NEEDED\..*)$')
113
    def __init__(self, _unused_param=None, categories=None, settings=None, vartree=None):
        """
        The categories parameter is unused since the dbapi class
        now has a categories property that is generated from the
        available packages.

        @param _unused_param: deprecated; formerly ROOT (a warning is
            emitted if it disagrees with settings['ROOT'])
        @param settings: config instance; defaults to portage.settings
        @param vartree: vartree instance; defaults to the global tree
            for settings['EROOT']
        """

        # Used by emerge to check whether any packages
        # have been added or removed.
        self._pkgs_changed = False

        # The _aux_cache_threshold doesn't work as designed
        # if the cache is flushed from a subprocess, so we
        # use this to avoid waste vdb cache updates.
        self._flush_cache_enabled = True

        #cache for category directory mtimes
        self.mtdircache = {}

        #cache for dependency checks
        self.matchcache = {}

        #cache for cp_list results
        self.cpcache = {}

        self.blockers = None
        if settings is None:
            settings = portage.settings
        self.settings = settings

        if _unused_param is not None and _unused_param != settings['ROOT']:
            warnings.warn("The first parameter of the "
                "portage.dbapi.vartree.vardbapi"
                " constructor is now unused. Use "
                "settings['ROOT'] instead.",
                DeprecationWarning, stacklevel=2)

        self._eroot = settings['EROOT']
        self._dbroot = self._eroot + VDB_PATH
        # Reentrant vdb lock state; see lock()/unlock().
        self._lock = None
        self._lock_count = 0

        self._conf_mem_file = self._eroot + CONFIG_MEMORY_FILE
        # Reentrant config-memory file lock state; see _fs_lock()/_fs_unlock().
        self._fs_lock_obj = None
        self._fs_lock_count = 0

        if vartree is None:
            vartree = portage.db[settings['EROOT']]['vartree']
        self.vartree = vartree
        # Metadata keys that aux_get() caches persistently on disk.
        self._aux_cache_keys = set(
            ["BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "DESCRIPTION",
            "EAPI", "HOMEPAGE", "IUSE", "KEYWORDS",
            "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND",
            "repository", "RESTRICT" , "SLOT", "USE", "DEFINED_PHASES",
            ])
        # Lazily loaded via the _aux_cache property / _aux_cache_init().
        self._aux_cache_obj = None
        self._aux_cache_filename = os.path.join(self._eroot,
            CACHE_PATH, "vdb_metadata.pickle")
        self._counter_path = os.path.join(self._eroot,
            CACHE_PATH, "counter")

        self._plib_registry = None
        if _ENABLE_PRESERVE_LIBS:
            self._plib_registry = PreservedLibsRegistry(settings["ROOT"],
                os.path.join(self._eroot, PRIVATE_PATH,
                "preserved_libs_registry"))

        self._linkmap = None
        if _ENABLE_DYN_LINK_MAP:
            self._linkmap = LinkageMap(self)
        self._owners = self._owners_db(self)

        self._cached_counter = None
187
188         @property
189         def root(self):
190                 warnings.warn("The root attribute of "
191                         "portage.dbapi.vartree.vardbapi"
192                         " is deprecated. Use "
193                         "settings['ROOT'] instead.",
194                         DeprecationWarning, stacklevel=3)
195                 return self.settings['ROOT']
196
197         def getpath(self, mykey, filename=None):
198                 # This is an optimized hotspot, so don't use unicode-wrapped
199                 # os module and don't use os.path.join().
200                 rValue = self._eroot + VDB_PATH + _os.sep + mykey
201                 if filename is not None:
202                         # If filename is always relative, we can do just
203                         # rValue += _os.sep + filename
204                         rValue = _os.path.join(rValue, filename)
205                 return rValue
206
207         def lock(self):
208                 """
209                 Acquire a reentrant lock, blocking, for cooperation with concurrent
210                 processes. State is inherited by subprocesses, allowing subprocesses
211                 to reenter a lock that was acquired by a parent process. However,
212                 a lock can be released only by the same process that acquired it.
213                 """
214                 if self._lock_count:
215                         self._lock_count += 1
216                 else:
217                         if self._lock is not None:
218                                 raise AssertionError("already locked")
219                         # At least the parent needs to exist for the lock file.
220                         ensure_dirs(self._dbroot)
221                         self._lock = lockdir(self._dbroot)
222                         self._lock_count += 1
223
224         def unlock(self):
225                 """
226                 Release a lock, decrementing the recursion level. Each unlock() call
227                 must be matched with a prior lock() call, or else an AssertionError
228                 will be raised if unlock() is called while not locked.
229                 """
230                 if self._lock_count > 1:
231                         self._lock_count -= 1
232                 else:
233                         if self._lock is None:
234                                 raise AssertionError("not locked")
235                         self._lock_count = 0
236                         unlockdir(self._lock)
237                         self._lock = None
238
239         def _fs_lock(self):
240                 """
241                 Acquire a reentrant lock, blocking, for cooperation with concurrent
242                 processes.
243                 """
244                 if self._fs_lock_count < 1:
245                         if self._fs_lock_obj is not None:
246                                 raise AssertionError("already locked")
247                         try:
248                                 self._fs_lock_obj = lockfile(self._conf_mem_file)
249                         except InvalidLocation:
250                                 self.settings._init_dirs()
251                                 self._fs_lock_obj = lockfile(self._conf_mem_file)
252                 self._fs_lock_count += 1
253
254         def _fs_unlock(self):
255                 """
256                 Release a lock, decrementing the recursion level.
257                 """
258                 if self._fs_lock_count <= 1:
259                         if self._fs_lock_obj is None:
260                                 raise AssertionError("not locked")
261                         unlockfile(self._fs_lock_obj)
262                         self._fs_lock_obj = None
263                 self._fs_lock_count -= 1
264
265         def _bump_mtime(self, cpv):
266                 """
267                 This is called before an after any modifications, so that consumers
268                 can use directory mtimes to validate caches. See bug #290428.
269                 """
270                 base = self._eroot + VDB_PATH
271                 cat = catsplit(cpv)[0]
272                 catdir = base + _os.sep + cat
273                 t = time.time()
274                 t = (t, t)
275                 try:
276                         for x in (catdir, base):
277                                 os.utime(x, t)
278                 except OSError:
279                         ensure_dirs(catdir)
280
281         def cpv_exists(self, mykey, myrepo=None):
282                 "Tells us whether an actual ebuild exists on disk (no masking)"
283                 return os.path.exists(self.getpath(mykey))
284
285         def cpv_counter(self, mycpv):
286                 "This method will grab the COUNTER. Returns a counter value."
287                 try:
288                         return long(self.aux_get(mycpv, ["COUNTER"])[0])
289                 except (KeyError, ValueError):
290                         pass
291                 writemsg_level(_("portage: COUNTER for %s was corrupted; " \
292                         "resetting to value of 0\n") % (mycpv,),
293                         level=logging.ERROR, noiselevel=-1)
294                 return 0
295
296         def cpv_inject(self, mycpv):
297                 "injects a real package into our on-disk database; assumes mycpv is valid and doesn't already exist"
298                 ensure_dirs(self.getpath(mycpv))
299                 counter = self.counter_tick(mycpv=mycpv)
300                 # write local package counter so that emerge clean does the right thing
301                 write_atomic(self.getpath(mycpv, filename="COUNTER"), str(counter))
302
303         def isInjected(self, mycpv):
304                 if self.cpv_exists(mycpv):
305                         if os.path.exists(self.getpath(mycpv, filename="INJECTED")):
306                                 return True
307                         if not os.path.exists(self.getpath(mycpv, filename="CONTENTS")):
308                                 return True
309                 return False
310
    def move_ent(self, mylist, repo_match=None):
        """
        Apply a package move to the installed-package database.

        @param mylist: update command list: ["move", origcp, newcp]
        @param repo_match: optional callable taking a repo name; when
            given, only packages whose repo it accepts are moved
        @return: number of entries moved
        @raises InvalidPackageName: if either cp is not a plain cat/pn
        """
        origcp = mylist[1]
        newcp = mylist[2]

        # sanity check
        for atom in (origcp, newcp):
            if not isjustname(atom):
                raise InvalidPackageName(str(atom))
        origmatches = self.match(origcp, use_cache=0)
        moves = 0
        if not origmatches:
            return moves
        for mycpv in origmatches:
            try:
                mycpv = self._pkg_str(mycpv, None)
            except (KeyError, InvalidData):
                continue
            mycpv_cp = cpv_getkey(mycpv)
            if mycpv_cp != origcp:
                # Ignore PROVIDE virtual match.
                continue
            if repo_match is not None \
                and not repo_match(mycpv.repo):
                continue

            # Use isvalidatom() to check if this move is valid for the
            # EAPI (characters allowed in package names may vary).
            if not isvalidatom(newcp, eapi=mycpv.eapi):
                continue

            mynewcpv = mycpv.replace(mycpv_cp, _unicode(newcp), 1)
            mynewcat = catsplit(newcp)[0]
            origpath = self.getpath(mycpv)
            if not os.path.exists(origpath):
                continue
            moves += 1
            if not os.path.exists(self.getpath(mynewcat)):
                #create the directory
                ensure_dirs(self.getpath(mynewcat))
            newpath = self.getpath(mynewcpv)
            if os.path.exists(newpath):
                #dest already exists; keep this puppy where it is.
                continue
            _movefile(origpath, newpath, mysettings=self.settings)
            # Drop stale cache entries for both the old and new names.
            self._clear_pkg_cache(self._dblink(mycpv))
            self._clear_pkg_cache(self._dblink(mynewcpv))

            # We need to rename the ebuild now.
            old_pf = catsplit(mycpv)[1]
            new_pf = catsplit(mynewcpv)[1]
            if new_pf != old_pf:
                try:
                    os.rename(os.path.join(newpath, old_pf + ".ebuild"),
                        os.path.join(newpath, new_pf + ".ebuild"))
                except EnvironmentError as e:
                    # A missing ebuild is tolerable; anything else is not.
                    if e.errno != errno.ENOENT:
                        raise
                    del e
            # Update metadata files that embed the package's name, and
            # fix any db entries that reference the old name.
            write_atomic(os.path.join(newpath, "PF"), new_pf+"\n")
            write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat+"\n")
            fixdbentries([mylist], newpath, eapi=mycpv.eapi)
        return moves
373
    def cp_list(self, mycp, use_cache=1):
        """
        Return all installed cpvs for "category/package" *mycp*, sorted
        ascending by version.  Results are cached keyed on mycp and
        validated against the category directory's mtime.

        @param mycp: "category/package" string
        @param use_cache: when false, any cached entry for mycp is dropped
        """
        mysplit=catsplit(mycp)
        # A category of exactly '*' is reduced to the empty string —
        # presumably a legacy injected-package convention; TODO confirm.
        if mysplit[0] == '*':
            mysplit[0] = mysplit[0][1:]
        try:
            mystat = os.stat(self.getpath(mysplit[0])).st_mtime
        except OSError:
            mystat = 0
        if use_cache and mycp in self.cpcache:
            cpc = self.cpcache[mycp]
            # The cached copy is valid only while the category directory
            # mtime is unchanged.
            if cpc[0] == mystat:
                return cpc[1][:]
        cat_dir = self.getpath(mysplit[0])
        try:
            dir_list = os.listdir(cat_dir)
        except EnvironmentError as e:
            if e.errno == PermissionDenied.errno:
                raise PermissionDenied(cat_dir)
            del e
            dir_list = []

        returnme = []
        for x in dir_list:
            if self._excluded_dirs.match(x) is not None:
                continue
            ps = pkgsplit(x)
            if not ps:
                # Not a valid pkg-version directory name.
                self.invalidentry(os.path.join(self.getpath(mysplit[0]), x))
                continue
            if len(mysplit) > 1:
                if ps[0] == mysplit[1]:
                    # Package name matches; record the full cpv.
                    returnme.append(_pkg_str(mysplit[0]+"/"+x))
        self._cpv_sort_ascending(returnme)
        if use_cache:
            self.cpcache[mycp] = [mystat, returnme[:]]
        elif mycp in self.cpcache:
            del self.cpcache[mycp]
        return returnme
412
    def cpv_all(self, use_cache=1):
        """
        Return a list of all installed cpvs ("cat/pkg-version" strings).

        Set use_cache=0 to bypass the portage.cachedir() cache in cases
        when the accuracy of mtime staleness checks should not be trusted
        (generally this is only necessary in critical sections that
        involve merge or unmerge of packages).
        """
        returnme = []
        basepath = os.path.join(self._eroot, VDB_PATH) + os.path.sep

        if use_cache:
            from portage import listdir
        else:
            # Uncached fallback with a compatible signature; the extra
            # keyword arguments used below are accepted and ignored.
            def listdir(p, **kwargs):
                try:
                    return [x for x in os.listdir(p) \
                        if os.path.isdir(os.path.join(p, x))]
                except EnvironmentError as e:
                    if e.errno == PermissionDenied.errno:
                        raise PermissionDenied(p)
                    del e
                    return []

        for x in listdir(basepath, EmptyOnError=1, ignorecvs=1, dirsonly=1):
            if self._excluded_dirs.match(x) is not None:
                continue
            if not self._category_re.match(x):
                continue
            for y in listdir(basepath + x, EmptyOnError=1, dirsonly=1):
                if self._excluded_dirs.match(y) is not None:
                    continue
                subpath = x + "/" + y
                # -MERGING- should never be a cpv, nor should files.
                try:
                    if catpkgsplit(subpath) is None:
                        self.invalidentry(self.getpath(subpath))
                        continue
                except InvalidData:
                    self.invalidentry(self.getpath(subpath))
                    continue
                returnme.append(subpath)

        return returnme
456
457         def cp_all(self, use_cache=1):
458                 mylist = self.cpv_all(use_cache=use_cache)
459                 d={}
460                 for y in mylist:
461                         if y[0] == '*':
462                                 y = y[1:]
463                         try:
464                                 mysplit = catpkgsplit(y)
465                         except InvalidData:
466                                 self.invalidentry(self.getpath(y))
467                                 continue
468                         if not mysplit:
469                                 self.invalidentry(self.getpath(y))
470                                 continue
471                         d[mysplit[0]+"/"+mysplit[1]] = None
472                 return list(d)
473
    def checkblockers(self, origdep):
        # Intentionally a no-op for installed packages; presumably kept
        # so vardbapi matches the interface of other dbapi subclasses —
        # TODO confirm against the dbapi base class.
        pass
476
477         def _clear_cache(self):
478                 self.mtdircache.clear()
479                 self.matchcache.clear()
480                 self.cpcache.clear()
481                 self._aux_cache_obj = None
482
483         def _add(self, pkg_dblink):
484                 self._pkgs_changed = True
485                 self._clear_pkg_cache(pkg_dblink)
486
487         def _remove(self, pkg_dblink):
488                 self._pkgs_changed = True
489                 self._clear_pkg_cache(pkg_dblink)
490
491         def _clear_pkg_cache(self, pkg_dblink):
492                 # Due to 1 second mtime granularity in <python-2.5, mtime checks
493                 # are not always sufficient to invalidate vardbapi caches. Therefore,
494                 # the caches need to be actively invalidated here.
495                 self.mtdircache.pop(pkg_dblink.cat, None)
496                 self.matchcache.pop(pkg_dblink.cat, None)
497                 self.cpcache.pop(pkg_dblink.mysplit[0], None)
498                 dircache.pop(pkg_dblink.dbcatdir, None)
499
500         def match(self, origdep, use_cache=1):
501                 "caching match function"
502                 mydep = dep_expand(
503                         origdep, mydb=self, use_cache=use_cache, settings=self.settings)
504                 cache_key = (mydep, mydep.unevaluated_atom)
505                 mykey = dep_getkey(mydep)
506                 mycat = catsplit(mykey)[0]
507                 if not use_cache:
508                         if mycat in self.matchcache:
509                                 del self.mtdircache[mycat]
510                                 del self.matchcache[mycat]
511                         return list(self._iter_match(mydep,
512                                 self.cp_list(mydep.cp, use_cache=use_cache)))
513                 try:
514                         curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime
515                 except (IOError, OSError):
516                         curmtime=0
517
518                 if mycat not in self.matchcache or \
519                         self.mtdircache[mycat] != curmtime:
520                         # clear cache entry
521                         self.mtdircache[mycat] = curmtime
522                         self.matchcache[mycat] = {}
523                 if mydep not in self.matchcache[mycat]:
524                         mymatch = list(self._iter_match(mydep,
525                                 self.cp_list(mydep.cp, use_cache=use_cache)))
526                         self.matchcache[mycat][cache_key] = mymatch
527                 return self.matchcache[mycat][cache_key][:]
528
529         def findname(self, mycpv, myrepo=None):
530                 return self.getpath(str(mycpv), filename=catsplit(mycpv)[1]+".ebuild")
531
    def flush_cache(self):
        """If the current user has permission and the internal aux_get cache has
        been updated, save it to disk and mark it unmodified.  This is called
        by emerge after it has loaded the full vdb for use in dependency
        calculations.  Currently, the cache is only written if the user has
        superuser privileges (since that's required to obtain a lock), but all
        users have read access and benefit from faster metadata lookups (as
        long as at least part of the cache is still valid)."""
        if self._flush_cache_enabled and \
            self._aux_cache is not None and \
            len(self._aux_cache["modified"]) >= self._aux_cache_threshold and \
            secpass >= 2:
            self._owners.populate() # index any unindexed contents
            # Prune entries for packages no longer present in the vdb.
            valid_nodes = set(self.cpv_all())
            for cpv in list(self._aux_cache["packages"]):
                if cpv not in valid_nodes:
                    del self._aux_cache["packages"][cpv]
            # "modified" is runtime-only bookkeeping; drop it before
            # pickling and restore it (empty) afterwards.
            del self._aux_cache["modified"]
            try:
                f = atomic_ofstream(self._aux_cache_filename, 'wb')
                pickle.dump(self._aux_cache, f, protocol=2)
                f.close()
                apply_secpass_permissions(
                    self._aux_cache_filename, gid=portage_gid, mode=0o644)
            except (IOError, OSError) as e:
                # Best-effort: failing to persist the cache is non-fatal.
                pass
            self._aux_cache["modified"] = set()
559
560         @property
561         def _aux_cache(self):
562                 if self._aux_cache_obj is None:
563                         self._aux_cache_init()
564                 return self._aux_cache_obj
565
    def _aux_cache_init(self):
        """Load the pickled aux metadata cache from disk into
        self._aux_cache_obj, falling back to a fresh empty cache when the
        file is missing, unreadable, stale, or structurally invalid."""
        aux_cache = None
        open_kwargs = {}
        if sys.hexversion >= 0x3000000:
            # Buffered io triggers extreme performance issues in
            # Unpickler.load() (problem observed with python-3.0.1).
            # Unfortunately, performance is still poor relative to
            # python-2.x, but buffering makes it much worse.
            open_kwargs["buffering"] = 0
        try:
            f = open(_unicode_encode(self._aux_cache_filename,
                encoding=_encodings['fs'], errors='strict'),
                mode='rb', **open_kwargs)
            mypickle = pickle.Unpickler(f)
            try:
                # py2 hardening: disable resolution of global objects
                # while unpickling.
                mypickle.find_global = None
            except AttributeError:
                # TODO: If py3k, override Unpickler.find_class().
                pass
            aux_cache = mypickle.load()
            f.close()
            del f
        except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError) as e:
            if isinstance(e, EnvironmentError) and \
                getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES):
                # A missing or unreadable cache file is normal; be quiet.
                pass
            else:
                writemsg(_unicode_decode(_("!!! Error loading '%s': %s\n")) % \
                    (self._aux_cache_filename, e), noiselevel=-1)
            del e

        # Discard anything that isn't a populated cache dict of the
        # current version.
        if not aux_cache or \
            not isinstance(aux_cache, dict) or \
            aux_cache.get("version") != self._aux_cache_version or \
            not aux_cache.get("packages"):
            aux_cache = {"version": self._aux_cache_version}
            aux_cache["packages"] = {}

        # Validate the embedded owners cache; rebuild it if malformed or
        # from a different cache version.
        owners = aux_cache.get("owners")
        if owners is not None:
            if not isinstance(owners, dict):
                owners = None
            elif "version" not in owners:
                owners = None
            elif owners["version"] != self._owners_cache_version:
                owners = None
            elif "base_names" not in owners:
                owners = None
            elif not isinstance(owners["base_names"], dict):
                owners = None

        if owners is None:
            owners = {
                "base_names" : {},
                "version"    : self._owners_cache_version
            }
            aux_cache["owners"] = owners

        # Runtime-only set of cpvs whose cached metadata changed.
        aux_cache["modified"] = set()
        self._aux_cache_obj = aux_cache
626
	def aux_get(self, mycpv, wants, myrepo = None):
		"""This automatically caches selected keys that are frequently needed
		by emerge for dependency calculations.  The cached metadata is
		considered valid if the mtime of the package directory has not changed
		since the data was cached.  The cache is stored in a pickled dict
		object with the following format:

		{version:"1", "packages":{cpv1:(mtime,{k1,v1, k2,v2, ...}), cpv2...}}

		If an error occurs while loading the cache pickle or the version is
		unrecognized, the cache will simple be recreated from scratch (it is
		completely disposable).
		"""
		# Cacheable keys are the registered set plus any requested key
		# matching the cacheable-key pattern.
		cache_these_wants = self._aux_cache_keys.intersection(wants)
		for x in wants:
			if self._aux_cache_keys_re.match(x) is not None:
				cache_these_wants.add(x)

		if not cache_these_wants:
			# Nothing requested is cacheable; bypass the cache entirely.
			mydata = self._aux_get(mycpv, wants)
			return [mydata[x] for x in wants]

		cache_these = set(self._aux_cache_keys)
		cache_these.update(cache_these_wants)

		mydir = self.getpath(mycpv)
		mydir_stat = None
		try:
			mydir_stat = os.stat(mydir)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
			# A missing package directory means the package is not installed.
			raise KeyError(mycpv)
		# Use float mtime when available.
		mydir_mtime = mydir_stat.st_mtime
		pkg_data = self._aux_cache["packages"].get(mycpv)
		pull_me = cache_these.union(wants)
		mydata = {"_mtime_" : mydir_mtime}
		cache_valid = False
		cache_incomplete = False
		cache_mtime = None
		metadata = None
		# Validate the structure of the (disposable) cached entry before
		# trusting it: it must be a (mtime, dict) tuple.
		if pkg_data is not None:
			if not isinstance(pkg_data, tuple) or len(pkg_data) != 2:
				pkg_data = None
			else:
				cache_mtime, metadata = pkg_data
				if not isinstance(cache_mtime, (float, long, int)) or \
					not isinstance(metadata, dict):
					pkg_data = None

		if pkg_data:
			cache_mtime, metadata = pkg_data
			if isinstance(cache_mtime, float):
				cache_valid = cache_mtime == mydir_stat.st_mtime
			else:
				# Cache may contain integer mtime.
				cache_valid = cache_mtime == mydir_stat[stat.ST_MTIME]

		if cache_valid:
			# Migrate old metadata to unicode.
			for k, v in metadata.items():
				metadata[k] = _unicode_decode(v,
					encoding=_encodings['repo.content'], errors='replace')

			mydata.update(metadata)
			# Only fetch keys that the valid cache does not already hold.
			pull_me.difference_update(mydata)

		if pull_me:
			# pull any needed data and cache it
			aux_keys = list(pull_me)
			mydata.update(self._aux_get(mycpv, aux_keys, st=mydir_stat))
			# Rewrite the cache entry when it was invalid or incomplete.
			if not cache_valid or cache_these.difference(metadata):
				cache_data = {}
				if cache_valid and metadata:
					cache_data.update(metadata)
				for aux_key in cache_these:
					cache_data[aux_key] = mydata[aux_key]
				self._aux_cache["packages"][_unicode(mycpv)] = \
					(mydir_mtime, cache_data)
				self._aux_cache["modified"].add(mycpv)

		eapi_attrs = _get_eapi_attrs(mydata['EAPI'])
		if _get_slot_re(eapi_attrs).match(mydata['SLOT']) is None:
			# Empty or invalid slot triggers InvalidAtom exceptions when
			# generating slot atoms for packages, so translate it to '0' here.
			mydata['SLOT'] = _unicode_decode('0')

		return [mydata[x] for x in wants]
716
717         def _aux_get(self, mycpv, wants, st=None):
718                 mydir = self.getpath(mycpv)
719                 if st is None:
720                         try:
721                                 st = os.stat(mydir)
722                         except OSError as e:
723                                 if e.errno == errno.ENOENT:
724                                         raise KeyError(mycpv)
725                                 elif e.errno == PermissionDenied.errno:
726                                         raise PermissionDenied(mydir)
727                                 else:
728                                         raise
729                 if not stat.S_ISDIR(st.st_mode):
730                         raise KeyError(mycpv)
731                 results = {}
732                 env_keys = []
733                 for x in wants:
734                         if x == "_mtime_":
735                                 results[x] = st[stat.ST_MTIME]
736                                 continue
737                         try:
738                                 myf = io.open(
739                                         _unicode_encode(os.path.join(mydir, x),
740                                         encoding=_encodings['fs'], errors='strict'),
741                                         mode='r', encoding=_encodings['repo.content'],
742                                         errors='replace')
743                                 try:
744                                         myd = myf.read()
745                                 finally:
746                                         myf.close()
747                         except IOError:
748                                 if x not in self._aux_cache_keys and \
749                                         self._aux_cache_keys_re.match(x) is None:
750                                         env_keys.append(x)
751                                         continue
752                                 myd = _unicode_decode('')
753
754                         # Preserve \n for metadata that is known to
755                         # contain multiple lines.
756                         if self._aux_multi_line_re.match(x) is None:
757                                 myd = " ".join(myd.split())
758
759                         results[x] = myd
760
761                 if env_keys:
762                         env_results = self._aux_env_search(mycpv, env_keys)
763                         for k in env_keys:
764                                 v = env_results.get(k)
765                                 if v is None:
766                                         v = _unicode_decode('')
767                                 if self._aux_multi_line_re.match(k) is None:
768                                         v = " ".join(v.split())
769                                 results[k] = v
770
771                 if results.get("EAPI") == "":
772                         results[_unicode_decode("EAPI")] = _unicode_decode('0')
773
774                 return results
775
	def _aux_env_search(self, cpv, variables):
		"""
		Search environment.bz2 for the specified variables. Returns
		a dict mapping variables to values, and any variables not
		found in the environment will not be included in the dict.
		This is useful for querying variables like ${SRC_URI} and
		${A}, which are not saved in separate files but are available
		in environment.bz2 (see bug #395463).
		"""
		env_file = self.getpath(cpv, filename="environment.bz2")
		if not os.path.isfile(env_file):
			return {}
		# Prefer an explicit PORTAGE_BUNZIP2_COMMAND; otherwise use the
		# configured bzip2 command with -d (decompress) appended.
		bunzip2_cmd = portage.util.shlex_split(
			self.settings.get("PORTAGE_BUNZIP2_COMMAND", ""))
		if not bunzip2_cmd:
			bunzip2_cmd = portage.util.shlex_split(
				self.settings["PORTAGE_BZIP2_COMMAND"])
			bunzip2_cmd.append("-d")
		args = bunzip2_cmd + ["-c", env_file]
		try:
			proc = subprocess.Popen(args, stdout=subprocess.PIPE)
		except EnvironmentError as e:
			if e.errno != errno.ENOENT:
				raise
			raise portage.exception.CommandNotFound(args[0])

		# Parts of the following code are borrowed from
		# filter-bash-environment.py (keep them in sync).
		var_assign_re = re.compile(r'(^|^declare\s+-\S+\s+|^declare\s+|^export\s+)([^=\s]+)=("|\')?(.*)$')
		close_quote_re = re.compile(r'(\\"|"|\')\s*$')
		def have_end_quote(quote, line):
			# True when the line ends (ignoring trailing whitespace)
			# with the same quote character that opened the value.
			close_quote_match = close_quote_re.search(line)
			return close_quote_match is not None and \
				close_quote_match.group(1) == quote

		variables = frozenset(variables)
		results = {}
		for line in proc.stdout:
			line = _unicode_decode(line,
				encoding=_encodings['content'], errors='replace')
			var_assign_match = var_assign_re.match(line)
			if var_assign_match is not None:
				key = var_assign_match.group(2)
				quote = var_assign_match.group(3)
				if quote is not None:
					# Quoted value: it may span multiple lines, so keep
					# reading until the closing quote is found.  The
					# end(2)+2 slice skips past the "=" and the opening
					# quote character.
					if have_end_quote(quote,
						line[var_assign_match.end(2)+2:]):
						value = var_assign_match.group(4)
					else:
						value = [var_assign_match.group(4)]
						for line in proc.stdout:
							line = _unicode_decode(line,
								encoding=_encodings['content'],
								errors='replace')
							value.append(line)
							if have_end_quote(quote, line):
								break
						value = ''.join(value)
					# remove trailing quote and whitespace
					value = value.rstrip()[:-1]
				else:
					value = var_assign_match.group(4).rstrip()

				if key in variables:
					results[key] = value

		proc.wait()
		proc.stdout.close()
		return results
845
846         def aux_update(self, cpv, values):
847                 mylink = self._dblink(cpv)
848                 if not mylink.exists():
849                         raise KeyError(cpv)
850                 self._bump_mtime(cpv)
851                 self._clear_pkg_cache(mylink)
852                 for k, v in values.items():
853                         if v:
854                                 mylink.setfile(k, v)
855                         else:
856                                 try:
857                                         os.unlink(os.path.join(self.getpath(cpv), k))
858                                 except EnvironmentError:
859                                         pass
860                 self._bump_mtime(cpv)
861
862         def counter_tick(self, myroot=None, mycpv=None):
863                 """
864                 @param myroot: ignored, self._eroot is used instead
865                 """
866                 return self.counter_tick_core(incrementing=1, mycpv=mycpv)
867
	def get_counter_tick_core(self, myroot=None, mycpv=None):
		"""
		Use this method to retrieve the counter instead
		of having to trust the value of a global counter
		file that can lead to invalid COUNTER
		generation. When cache is valid, the package COUNTER
		files are not read and we rely on the timestamp of
		the package directory to validate cache. The stat
		calls should only take a short time, so performance
		is sufficient without having to rely on a potentially
		corrupt global counter file.

		The global counter file located at
		$CACHE_PATH/counter serves to record the
		counter of the last installed package and
		it also corresponds to the total number of
		installation actions that have occurred in
		the history of this package database.

		@param myroot: ignored, self._eroot is used instead
		"""
		del myroot
		# -1 sentinel: used when the counter file is missing or corrupt,
		# forcing the per-package scan below (since _cached_counter
		# will not match).
		counter = -1
		try:
			cfile = io.open(
				_unicode_encode(self._counter_path,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			# Silently allow ENOENT since files under
			# /var/cache/ are allowed to disappear.
			if e.errno != errno.ENOENT:
				writemsg(_("!!! Unable to read COUNTER file: '%s'\n") % \
					self._counter_path, noiselevel=-1)
				writemsg("!!! %s\n" % str(e), noiselevel=-1)
			del e
		else:
			try:
				try:
					counter = long(cfile.readline().strip())
				finally:
					cfile.close()
			except (OverflowError, ValueError) as e:
				# Corrupt contents: fall through with the -1 sentinel so
				# the per-package maximum below is used instead.
				writemsg(_("!!! COUNTER file is corrupt: '%s'\n") % \
					self._counter_path, noiselevel=-1)
				writemsg("!!! %s\n" % str(e), noiselevel=-1)
				del e

		if self._cached_counter == counter:
			max_counter = counter
		else:
			# We must ensure that we return a counter
			# value that is at least as large as the
			# highest one from the installed packages,
			# since having a corrupt value that is too low
			# can trigger incorrect AUTOCLEAN behavior due
			# to newly installed packages having lower
			# COUNTERs than the previous version in the
			# same slot.
			max_counter = counter
			for cpv in self.cpv_all():
				try:
					pkg_counter = int(self.aux_get(cpv, ["COUNTER"])[0])
				except (KeyError, OverflowError, ValueError):
					continue
				if pkg_counter > max_counter:
					max_counter = pkg_counter

		return max_counter + 1
938
939         def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None):
940                 """
941                 This method will grab the next COUNTER value and record it back
942                 to the global file. Note that every package install must have
943                 a unique counter, since a slotmove update can move two packages
944                 into the same SLOT and in that case it's important that both
945                 packages have different COUNTER metadata.
946
947                 @param myroot: ignored, self._eroot is used instead
948                 @param mycpv: ignored
949                 @rtype: int
950                 @return: new counter value
951                 """
952                 myroot = None
953                 mycpv = None
954                 self.lock()
955                 try:
956                         counter = self.get_counter_tick_core() - 1
957                         if incrementing:
958                                 #increment counter
959                                 counter += 1
960                                 # update new global counter file
961                                 try:
962                                         write_atomic(self._counter_path, str(counter))
963                                 except InvalidLocation:
964                                         self.settings._init_dirs()
965                                         write_atomic(self._counter_path, str(counter))
966                         self._cached_counter = counter
967
968                         # Since we hold a lock, this is a good opportunity
969                         # to flush the cache. Note that this will only
970                         # flush the cache periodically in the main process
971                         # when _aux_cache_threshold is exceeded.
972                         self.flush_cache()
973                 finally:
974                         self.unlock()
975
976                 return counter
977
978         def _dblink(self, cpv):
979                 category, pf = catsplit(cpv)
980                 return dblink(category, pf, settings=self.settings,
981                         vartree=self.vartree, treetype="vartree")
982
983         def removeFromContents(self, pkg, paths, relative_paths=True):
984                 """
985                 @param pkg: cpv for an installed package
986                 @type pkg: string
987                 @param paths: paths of files to remove from contents
988                 @type paths: iterable
989                 """
990                 if not hasattr(pkg, "getcontents"):
991                         pkg = self._dblink(pkg)
992                 root = self.settings['ROOT']
993                 root_len = len(root) - 1
994                 new_contents = pkg.getcontents().copy()
995                 removed = 0
996
997                 for filename in paths:
998                         filename = _unicode_decode(filename,
999                                 encoding=_encodings['content'], errors='strict')
1000                         filename = normalize_path(filename)
1001                         if relative_paths:
1002                                 relative_filename = filename
1003                         else:
1004                                 relative_filename = filename[root_len:]
1005                         contents_key = pkg._match_contents(relative_filename)
1006                         if contents_key:
1007                                 del new_contents[contents_key]
1008                                 removed += 1
1009
1010                 if removed:
1011                         self._bump_mtime(pkg.mycpv)
1012                         f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
1013                         write_contents(new_contents, root, f)
1014                         f.close()
1015                         self._bump_mtime(pkg.mycpv)
1016                         pkg._clear_contents_cache()
1017
1018         class _owners_cache(object):
1019                 """
1020                 This class maintains an hash table that serves to index package
1021                 contents by mapping the basename of file to a list of possible
1022                 packages that own it. This is used to optimize owner lookups
1023                 by narrowing the search down to a smaller number of packages.
1024                 """
1025                 try:
1026                         from hashlib import md5 as _new_hash
1027                 except ImportError:
1028                         from md5 import new as _new_hash
1029
1030                 _hash_bits = 16
1031                 _hex_chars = int(_hash_bits / 4)
1032
1033                 def __init__(self, vardb):
1034                         self._vardb = vardb
1035
1036                 def add(self, cpv):
1037                         eroot_len = len(self._vardb._eroot)
1038                         contents = self._vardb._dblink(cpv).getcontents()
1039                         pkg_hash = self._hash_pkg(cpv)
1040                         if not contents:
1041                                 # Empty path is a code used to represent empty contents.
1042                                 self._add_path("", pkg_hash)
1043
1044                         for x in contents:
1045                                 self._add_path(x[eroot_len:], pkg_hash)
1046
1047                         self._vardb._aux_cache["modified"].add(cpv)
1048
1049                 def _add_path(self, path, pkg_hash):
1050                         """
1051                         Empty path is a code that represents empty contents.
1052                         """
1053                         if path:
1054                                 name = os.path.basename(path.rstrip(os.path.sep))
1055                                 if not name:
1056                                         return
1057                         else:
1058                                 name = path
1059                         name_hash = self._hash_str(name)
1060                         base_names = self._vardb._aux_cache["owners"]["base_names"]
1061                         pkgs = base_names.get(name_hash)
1062                         if pkgs is None:
1063                                 pkgs = {}
1064                                 base_names[name_hash] = pkgs
1065                         pkgs[pkg_hash] = None
1066
1067                 def _hash_str(self, s):
1068                         h = self._new_hash()
1069                         # Always use a constant utf_8 encoding here, since
1070                         # the "default" encoding can change.
1071                         h.update(_unicode_encode(s,
1072                                 encoding=_encodings['repo.content'],
1073                                 errors='backslashreplace'))
1074                         h = h.hexdigest()
1075                         h = h[-self._hex_chars:]
1076                         h = int(h, 16)
1077                         return h
1078
1079                 def _hash_pkg(self, cpv):
1080                         counter, mtime = self._vardb.aux_get(
1081                                 cpv, ["COUNTER", "_mtime_"])
1082                         try:
1083                                 counter = int(counter)
1084                         except ValueError:
1085                                 counter = 0
1086                         return (_unicode(cpv), counter, mtime)
1087
1088         class _owners_db(object):
1089
		def __init__(self, vardb):
			# The vardbapi instance whose package contents are searched.
			self._vardb = vardb
1092
		def populate(self):
			"""Public wrapper that synchronizes the owners cache."""
			self._populate()
1095
1096                 def _populate(self):
1097                         owners_cache = vardbapi._owners_cache(self._vardb)
1098                         cached_hashes = set()
1099                         base_names = self._vardb._aux_cache["owners"]["base_names"]
1100
1101                         # Take inventory of all cached package hashes.
1102                         for name, hash_values in list(base_names.items()):
1103                                 if not isinstance(hash_values, dict):
1104                                         del base_names[name]
1105                                         continue
1106                                 cached_hashes.update(hash_values)
1107
1108                         # Create sets of valid package hashes and uncached packages.
1109                         uncached_pkgs = set()
1110                         hash_pkg = owners_cache._hash_pkg
1111                         valid_pkg_hashes = set()
1112                         for cpv in self._vardb.cpv_all():
1113                                 hash_value = hash_pkg(cpv)
1114                                 valid_pkg_hashes.add(hash_value)
1115                                 if hash_value not in cached_hashes:
1116                                         uncached_pkgs.add(cpv)
1117
1118                         # Cache any missing packages.
1119                         for cpv in uncached_pkgs:
1120                                 owners_cache.add(cpv)
1121
1122                         # Delete any stale cache.
1123                         stale_hashes = cached_hashes.difference(valid_pkg_hashes)
1124                         if stale_hashes:
1125                                 for base_name_hash, bucket in list(base_names.items()):
1126                                         for hash_value in stale_hashes.intersection(bucket):
1127                                                 del bucket[hash_value]
1128                                         if not bucket:
1129                                                 del base_names[base_name_hash]
1130
1131                         return owners_cache
1132
1133                 def get_owners(self, path_iter):
1134                         """
1135                         @return the owners as a dblink -> set(files) mapping.
1136                         """
1137                         owners = {}
1138                         for owner, f in self.iter_owners(path_iter):
1139                                 owned_files = owners.get(owner)
1140                                 if owned_files is None:
1141                                         owned_files = set()
1142                                         owners[owner] = owned_files
1143                                 owned_files.add(f)
1144                         return owners
1145
1146                 def getFileOwnerMap(self, path_iter):
1147                         owners = self.get_owners(path_iter)
1148                         file_owners = {}
1149                         for pkg_dblink, files in owners.items():
1150                                 for f in files:
1151                                         owner_set = file_owners.get(f)
1152                                         if owner_set is None:
1153                                                 owner_set = set()
1154                                                 file_owners[f] = owner_set
1155                                         owner_set.add(pkg_dblink)
1156                         return file_owners
1157
		def iter_owners(self, path_iter):
			"""
			Iterate over tuples of (dblink, path). In order to avoid
			consuming too many resources for too much time, resources
			are only allocated for the duration of a given iter_owners()
			call. Therefore, to maximize reuse of resources when searching
			for multiple files, it's best to search for them all in a single
			call.
			"""

			if not isinstance(path_iter, list):
				path_iter = list(path_iter)
			owners_cache = self._populate()
			vardb = self._vardb
			root = vardb._eroot
			hash_pkg = owners_cache._hash_pkg
			hash_str = owners_cache._hash_str
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			# Memoize dblink instances, but bound the cache size so a
			# search touching many packages cannot exhaust memory.
			dblink_cache = {}

			def dblink(cpv):
				x = dblink_cache.get(cpv)
				if x is None:
					if len(dblink_cache) > 20:
						# Ensure that we don't run out of memory.
						# Caught below to trigger the low-memory search.
						raise StopIteration()
					x = self._vardb._dblink(cpv)
					dblink_cache[cpv] = x
				return x

			while path_iter:

				path = path_iter.pop()
				# A path without a leading separator is a bare basename.
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))

				if not name:
					continue

				name_hash = hash_str(name)
				pkgs = base_names.get(name_hash)
				owners = []
				if pkgs is not None:
					try:
						for hash_value in pkgs:
							# Skip cache entries with an unexpected format.
							if not isinstance(hash_value, tuple) or \
								len(hash_value) != 3:
								continue
							cpv, counter, mtime = hash_value
							if not isinstance(cpv, basestring):
								continue
							try:
								current_hash = hash_pkg(cpv)
							except KeyError:
								# Package is no longer installed.
								continue

							# Skip stale entries whose COUNTER/mtime no
							# longer match the installed package.
							if current_hash != hash_value:
								continue

							if is_basename:
								for p in dblink(cpv).getcontents():
									if os.path.basename(p) == name:
										owners.append((cpv, p[len(root):]))
							else:
								if dblink(cpv).isowner(path):
									owners.append((cpv, path))

					except StopIteration:
						# Raised by dblink() above when its cache grows
						# too large: restart this path (and all remaining
						# paths) via the low-memory search.
						path_iter.append(path)
						del owners[:]
						dblink_cache.clear()
						gc.collect()
						for x in self._iter_owners_low_mem(path_iter):
							yield x
						return
					else:
						for cpv, p in owners:
							yield (dblink(cpv), p)
1240
1241                 def _iter_owners_low_mem(self, path_list):
1242                         """
1243                         This implemention will make a short-lived dblink instance (and
1244                         parse CONTENTS) for every single installed package. This is
1245                         slower and but uses less memory than the method which uses the
1246                         basename cache.
1247                         """
1248
1249                         if not path_list:
1250                                 return
1251
1252                         path_info_list = []
1253                         for path in path_list:
1254                                 is_basename = os.sep != path[:1]
1255                                 if is_basename:
1256                                         name = path
1257                                 else:
1258                                         name = os.path.basename(path.rstrip(os.path.sep))
1259                                 path_info_list.append((path, name, is_basename))
1260
1261                         root = self._vardb._eroot
1262                         for cpv in self._vardb.cpv_all():
1263                                 dblnk =  self._vardb._dblink(cpv)
1264
1265                                 for path, name, is_basename in path_info_list:
1266                                         if is_basename:
1267                                                 for p in dblnk.getcontents():
1268                                                         if os.path.basename(p) == name:
1269                                                                 yield dblnk, p[len(root):]
1270                                         else:
1271                                                 if dblnk.isowner(path):
1272                                                         yield dblnk, path
1273
class vartree(object):
	"""Interface to the installed-package tree (var/db/pkg) for one ROOT."""

	def __init__(self, root=None, virtual=DeprecationWarning, categories=None,
		settings=None):
		"""
		@param root: deprecated and unused; settings['ROOT'] is authoritative
		@param virtual: deprecated and unused
		@param categories: unused, kept for backward compatibility
		@param settings: portage config; defaults to portage.settings
		"""
		if settings is None:
			settings = portage.settings

		if root is not None and root != settings['ROOT']:
			warnings.warn(
				"The 'root' parameter of the portage.dbapi.vartree.vartree"
				" constructor is now unused. Use settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)

		if virtual is not DeprecationWarning:
			warnings.warn(
				"The 'virtual' parameter of the portage.dbapi.vartree.vartree"
				" constructor is unused",
				DeprecationWarning, stacklevel=2)

		self.settings = settings
		self.dbapi = vardbapi(settings=settings, vartree=self)
		self.populated = 1

	@property
	def root(self):
		"""Deprecated accessor for settings['ROOT']."""
		warnings.warn(
			"The root attribute of portage.dbapi.vartree.vartree"
			" is deprecated. Use settings['ROOT'] instead.",
			DeprecationWarning, stacklevel=3)
		return self.settings['ROOT']

	def getpath(self, mykey, filename=None):
		"""Delegate to vardbapi.getpath()."""
		return self.dbapi.getpath(mykey, filename=filename)

	def zap(self, mycpv):
		"""No-op, retained for interface compatibility."""
		return

	def inject(self, mycpv):
		"""No-op, retained for interface compatibility."""
		return

	def get_provide(self, mycpv):
		"""Return the USE-reduced PROVIDE virtuals of an installed cpv,
		as a list of 'category/package' strings. On parse failure a
		diagnostic is written and an empty list is returned."""
		myprovides = []
		mylines = None
		try:
			mylines, myuse = self.dbapi.aux_get(mycpv, ["PROVIDE", "USE"])
			if mylines:
				mylines = use_reduce(mylines, uselist=myuse.split(), flat=True)
				for myprovide in mylines:
					# catpkgsplit returns None for non-versioned atoms;
					# fall back to a plain '/' split in that case.
					mys = catpkgsplit(myprovide) or myprovide.split("/")
					myprovides.append(mys[0] + "/" + mys[1])
			return myprovides
		except SystemExit:
			raise
		except Exception as e:
			mydir = self.dbapi.getpath(mycpv)
			writemsg(_("\nParse Error reading PROVIDE and USE in '%s'\n") % mydir,
				noiselevel=-1)
			if mylines:
				writemsg(_("Possibly Invalid: '%s'\n") % str(mylines),
					noiselevel=-1)
			writemsg(_("Exception: %s\n\n") % str(e), noiselevel=-1)
			return []

	def get_all_provides(self):
		"""Map each provided virtual to the list of cpvs providing it."""
		myprovides = {}
		for node in self.getallcpv():
			for mykey in self.get_provide(node):
				myprovides.setdefault(mykey, []).append(node)
		return myprovides

	def dep_bestmatch(self, mydep, use_cache=1):
		"compatibility method -- all matches, not just visible ones"
		expanded = dep_expand(mydep, mydb=self.dbapi, settings=self.settings)
		mymatch = best(self.dbapi.match(expanded, use_cache=use_cache))
		return "" if mymatch is None else mymatch

	def dep_match(self, mydep, use_cache=1):
		"compatibility method -- we want to see all matches, not just visible ones"
		mymatch = self.dbapi.match(mydep, use_cache=use_cache)
		return [] if mymatch is None else mymatch

	def exists_specific(self, cpv):
		"""True if the exact cpv is installed."""
		return self.dbapi.cpv_exists(cpv)

	def getallcpv(self):
		"""temporary function, probably to be renamed --- Gets a list of all
		category/package-versions installed on the system."""
		return self.dbapi.cpv_all()

	def getallnodes(self):
		"""new behavior: these are all *unmasked* nodes.  There may or may not be available
		masked package for nodes in this nodes list."""
		return self.dbapi.cp_all()

	def getebuildpath(self, fullpackage):
		"""Return the path of the ebuild stored in the vardb entry."""
		cat, package = catsplit(fullpackage)
		return self.getpath(fullpackage, filename=package + ".ebuild")

	def getslot(self, mycatpkg):
		"Get a slot for a catpkg; assume it exists."
		try:
			slot, = self.dbapi.aux_get(mycatpkg, ["SLOT"])
		except KeyError:
			return ""
		return slot

	def populate(self):
		"""No-op beyond flagging the tree as populated."""
		self.populated = 1
1399
class dblink(object):
	"""
	This class provides an interface to the installed package database
	At present this is implemented as a text backend in /var/db/pkg.
	"""

	# NOTE: this class-level import makes 're' a class attribute; kept
	# as-is for compatibility with the original layout.
	import re
	# Matches paths that still need normalize_path(): double slashes,
	# a path without a leading '/', a trailing slash, or '.'/'..'
	# components.
	_normalize_needed = re.compile(r'//|^[^/]|./$|(^|/)\.\.?(/|$)')

	# Parses the three CONTENTS entry forms (see getcontents()):
	#   "dev|dir|fif <path>"              -> group 'dir'
	#   "obj <path> <md5> <mtime>"        -> group 'obj'
	#   "sym <src> -> <dest> <mtime>"     -> group 'sym'
	# The 'oldsym' group accepts a legacy os.lstat-style tuple in place
	# of the plain mtime (very old installs; see bug #351814).
	_contents_re = re.compile(r'^(' + \
		r'(?P<dir>(dev|dir|fif) (.+))|' + \
		r'(?P<obj>(obj) (.+) (\S+) (\d+))|' + \
		r'(?P<sym>(sym) (.+) -> (.+) ((\d+)|(?P<oldsym>(' + \
		r'\(\d+, \d+L, \d+L, \d+, \d+, \d+, \d+L, \d+, (\d+), \d+\)))))' + \
		r')$'
	)

	# These files are generated by emerge, so we need to remove
	# them when they are the only thing left in a directory.
	_infodir_cleanup = frozenset(["dir", "dir.old"])

	# Errnos tolerated when unlinking files — presumably consumed by the
	# unmerge/removal code later in this file; TODO confirm call sites.
	_ignored_unlink_errnos = (
		errno.EBUSY, errno.ENOENT,
		errno.ENOTDIR, errno.EISDIR)

	# Errnos tolerated when removing directories — presumably consumed by
	# the unmerge/removal code later in this file; TODO confirm call sites.
	_ignored_rmdir_errnos = (
		errno.EEXIST, errno.ENOTEMPTY,
		errno.EBUSY, errno.ENOENT,
		errno.ENOTDIR, errno.EISDIR,
		errno.EPERM)
1430
	def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None,
		vartree=None, blockers=None, scheduler=None, pipe=None):
		"""
		Creates a DBlink object for a given CPV.
		The given CPV may not be present in the database already.

		@param cat: Category
		@type cat: String
		@param pkg: Package (PV)
		@type pkg: String
		@param myroot: ignored, settings['ROOT'] is used instead
		@type myroot: String (Path)
		@param settings: Typically portage.settings
		@type settings: portage.config
		@param treetype: one of ['porttree','bintree','vartree']
		@type treetype: String
		@param vartree: an instance of vartree corresponding to myroot.
		@type vartree: vartree
		@param blockers: stored as self._blockers; not otherwise used in
			this method (semantics defined by callers — TODO confirm)
		@param scheduler: stored as self._scheduler; presumably drives
			merge/unmerge phase execution — verify against callers
		@param pipe: stored as self._pipe; presumably an IPC file
			descriptor used by subprocess merges — verify against callers
		@raise TypeError: if settings is None
		"""

		if settings is None:
			raise TypeError("settings argument is required")

		mysettings = settings
		self._eroot = mysettings['EROOT']
		self.cat = cat
		self.pkg = pkg
		self.mycpv = self.cat + "/" + self.pkg
		# Reuse the _pkg_str already interned on settings when it refers
		# to the same cpv; otherwise build a fresh one.
		if self.mycpv == settings.mycpv and \
			isinstance(settings.mycpv, _pkg_str):
			self.mycpv = settings.mycpv
		else:
			self.mycpv = _pkg_str(self.mycpv)
		# mysplit is [cp, version, revision] (cpv_split minus category,
		# with the first element replaced by the full cp).
		self.mysplit = list(self.mycpv.cpv_split[1:])
		self.mysplit[0] = self.mycpv.cp
		self.treetype = treetype
		if vartree is None:
			vartree = portage.db[self._eroot]["vartree"]
		self.vartree = vartree
		self._blockers = blockers
		self._scheduler = scheduler
		# Database layout: <EROOT>/<VDB_PATH>/<cat>/<pkg>, with a
		# "-MERGING-" staging directory used while a merge is in progress.
		self.dbroot = normalize_path(os.path.join(self._eroot, VDB_PATH))
		self.dbcatdir = self.dbroot+"/"+cat
		self.dbpkgdir = self.dbcatdir+"/"+pkg
		self.dbtmpdir = self.dbcatdir+"/-MERGING-"+pkg
		self.dbdir = self.dbpkgdir
		self.settings = mysettings
		self._verbose = self.settings.get("PORTAGE_VERBOSE") == "1"

		self.myroot = self.settings['ROOT']
		self._installed_instance = None
		# Lazily populated caches; see getcontents() and
		# _clear_contents_cache().
		self.contentscache = None
		self._contents_inodes = None
		self._contents_basenames = None
		self._linkmap_broken = False
		self._hardlink_merge_map = {}
		# Identity key used by __hash__/__eq__.
		self._hash_key = (self._eroot, self.mycpv)
		# ConfigProtect instance, built on first use by _get_protect_obj().
		self._protect_obj = None
		self._pipe = pipe
1490
1491         def __hash__(self):
1492                 return hash(self._hash_key)
1493
1494         def __eq__(self, other):
1495                 return isinstance(other, dblink) and \
1496                         self._hash_key == other._hash_key
1497
1498         def _get_protect_obj(self):
1499
1500                 if self._protect_obj is None:
1501                         self._protect_obj = ConfigProtect(self._eroot,
1502                         portage.util.shlex_split(
1503                                 self.settings.get("CONFIG_PROTECT", "")),
1504                         portage.util.shlex_split(
1505                                 self.settings.get("CONFIG_PROTECT_MASK", "")))
1506
1507                 return self._protect_obj
1508
1509         def isprotected(self, obj):
1510                 return self._get_protect_obj().isprotected(obj)
1511
1512         def updateprotect(self):
1513                 self._get_protect_obj().updateprotect()
1514
1515         def lockdb(self):
1516                 self.vartree.dbapi.lock()
1517
1518         def unlockdb(self):
1519                 self.vartree.dbapi.unlock()
1520
1521         def getpath(self):
1522                 "return path to location of db information (for >>> informational display)"
1523                 return self.dbdir
1524
1525         def exists(self):
1526                 "does the db entry exist?  boolean."
1527                 return os.path.exists(self.dbdir)
1528
1529         def delete(self):
1530                 """
1531                 Remove this entry from the database
1532                 """
1533                 if not os.path.exists(self.dbdir):
1534                         return
1535
1536                 # Check validity of self.dbdir before attempting to remove it.
1537                 if not self.dbdir.startswith(self.dbroot):
1538                         writemsg(_("portage.dblink.delete(): invalid dbdir: %s\n") % \
1539                                 self.dbdir, noiselevel=-1)
1540                         return
1541
1542                 shutil.rmtree(self.dbdir)
1543                 # If empty, remove parent category directory.
1544                 try:
1545                         os.rmdir(os.path.dirname(self.dbdir))
1546                 except OSError:
1547                         pass
1548                 self.vartree.dbapi._remove(self)
1549
1550         def clearcontents(self):
1551                 """
1552                 For a given db entry (self), erase the CONTENTS values.
1553                 """
1554                 self.lockdb()
1555                 try:
1556                         if os.path.exists(self.dbdir+"/CONTENTS"):
1557                                 os.unlink(self.dbdir+"/CONTENTS")
1558                 finally:
1559                         self.unlockdb()
1560
1561         def _clear_contents_cache(self):
1562                 self.contentscache = None
1563                 self._contents_inodes = None
1564                 self._contents_basenames = None
1565
	def getcontents(self):
		"""
		Get the installed files of a given package (aka what that package installed)

		Parses the CONTENTS file of this vardb entry and returns a dict
		mapping each installed path to a type-specific tuple:

		  dev/dir/fif entries -> (type,)
		  obj entries         -> (type, mtime, md5sum)
		  sym entries         -> (type, mtime, dest)

		Parent directories are implicitly added as (u'dir',) entries, and
		the result is cached in self.contentscache. Unparseable lines are
		reported via writemsg and skipped.
		"""
		contents_file = os.path.join(self.dbdir, "CONTENTS")
		if self.contentscache is not None:
			return self.contentscache
		pkgfiles = {}
		try:
			myc = io.open(_unicode_encode(contents_file,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# Missing CONTENTS is treated as an empty package; cache the
			# empty result too.
			self.contentscache = pkgfiles
			return pkgfiles
		mylines = myc.readlines()
		myc.close()
		null_byte = "\0"
		normalize_needed = self._normalize_needed
		contents_re = self._contents_re
		# Group indices of the named alternatives; per-field groups are
		# addressed relative to these bases below.
		obj_index = contents_re.groupindex['obj']
		dir_index = contents_re.groupindex['dir']
		sym_index = contents_re.groupindex['sym']
		# The old symlink format may exist on systems that have packages
		# which were installed many years ago (see bug #351814).
		oldsym_index = contents_re.groupindex['oldsym']
		# CONTENTS files already contain EPREFIX
		myroot = self.settings['ROOT']
		if myroot == os.path.sep:
			myroot = None
		# used to generate parent dir entries
		dir_entry = (_unicode_decode("dir"),)
		# Depth of EROOT in path components; the implicit parent-dir
		# generation below stops at the EROOT boundary.
		eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
		pos = 0
		errors = []
		for pos, line in enumerate(mylines):
			if null_byte in line:
				# Null bytes are a common indication of corruption.
				errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
				continue
			line = line.rstrip("\n")
			m = contents_re.match(line)
			if m is None:
				errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
				continue

			if m.group(obj_index) is not None:
				base = obj_index
				#format: type, mtime, md5sum
				data = (m.group(base+1), m.group(base+4), m.group(base+3))
			elif m.group(dir_index) is not None:
				base = dir_index
				#format: type
				data = (m.group(base+1),)
			elif m.group(sym_index) is not None:
				base = sym_index
				if m.group(oldsym_index) is None:
					mtime = m.group(base+5)
				else:
					# Legacy stat-tuple format: extract the embedded mtime.
					mtime = m.group(base+8)
				#format: type, mtime, dest
				data = (m.group(base+1), mtime, m.group(base+3))
			else:
				# This won't happen as long the regular expression
				# is written to only match valid entries.
				raise AssertionError(_("required group not found " + \
					"in CONTENTS entry: '%s'") % line)

			path = m.group(base+2)
			if normalize_needed.search(path) is not None:
				path = normalize_path(path)
				if not path.startswith(os.path.sep):
					path = os.path.sep + path

			if myroot is not None:
				path = os.path.join(myroot, path.lstrip(os.path.sep))

			# Implicitly add parent directories, since we can't necessarily
			# assume that they are explicitly listed in CONTENTS, and it's
			# useful for callers if they can rely on parent directory entries
			# being generated here (crucial for things like dblink.isowner()).
			path_split = path.split(os.sep)
			path_split.pop()
			while len(path_split) > eroot_split_len:
				parent = os.sep.join(path_split)
				if parent in pkgfiles:
					break
				pkgfiles[parent] = dir_entry
				path_split.pop()

			pkgfiles[path] = data

		if errors:
			writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
			for pos, e in errors:
				writemsg(_("!!!   line %d: %s\n") % (pos, e), noiselevel=-1)
		self.contentscache = pkgfiles
		return pkgfiles
1668
	def _prune_plib_registry(self, unmerge=False,
		needed=None, preserve_paths=None):
		"""
		Prune the preserved-libs registry: rebuild the linkage map, update
		registry entries for self.mycpv, and remove preserved libraries
		that no longer have any consumers. No-op when the linkmap is
		broken/unavailable or there is no registry.

		@param unmerge: True when called as part of unmerging self.mycpv
		@param needed: filename containing NEEDED data still valid after
			unmerge (passed to _linkmap_rebuild as include_file)
		@param preserve_paths: paths preserved by a replacement package
			instance that is currently being merged (not yet registered)
		"""
		# remove preserved libraries that don't have any consumers left
		if not (self._linkmap_broken or
			self.vartree.dbapi._linkmap is None or
			self.vartree.dbapi._plib_registry is None):
			# Lock order: filesystem lock first, then registry lock;
			# released in reverse order in the finally block below.
			self.vartree.dbapi._fs_lock()
			plib_registry = self.vartree.dbapi._plib_registry
			plib_registry.lock()
			try:
				plib_registry.load()

				unmerge_with_replacement = \
					unmerge and preserve_paths is not None
				if unmerge_with_replacement:
					# If self.mycpv is about to be unmerged and we
					# have a replacement package, we want to exclude
					# the irrelevant NEEDED data that belongs to
					# files which are being unmerged now.
					exclude_pkgs = (self.mycpv,)
				else:
					exclude_pkgs = None

				self._linkmap_rebuild(exclude_pkgs=exclude_pkgs,
					include_file=needed, preserve_paths=preserve_paths)

				if unmerge:
					unmerge_preserve = None
					if not unmerge_with_replacement:
						# No replacement: decide which of our own libs
						# must be preserved past the unmerge.
						unmerge_preserve = \
							self._find_libs_to_preserve(unmerge=True)
					counter = self.vartree.dbapi.cpv_counter(self.mycpv)
					plib_registry.unregister(self.mycpv,
						self.settings["SLOT"], counter)
					if unmerge_preserve:
						for path in sorted(unmerge_preserve):
							contents_key = self._match_contents(path)
							if not contents_key:
								continue
							obj_type = self.getcontents()[contents_key][0]
							self._display_merge(_(">>> needed   %s %s\n") % \
								(obj_type, contents_key), noiselevel=-1)
						plib_registry.register(self.mycpv,
							self.settings["SLOT"], counter, unmerge_preserve)
						# Remove the preserved files from our contents
						# so that they won't be unmerged.
						self.vartree.dbapi.removeFromContents(self,
							unmerge_preserve)

				unmerge_no_replacement = \
					unmerge and not unmerge_with_replacement
				cpv_lib_map = self._find_unused_preserved_libs(
					unmerge_no_replacement)
				if cpv_lib_map:
					# Physically remove the unused preserved libs and drop
					# them from the owning packages' CONTENTS.
					self._remove_preserved_libs(cpv_lib_map)
					self.vartree.dbapi.lock()
					try:
						for cpv, removed in cpv_lib_map.items():
							if not self.vartree.dbapi.cpv_exists(cpv):
								continue
							self.vartree.dbapi.removeFromContents(cpv, removed)
					finally:
						self.vartree.dbapi.unlock()

				plib_registry.store()
			finally:
				plib_registry.unlock()
				self.vartree.dbapi._fs_unlock()
1737
1738         def unmerge(self, pkgfiles=None, trimworld=None, cleanup=True,
1739                 ldpath_mtimes=None, others_in_slot=None, needed=None,
1740                 preserve_paths=None):
1741                 """
1742                 Calls prerm
1743                 Unmerges a given package (CPV)
1744                 calls postrm
1745                 calls cleanrm
1746                 calls env_update
1747                 
1748                 @param pkgfiles: files to unmerge (generally self.getcontents() )
1749                 @type pkgfiles: Dictionary
1750                 @param trimworld: Unused
1751                 @type trimworld: Boolean
1752                 @param cleanup: cleanup to pass to doebuild (see doebuild)
1753                 @type cleanup: Boolean
1754                 @param ldpath_mtimes: mtimes to pass to env_update (see env_update)
1755                 @type ldpath_mtimes: Dictionary
1756                 @param others_in_slot: all dblink instances in this slot, excluding self
1757                 @type others_in_slot: list
1758                 @param needed: Filename containing libraries needed after unmerge.
1759                 @type needed: String
1760                 @param preserve_paths: Libraries preserved by a package instance that
1761                         is currently being merged. They need to be explicitly passed to the
1762                         LinkageMap, since they are not registered in the
1763                         PreservedLibsRegistry yet.
1764                 @type preserve_paths: set
1765                 @rtype: Integer
1766                 @return:
1767                 1. os.EX_OK if everything went well.
1768                 2. return code of the failed phase (for prerm, postrm, cleanrm)
1769                 """
1770
1771                 if trimworld is not None:
1772                         warnings.warn("The trimworld parameter of the " + \
1773                                 "portage.dbapi.vartree.dblink.unmerge()" + \
1774                                 " method is now unused.",
1775                                 DeprecationWarning, stacklevel=2)
1776
1777                 background = False
1778                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1779                 if self._scheduler is None:
1780                         # We create a scheduler instance and use it to
1781                         # log unmerge output separately from merge output.
1782                         self._scheduler = PollScheduler().sched_iface
1783                 if self.settings.get("PORTAGE_BACKGROUND") == "subprocess":
1784                         if self.settings.get("PORTAGE_BACKGROUND_UNMERGE") == "1":
1785                                 self.settings["PORTAGE_BACKGROUND"] = "1"
1786                                 self.settings.backup_changes("PORTAGE_BACKGROUND")
1787                                 background = True
1788                         elif self.settings.get("PORTAGE_BACKGROUND_UNMERGE") == "0":
1789                                 self.settings["PORTAGE_BACKGROUND"] = "0"
1790                                 self.settings.backup_changes("PORTAGE_BACKGROUND")
1791                 elif self.settings.get("PORTAGE_BACKGROUND") == "1":
1792                         background = True
1793
1794                 self.vartree.dbapi._bump_mtime(self.mycpv)
1795                 showMessage = self._display_merge
1796                 if self.vartree.dbapi._categories is not None:
1797                         self.vartree.dbapi._categories = None
1798
1799                 # When others_in_slot is not None, the backup has already been
1800                 # handled by the caller.
1801                 caller_handles_backup = others_in_slot is not None
1802
1803                 # When others_in_slot is supplied, the security check has already been
1804                 # done for this slot, so it shouldn't be repeated until the next
1805                 # replacement or unmerge operation.
1806                 if others_in_slot is None:
1807                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
1808                         slot_matches = self.vartree.dbapi.match(
1809                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
1810                         others_in_slot = []
1811                         for cur_cpv in slot_matches:
1812                                 if cur_cpv == self.mycpv:
1813                                         continue
1814                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
1815                                         settings=self.settings, vartree=self.vartree,
1816                                         treetype="vartree", pipe=self._pipe))
1817
1818                         retval = self._security_check([self] + others_in_slot)
1819                         if retval:
1820                                 return retval
1821
1822                 contents = self.getcontents()
1823                 # Now, don't assume that the name of the ebuild is the same as the
1824                 # name of the dir; the package may have been moved.
1825                 myebuildpath = os.path.join(self.dbdir, self.pkg + ".ebuild")
1826                 failures = 0
1827                 ebuild_phase = "prerm"
1828                 mystuff = os.listdir(self.dbdir)
1829                 for x in mystuff:
1830                         if x.endswith(".ebuild"):
1831                                 if x[:-7] != self.pkg:
1832                                         # Clean up after vardbapi.move_ent() breakage in
1833                                         # portage versions before 2.1.2
1834                                         os.rename(os.path.join(self.dbdir, x), myebuildpath)
1835                                         write_atomic(os.path.join(self.dbdir, "PF"), self.pkg+"\n")
1836                                 break
1837
1838                 if self.mycpv != self.settings.mycpv or \
1839                         "EAPI" not in self.settings.configdict["pkg"]:
1840                         # We avoid a redundant setcpv call here when
1841                         # the caller has already taken care of it.
1842                         self.settings.setcpv(self.mycpv, mydb=self.vartree.dbapi)
1843
1844                 eapi_unsupported = False
1845                 try:
1846                         doebuild_environment(myebuildpath, "prerm",
1847                                 settings=self.settings, db=self.vartree.dbapi)
1848                 except UnsupportedAPIException as e:
1849                         eapi_unsupported = e
1850
1851                 builddir_lock = None
1852                 scheduler = self._scheduler
1853                 retval = os.EX_OK
1854                 try:
1855                         # Only create builddir_lock if the caller
1856                         # has not already acquired the lock.
1857                         if "PORTAGE_BUILDIR_LOCKED" not in self.settings:
1858                                 builddir_lock = EbuildBuildDir(
1859                                         scheduler=scheduler,
1860                                         settings=self.settings)
1861                                 builddir_lock.lock()
1862                                 prepare_build_dirs(settings=self.settings, cleanup=True)
1863                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1864
1865                         # Do this before the following _prune_plib_registry call, since
1866                         # that removes preserved libraries from our CONTENTS, and we
1867                         # may want to backup those libraries first.
1868                         if not caller_handles_backup:
1869                                 retval = self._pre_unmerge_backup(background)
1870                                 if retval != os.EX_OK:
1871                                         showMessage(_("!!! FAILED prerm: quickpkg: %s\n") % retval,
1872                                                 level=logging.ERROR, noiselevel=-1)
1873                                         return retval
1874
1875                         self._prune_plib_registry(unmerge=True, needed=needed,
1876                                 preserve_paths=preserve_paths)
1877
1878                         # Log the error after PORTAGE_LOG_FILE is initialized
1879                         # by prepare_build_dirs above.
1880                         if eapi_unsupported:
1881                                 # Sometimes this happens due to corruption of the EAPI file.
1882                                 failures += 1
1883                                 showMessage(_("!!! FAILED prerm: %s\n") % \
1884                                         os.path.join(self.dbdir, "EAPI"),
1885                                         level=logging.ERROR, noiselevel=-1)
1886                                 showMessage(_unicode_decode("%s\n") % (eapi_unsupported,),
1887                                         level=logging.ERROR, noiselevel=-1)
1888                         elif os.path.isfile(myebuildpath):
1889                                 phase = EbuildPhase(background=background,
1890                                         phase=ebuild_phase, scheduler=scheduler,
1891                                         settings=self.settings)
1892                                 phase.start()
1893                                 retval = phase.wait()
1894
1895                                 # XXX: Decide how to handle failures here.
1896                                 if retval != os.EX_OK:
1897                                         failures += 1
1898                                         showMessage(_("!!! FAILED prerm: %s\n") % retval,
1899                                                 level=logging.ERROR, noiselevel=-1)
1900
1901                         self.vartree.dbapi._fs_lock()
1902                         try:
1903                                 self._unmerge_pkgfiles(pkgfiles, others_in_slot)
1904                         finally:
1905                                 self.vartree.dbapi._fs_unlock()
1906                         self._clear_contents_cache()
1907
1908                         if not eapi_unsupported and os.path.isfile(myebuildpath):
1909                                 ebuild_phase = "postrm"
1910                                 phase = EbuildPhase(background=background,
1911                                         phase=ebuild_phase, scheduler=scheduler,
1912                                         settings=self.settings)
1913                                 phase.start()
1914                                 retval = phase.wait()
1915
1916                                 # XXX: Decide how to handle failures here.
1917                                 if retval != os.EX_OK:
1918                                         failures += 1
1919                                         showMessage(_("!!! FAILED postrm: %s\n") % retval,
1920                                                 level=logging.ERROR, noiselevel=-1)
1921
1922                 finally:
1923                         self.vartree.dbapi._bump_mtime(self.mycpv)
1924                         try:
1925                                         if not eapi_unsupported and os.path.isfile(myebuildpath):
1926                                                 if retval != os.EX_OK:
1927                                                         msg_lines = []
1928                                                         msg = _("The '%(ebuild_phase)s' "
1929                                                         "phase of the '%(cpv)s' package "
1930                                                         "has failed with exit value %(retval)s.") % \
1931                                                         {"ebuild_phase":ebuild_phase, "cpv":self.mycpv,
1932                                                         "retval":retval}
1933                                                         from textwrap import wrap
1934                                                         msg_lines.extend(wrap(msg, 72))
1935                                                         msg_lines.append("")
1936
1937                                                         ebuild_name = os.path.basename(myebuildpath)
1938                                                         ebuild_dir = os.path.dirname(myebuildpath)
1939                                                         msg = _("The problem occurred while executing "
1940                                                         "the ebuild file named '%(ebuild_name)s' "
1941                                                         "located in the '%(ebuild_dir)s' directory. "
1942                                                         "If necessary, manually remove "
1943                                                         "the environment.bz2 file and/or the "
1944                                                         "ebuild file located in that directory.") % \
1945                                                         {"ebuild_name":ebuild_name, "ebuild_dir":ebuild_dir}
1946                                                         msg_lines.extend(wrap(msg, 72))
1947                                                         msg_lines.append("")
1948
1949                                                         msg = _("Removal "
1950                                                         "of the environment.bz2 file is "
1951                                                         "preferred since it may allow the "
1952                                                         "removal phases to execute successfully. "
1953                                                         "The ebuild will be "
1954                                                         "sourced and the eclasses "
1955                                                         "from the current portage tree will be used "
1956                                                         "when necessary. Removal of "
1957                                                         "the ebuild file will cause the "
1958                                                         "pkg_prerm() and pkg_postrm() removal "
1959                                                         "phases to be skipped entirely.")
1960                                                         msg_lines.extend(wrap(msg, 72))
1961
1962                                                         self._eerror(ebuild_phase, msg_lines)
1963
1964                                         self._elog_process(phasefilter=("prerm", "postrm"))
1965
1966                                         if retval == os.EX_OK:
1967                                                 try:
1968                                                         doebuild_environment(myebuildpath, "cleanrm",
1969                                                                 settings=self.settings, db=self.vartree.dbapi)
1970                                                 except UnsupportedAPIException:
1971                                                         pass
1972                                                 phase = EbuildPhase(background=background,
1973                                                         phase="cleanrm", scheduler=scheduler,
1974                                                         settings=self.settings)
1975                                                 phase.start()
1976                                                 retval = phase.wait()
1977                         finally:
1978                                         if builddir_lock is not None:
1979                                                 builddir_lock.unlock()
1980
1981                 if log_path is not None:
1982
1983                         if not failures and 'unmerge-logs' not in self.settings.features:
1984                                 try:
1985                                         os.unlink(log_path)
1986                                 except OSError:
1987                                         pass
1988
1989                         try:
1990                                 st = os.stat(log_path)
1991                         except OSError:
1992                                 pass
1993                         else:
1994                                 if st.st_size == 0:
1995                                         try:
1996                                                 os.unlink(log_path)
1997                                         except OSError:
1998                                                 pass
1999
2000                 if log_path is not None and os.path.exists(log_path):
2001                         # Restore this since it gets lost somewhere above and it
2002                         # needs to be set for _display_merge() to be able to log.
2003                         # Note that the log isn't necessarily supposed to exist
2004                         # since if PORT_LOGDIR is unset then it's a temp file
2005                         # so it gets cleaned above.
2006                         self.settings["PORTAGE_LOG_FILE"] = log_path
2007                 else:
2008                         self.settings.pop("PORTAGE_LOG_FILE", None)
2009
2010                 env_update(target_root=self.settings['ROOT'],
2011                         prev_mtimes=ldpath_mtimes,
2012                         contents=contents, env=self.settings,
2013                         writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi)
2014
2015                 unmerge_with_replacement = preserve_paths is not None
2016                 if not unmerge_with_replacement:
2017                         # When there's a replacement package which calls us via treewalk,
2018                         # treewalk will automatically call _prune_plib_registry for us.
2019                         # Otherwise, we need to call _prune_plib_registry ourselves.
2020                         # Don't pass in the "unmerge=True" flag here, since that flag
2021                         # is intended to be used _prior_ to unmerge, not after.
2022                         self._prune_plib_registry()
2023
2024                 return os.EX_OK
2025
2026         def _display_merge(self, msg, level=0, noiselevel=0):
2027                 if not self._verbose and noiselevel >= 0 and level < logging.WARN:
2028                         return
2029                 if self._scheduler is None:
2030                         writemsg_level(msg, level=level, noiselevel=noiselevel)
2031                 else:
2032                         log_path = None
2033                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
2034                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
2035                         background = self.settings.get("PORTAGE_BACKGROUND") == "1"
2036
2037                         if background and log_path is None:
2038                                 if level >= logging.WARN:
2039                                         writemsg_level(msg, level=level, noiselevel=noiselevel)
2040                         else:
2041                                 self._scheduler.output(msg,
2042                                         log_path=log_path, background=background,
2043                                         level=level, noiselevel=noiselevel)
2044
2045         def _show_unmerge(self, zing, desc, file_type, file_name):
2046                 self._display_merge("%s %s %s %s\n" % \
2047                         (zing, desc.ljust(8), file_type, file_name))
2048
2049         def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
2050                 """
2051                 
2052                 Unmerges the contents of a package from the liveFS
2053                 Removes the VDB entry for self
2054                 
2055                 @param pkgfiles: typically self.getcontents()
2056                 @type pkgfiles: Dictionary { filename: [ 'type', '?', 'md5sum' ] }
2057                 @param others_in_slot: all dblink instances in this slot, excluding self
2058                 @type others_in_slot: list
2059                 @rtype: None
2060                 """
2061
2062                 os = _os_merge
2063                 perf_md5 = perform_md5
2064                 showMessage = self._display_merge
2065                 show_unmerge = self._show_unmerge
2066                 ignored_unlink_errnos = self._ignored_unlink_errnos
2067                 ignored_rmdir_errnos = self._ignored_rmdir_errnos
2068
2069                 if not pkgfiles:
2070                         showMessage(_("No package files given... Grabbing a set.\n"))
2071                         pkgfiles = self.getcontents()
2072
2073                 if others_in_slot is None:
2074                         others_in_slot = []
2075                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
2076                         slot_matches = self.vartree.dbapi.match(
2077                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
2078                         for cur_cpv in slot_matches:
2079                                 if cur_cpv == self.mycpv:
2080                                         continue
2081                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
2082                                         settings=self.settings,
2083                                         vartree=self.vartree, treetype="vartree", pipe=self._pipe))
2084
2085                 cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file)
2086                 stale_confmem = []
2087                 protected_symlinks = {}
2088
2089                 unmerge_orphans = "unmerge-orphans" in self.settings.features
2090                 calc_prelink = "prelink-checksums" in self.settings.features
2091
2092                 if pkgfiles:
2093                         self.updateprotect()
2094                         mykeys = list(pkgfiles)
2095                         mykeys.sort()
2096                         mykeys.reverse()
2097
2098                         #process symlinks second-to-last, directories last.
2099                         mydirs = set()
2100
2101                         uninstall_ignore = portage.util.shlex_split(
2102                                 self.settings.get("UNINSTALL_IGNORE", ""))
2103
2104                         def unlink(file_name, lstatobj):
2105                                 if bsd_chflags:
2106                                         if lstatobj.st_flags != 0:
2107                                                 bsd_chflags.lchflags(file_name, 0)
2108                                         parent_name = os.path.dirname(file_name)
2109                                         # Use normal stat/chflags for the parent since we want to
2110                                         # follow any symlinks to the real parent directory.
2111                                         pflags = os.stat(parent_name).st_flags
2112                                         if pflags != 0:
2113                                                 bsd_chflags.chflags(parent_name, 0)
2114                                 try:
2115                                         if not stat.S_ISLNK(lstatobj.st_mode):
2116                                                 # Remove permissions to ensure that any hardlinks to
2117                                                 # suid/sgid files are rendered harmless.
2118                                                 os.chmod(file_name, 0)
2119                                         os.unlink(file_name)
2120                                 except OSError as ose:
2121                                         # If the chmod or unlink fails, you are in trouble.
2122                                         # With Prefix this can be because the file is owned
2123                                         # by someone else (a screwup by root?), on a normal
2124                                         # system maybe filesystem corruption.  In any case,
2125                                         # if we backtrace and die here, we leave the system
2126                                         # in a totally undefined state, hence we just bleed
2127                                         # like hell and continue to hopefully finish all our
2128                                         # administrative and pkg_postinst stuff.
2129                                         self._eerror("postrm", 
2130                                                 ["Could not chmod or unlink '%s': %s" % \
2131                                                 (file_name, ose)])
2132                                 finally:
2133                                         if bsd_chflags and pflags != 0:
2134                                                 # Restore the parent flags we saved before unlinking
2135                                                 bsd_chflags.chflags(parent_name, pflags)
2136
2137                         unmerge_desc = {}
2138                         unmerge_desc["cfgpro"] = _("cfgpro")
2139                         unmerge_desc["replaced"] = _("replaced")
2140                         unmerge_desc["!dir"] = _("!dir")
2141                         unmerge_desc["!empty"] = _("!empty")
2142                         unmerge_desc["!fif"] = _("!fif")
2143                         unmerge_desc["!found"] = _("!found")
2144                         unmerge_desc["!md5"] = _("!md5")
2145                         unmerge_desc["!mtime"] = _("!mtime")
2146                         unmerge_desc["!obj"] = _("!obj")
2147                         unmerge_desc["!sym"] = _("!sym")
2148                         unmerge_desc["!prefix"] = _("!prefix")
2149
2150                         real_root = self.settings['ROOT']
2151                         real_root_len = len(real_root) - 1
2152                         eroot = self.settings["EROOT"]
2153
2154                         infodirs = frozenset(infodir for infodir in chain(
2155                                 self.settings.get("INFOPATH", "").split(":"),
2156                                 self.settings.get("INFODIR", "").split(":")) if infodir)
2157                         infodirs_inodes = set()
2158                         for infodir in infodirs:
2159                                 infodir = os.path.join(real_root, infodir.lstrip(os.sep))
2160                                 try:
2161                                         statobj = os.stat(infodir)
2162                                 except OSError:
2163                                         pass
2164                                 else:
2165                                         infodirs_inodes.add((statobj.st_dev, statobj.st_ino))
2166
2167                         for i, objkey in enumerate(mykeys):
2168
2169                                 obj = normalize_path(objkey)
2170                                 if os is _os_merge:
2171                                         try:
2172                                                 _unicode_encode(obj,
2173                                                         encoding=_encodings['merge'], errors='strict')
2174                                         except UnicodeEncodeError:
2175                                                 # The package appears to have been merged with a 
2176                                                 # different value of sys.getfilesystemencoding(),
2177                                                 # so fall back to utf_8 if appropriate.
2178                                                 try:
2179                                                         _unicode_encode(obj,
2180                                                                 encoding=_encodings['fs'], errors='strict')
2181                                                 except UnicodeEncodeError:
2182                                                         pass
2183                                                 else:
2184                                                         os = portage.os
2185                                                         perf_md5 = portage.checksum.perform_md5
2186
2187                                 file_data = pkgfiles[objkey]
2188                                 file_type = file_data[0]
2189
2190                                 # don't try to unmerge the prefix offset itself
2191                                 if len(obj) <= len(eroot) or not obj.startswith(eroot):
2192                                         show_unmerge("---", unmerge_desc["!prefix"], file_type, obj)
2193                                         continue
2194
2195                                 statobj = None
2196                                 try:
2197                                         statobj = os.stat(obj)
2198                                 except OSError:
2199                                         pass
2200                                 lstatobj = None
2201                                 try:
2202                                         lstatobj = os.lstat(obj)
2203                                 except (OSError, AttributeError):
2204                                         pass
2205                                 islink = lstatobj is not None and stat.S_ISLNK(lstatobj.st_mode)
2206                                 if lstatobj is None:
2207                                                 show_unmerge("---", unmerge_desc["!found"], file_type, obj)
2208                                                 continue
2209
2210                                 f_match = obj[len(eroot)-1:]
2211                                 ignore = False
2212                                 for pattern in uninstall_ignore:
2213                                         if fnmatch.fnmatch(f_match, pattern):
2214                                                 ignore = True
2215                                                 break
2216
2217                                 if not ignore:
2218                                         if islink and f_match in \
2219                                                 ("/lib", "/usr/lib", "/usr/local/lib"):
2220                                                 # Ignore libdir symlinks for bug #423127.
2221                                                 ignore = True
2222
2223                                 if ignore:
2224                                         show_unmerge("---", unmerge_desc["cfgpro"], file_type, obj)
2225                                         continue
2226
2227                                 # don't use EROOT, CONTENTS entries already contain EPREFIX
2228                                 if obj.startswith(real_root):
2229                                         relative_path = obj[real_root_len:]
2230                                         is_owned = False
2231                                         for dblnk in others_in_slot:
2232                                                 if dblnk.isowner(relative_path):
2233                                                         is_owned = True
2234                                                         break
2235
2236                                         if is_owned and islink and \
2237                                                 file_type in ("sym", "dir") and \
2238                                                 statobj and stat.S_ISDIR(statobj.st_mode):
2239                                                 # A new instance of this package claims the file, so
2240                                                 # don't unmerge it. If the file is symlink to a
2241                                                 # directory and the unmerging package installed it as
2242                                                 # a symlink, but the new owner has it listed as a
2243                                                 # directory, then we'll produce a warning since the
2244                                                 # symlink is a sort of orphan in this case (see
2245                                                 # bug #326685).
2246                                                 symlink_orphan = False
2247                                                 for dblnk in others_in_slot:
2248                                                         parent_contents_key = \
2249                                                                 dblnk._match_contents(relative_path)
2250                                                         if not parent_contents_key:
2251                                                                 continue
2252                                                         if not parent_contents_key.startswith(
2253                                                                 real_root):
2254                                                                 continue
2255                                                         if dblnk.getcontents()[
2256                                                                 parent_contents_key][0] == "dir":
2257                                                                 symlink_orphan = True
2258                                                                 break
2259
2260                                                 if symlink_orphan:
2261                                                         protected_symlinks.setdefault(
2262                                                                 (statobj.st_dev, statobj.st_ino),
2263                                                                 []).append(relative_path)
2264
2265                                         if is_owned:
2266                                                 show_unmerge("---", unmerge_desc["replaced"], file_type, obj)
2267                                                 continue
2268                                         elif relative_path in cfgfiledict:
2269                                                 stale_confmem.append(relative_path)
2270
2271                                 # Don't unlink symlinks to directories here since that can
2272                                 # remove /lib and /usr/lib symlinks.
2273                                 if unmerge_orphans and \
2274                                         lstatobj and not stat.S_ISDIR(lstatobj.st_mode) and \
2275                                         not (islink and statobj and stat.S_ISDIR(statobj.st_mode)) and \
2276                                         not self.isprotected(obj):
2277                                         try:
2278                                                 unlink(obj, lstatobj)
2279                                         except EnvironmentError as e:
2280                                                 if e.errno not in ignored_unlink_errnos:
2281                                                         raise
2282                                                 del e
2283                                         show_unmerge("<<<", "", file_type, obj)
2284                                         continue
2285
2286                                 lmtime = str(lstatobj[stat.ST_MTIME])
2287                                 if (pkgfiles[objkey][0] not in ("dir", "fif", "dev")) and (lmtime != pkgfiles[objkey][1]):
2288                                         show_unmerge("---", unmerge_desc["!mtime"], file_type, obj)
2289                                         continue
2290
2291                                 if file_type == "dir" and not islink:
2292                                         if lstatobj is None or not stat.S_ISDIR(lstatobj.st_mode):
2293                                                 show_unmerge("---", unmerge_desc["!dir"], file_type, obj)
2294                                                 continue
2295                                         mydirs.add((obj, (lstatobj.st_dev, lstatobj.st_ino)))
2296                                 elif file_type == "sym" or (file_type == "dir" and islink):
2297                                         if not islink:
2298                                                 show_unmerge("---", unmerge_desc["!sym"], file_type, obj)
2299                                                 continue
2300
2301                                         # If this symlink points to a directory then we don't want
2302                                         # to unmerge it if there are any other packages that
2303                                         # installed files into the directory via this symlink
2304                                         # (see bug #326685).
2305                                         # TODO: Resolving a symlink to a directory will require
2306                                         # simulation if $ROOT != / and the link is not relative.
2307                                         if islink and statobj and stat.S_ISDIR(statobj.st_mode) \
2308                                                 and obj.startswith(real_root):
2309
2310                                                 relative_path = obj[real_root_len:]
2311                                                 try:
2312                                                         target_dir_contents = os.listdir(obj)
2313                                                 except OSError:
2314                                                         pass
2315                                                 else:
2316                                                         if target_dir_contents:
2317                                                                 # If all the children are regular files owned
2318                                                                 # by this package, then the symlink should be
2319                                                                 # safe to unmerge.
2320                                                                 all_owned = True
2321                                                                 for child in target_dir_contents:
2322                                                                         child = os.path.join(relative_path, child)
2323                                                                         if not self.isowner(child):
2324                                                                                 all_owned = False
2325                                                                                 break
2326                                                                         try:
2327                                                                                 child_lstat = os.lstat(os.path.join(
2328                                                                                         real_root, child.lstrip(os.sep)))
2329                                                                         except OSError:
2330                                                                                 continue
2331
2332                                                                         if not stat.S_ISREG(child_lstat.st_mode):
2333                                                                                 # Nested symlinks or directories make
2334                                                                                 # the issue very complex, so just
2335                                                                                 # preserve the symlink in order to be
2336                                                                                 # on the safe side.
2337                                                                                 all_owned = False
2338                                                                                 break
2339
2340                                                                 if not all_owned:
2341                                                                         protected_symlinks.setdefault(
2342                                                                                 (statobj.st_dev, statobj.st_ino),
2343                                                                                 []).append(relative_path)
2344                                                                         show_unmerge("---", unmerge_desc["!empty"],
2345                                                                                 file_type, obj)
2346                                                                         continue
2347
2348                                         # Go ahead and unlink symlinks to directories here when
2349                                         # they're actually recorded as symlinks in the contents.
2350                                         # Normally, symlinks such as /lib -> lib64 are not recorded
2351                                         # as symlinks in the contents of a package.  If a package
2352                                         # installs something into ${D}/lib/, it is recorded in the
2353                                         # contents as a directory even if it happens to correspond
2354                                         # to a symlink when it's merged to the live filesystem.
2355                                         try:
2356                                                 unlink(obj, lstatobj)
2357                                                 show_unmerge("<<<", "", file_type, obj)
2358                                         except (OSError, IOError) as e:
2359                                                 if e.errno not in ignored_unlink_errnos:
2360                                                         raise
2361                                                 del e
2362                                                 show_unmerge("!!!", "", file_type, obj)
2363                                 elif pkgfiles[objkey][0] == "obj":
2364                                         if statobj is None or not stat.S_ISREG(statobj.st_mode):
2365                                                 show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
2366                                                 continue
2367                                         mymd5 = None
2368                                         try:
2369                                                 mymd5 = perf_md5(obj, calc_prelink=calc_prelink)
2370                                         except FileNotFound as e:
2371                                                 # the file has disappeared between now and our stat call
2372                                                 show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
2373                                                 continue
2374
2375                                         # string.lower is needed because db entries used to be in upper-case.  The
2376                                         # string.lower allows for backwards compatibility.
2377                                         if mymd5 != pkgfiles[objkey][2].lower():
2378                                                 show_unmerge("---", unmerge_desc["!md5"], file_type, obj)
2379                                                 continue
2380                                         try:
2381                                                 unlink(obj, lstatobj)
2382                                         except (OSError, IOError) as e:
2383                                                 if e.errno not in ignored_unlink_errnos:
2384                                                         raise
2385                                                 del e
2386                                         show_unmerge("<<<", "", file_type, obj)
2387                                 elif pkgfiles[objkey][0] == "fif":
2388                                         if not stat.S_ISFIFO(lstatobj[stat.ST_MODE]):
2389                                                 show_unmerge("---", unmerge_desc["!fif"], file_type, obj)
2390                                                 continue
2391                                         show_unmerge("---", "", file_type, obj)
2392                                 elif pkgfiles[objkey][0] == "dev":
2393                                         show_unmerge("---", "", file_type, obj)
2394
2395                         self._unmerge_dirs(mydirs, infodirs_inodes,
2396                                 protected_symlinks, unmerge_desc, unlink, os)
2397                         mydirs.clear()
2398
2399                 if protected_symlinks:
2400                         self._unmerge_protected_symlinks(others_in_slot, infodirs_inodes,
2401                                 protected_symlinks, unmerge_desc, unlink, os)
2402
2403                 if protected_symlinks:
2404                         msg = "One or more symlinks to directories have been " + \
2405                                 "preserved in order to ensure that files installed " + \
2406                                 "via these symlinks remain accessible. " + \
2407                                 "This indicates that the mentioned symlink(s) may " + \
2408                                 "be obsolete remnants of an old install, and it " + \
2409                                 "may be appropriate to replace a given symlink " + \
2410                                 "with the directory that it points to."
2411                         lines = textwrap.wrap(msg, 72)
2412                         lines.append("")
2413                         flat_list = set()
2414                         flat_list.update(*protected_symlinks.values())
2415                         flat_list = sorted(flat_list)
2416                         for f in flat_list:
2417                                 lines.append("\t%s" % (os.path.join(real_root,
2418                                         f.lstrip(os.sep))))
2419                         lines.append("")
2420                         self._elog("elog", "postrm", lines)
2421
2422                 # Remove stale entries from config memory.
2423                 if stale_confmem:
2424                         for filename in stale_confmem:
2425                                 del cfgfiledict[filename]
2426                         writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
2427
2428                 #remove self from vartree database so that our own virtual gets zapped if we're the last node
2429                 self.vartree.zap(self.mycpv)
2430
2431         def _unmerge_protected_symlinks(self, others_in_slot, infodirs_inodes,
2432                 protected_symlinks, unmerge_desc, unlink, os):
2433
2434                 real_root = self.settings['ROOT']
2435                 show_unmerge = self._show_unmerge
2436                 ignored_unlink_errnos = self._ignored_unlink_errnos
2437
2438                 flat_list = set()
2439                 flat_list.update(*protected_symlinks.values())
2440                 flat_list = sorted(flat_list)
2441
2442                 for f in flat_list:
2443                         for dblnk in others_in_slot:
2444                                 if dblnk.isowner(f):
2445                                         # If another package in the same slot installed
2446                                         # a file via a protected symlink, return early
2447                                         # and don't bother searching for any other owners.
2448                                         return
2449
2450                 msg = []
2451                 msg.append("")
2452                 msg.append(_("Directory symlink(s) may need protection:"))
2453                 msg.append("")
2454
2455                 for f in flat_list:
2456                         msg.append("\t%s" % \
2457                                 os.path.join(real_root, f.lstrip(os.path.sep)))
2458
2459                 msg.append("")
2460                 msg.append(_("Searching all installed"
2461                         " packages for files installed via above symlink(s)..."))
2462                 msg.append("")
2463                 self._elog("elog", "postrm", msg)
2464
2465                 self.lockdb()
2466                 try:
2467                         owners = self.vartree.dbapi._owners.get_owners(flat_list)
2468                         self.vartree.dbapi.flush_cache()
2469                 finally:
2470                         self.unlockdb()
2471
2472                 for owner in list(owners):
2473                         if owner.mycpv == self.mycpv:
2474                                 owners.pop(owner, None)
2475
2476                 if not owners:
2477                         msg = []
2478                         msg.append(_("The above directory symlink(s) are all "
2479                                 "safe to remove. Removing them now..."))
2480                         msg.append("")
2481                         self._elog("elog", "postrm", msg)
2482                         dirs = set()
2483                         for unmerge_syms in protected_symlinks.values():
2484                                 for relative_path in unmerge_syms:
2485                                         obj = os.path.join(real_root,
2486                                                 relative_path.lstrip(os.sep))
2487                                         parent = os.path.dirname(obj)
2488                                         while len(parent) > len(self._eroot):
2489                                                 try:
2490                                                         lstatobj = os.lstat(parent)
2491                                                 except OSError:
2492                                                         break
2493                                                 else:
2494                                                         dirs.add((parent,
2495                                                                 (lstatobj.st_dev, lstatobj.st_ino)))
2496                                                         parent = os.path.dirname(parent)
2497                                         try:
2498                                                 unlink(obj, os.lstat(obj))
2499                                                 show_unmerge("<<<", "", "sym", obj)
2500                                         except (OSError, IOError) as e:
2501                                                 if e.errno not in ignored_unlink_errnos:
2502                                                         raise
2503                                                 del e
2504                                                 show_unmerge("!!!", "", "sym", obj)
2505
2506                         protected_symlinks.clear()
2507                         self._unmerge_dirs(dirs, infodirs_inodes,
2508                                 protected_symlinks, unmerge_desc, unlink, os)
2509                         dirs.clear()
2510
	def _unmerge_dirs(self, dirs, infodirs_inodes,
		protected_symlinks, unmerge_desc, unlink, os):
		"""
		Remove the given directories when they are empty, after first
		cleaning residual info-index files out of known info directories.
		When a directory is successfully removed, also unlink any
		protected symlinks that pointed to it.

		@param dirs: iterable of (path, (st_dev, st_ino)) pairs
		@param infodirs_inodes: set of (st_dev, st_ino) keys identifying
			directories that were (or may have been) in INFOPATH
		@param protected_symlinks: maps (st_dev, st_ino) of a symlink
			target to a list of symlink paths relative to ROOT
		@param unmerge_desc: maps short codes to human-readable unmerge
			status descriptions (e.g. "!empty")
		@param unlink: callable(path, lstatobj) used to remove a file
		@param os: the (possibly encoding-wrapped) os module to use
		"""

		show_unmerge = self._show_unmerge
		infodir_cleanup = self._infodir_cleanup
		ignored_unlink_errnos = self._ignored_unlink_errnos
		ignored_rmdir_errnos = self._ignored_rmdir_errnos
		real_root = self.settings['ROOT']

		# Reverse-sorted order visits child paths before their parents
		# ("a/b" sorts after "a"), so rmdir proceeds bottom-up.
		dirs = sorted(dirs)
		dirs.reverse()

		for obj, inode_key in dirs:
			# Treat any directory named "info" as a candidate here,
			# since it might have been in INFOPATH previously even
			# though it may not be there now.
			if inode_key in infodirs_inodes or \
				os.path.basename(obj) == "info":
				try:
					remaining = os.listdir(obj)
				except OSError:
					pass
				else:
					# Only schedule cleanup if everything remaining is a
					# known info-index name (per self._infodir_cleanup);
					# otherwise leave the directory contents alone.
					cleanup_info_dir = ()
					if remaining and \
						len(remaining) <= len(infodir_cleanup):
						if not set(remaining).difference(infodir_cleanup):
							cleanup_info_dir = remaining

					for child in cleanup_info_dir:
						child = os.path.join(obj, child)
						try:
							lstatobj = os.lstat(child)
							# Only regular files are removed; anything
							# else (symlink, subdir) is left in place.
							if stat.S_ISREG(lstatobj.st_mode):
								unlink(child, lstatobj)
								show_unmerge("<<<", "", "obj", child)
						except EnvironmentError as e:
							if e.errno not in ignored_unlink_errnos:
								raise
							del e
							show_unmerge("!!!", "", "obj", child)
			try:
				if bsd_chflags:
					# Temporarily clear BSD file flags on the directory
					# and its parent so rmdir can succeed; the parent's
					# flags are restored in the finally clause below.
					lstatobj = os.lstat(obj)
					if lstatobj.st_flags != 0:
						bsd_chflags.lchflags(obj, 0)
					parent_name = os.path.dirname(obj)
					# Use normal stat/chflags for the parent since we want to
					# follow any symlinks to the real parent directory.
					pflags = os.stat(parent_name).st_flags
					if pflags != 0:
						bsd_chflags.chflags(parent_name, 0)
				try:
					os.rmdir(obj)
				finally:
					# pflags is only bound when bsd_chflags is true, so
					# the short-circuit below keeps this safe elsewhere.
					if bsd_chflags and pflags != 0:
						# Restore the parent flags we saved before unlinking
						bsd_chflags.chflags(parent_name, pflags)
				show_unmerge("<<<", "", "dir", obj)
			except EnvironmentError as e:
				if e.errno not in ignored_rmdir_errnos:
					raise
				# ENOENT means the directory already vanished; anything
				# else ignorable means it simply wasn't empty.
				if e.errno != errno.ENOENT:
					show_unmerge("---", unmerge_desc["!empty"], "dir", obj)
				del e
			else:
				# When a directory is successfully removed, there's
				# no need to protect symlinks that point to it.
				# NOTE: obj is rebound below to each symlink path.
				unmerge_syms = protected_symlinks.pop(inode_key, None)
				if unmerge_syms is not None:
					for relative_path in unmerge_syms:
						obj = os.path.join(real_root,
							relative_path.lstrip(os.sep))
						try:
							unlink(obj, os.lstat(obj))
							show_unmerge("<<<", "", "sym", obj)
						except (OSError, IOError) as e:
							if e.errno not in ignored_unlink_errnos:
								raise
							del e
							show_unmerge("!!!", "", "sym", obj)
2592
2593         def isowner(self, filename, destroot=None):
2594                 """ 
2595                 Check if a file belongs to this package. This may
2596                 result in a stat call for the parent directory of
2597                 every installed file, since the inode numbers are
2598                 used to work around the problem of ambiguous paths
2599                 caused by symlinked directories. The results of
2600                 stat calls are cached to optimize multiple calls
2601                 to this method.
2602
2603                 @param filename:
2604                 @type filename:
2605                 @param destroot:
2606                 @type destroot:
2607                 @rtype: Boolean
2608                 @return:
2609                 1. True if this package owns the file.
2610                 2. False if this package does not own the file.
2611                 """
2612
2613                 if destroot is not None and destroot != self._eroot:
2614                         warnings.warn("The second parameter of the " + \
2615                                 "portage.dbapi.vartree.dblink.isowner()" + \
2616                                 " is now unused. Instead " + \
2617                                 "self.settings['EROOT'] will be used.",
2618                                 DeprecationWarning, stacklevel=2)
2619
2620                 return bool(self._match_contents(filename))
2621
	def _match_contents(self, filename, destroot=None):
		"""
		The matching contents entry is returned, which is useful
		since the path may differ from the one given by the caller,
		due to symlinks.

		Lookup order: exact CONTENTS match first, then a basename
		shortcut, then a parent-directory comparison by (st_dev,
		st_ino) so that paths reaching the same directory through
		symlinks still match. Basename and inode indexes are built
		lazily and cached on self.

		@param filename: path to look up, relative to ROOT
		@param destroot: deprecated and unused;
			self.settings['ROOT'] is used instead
		@rtype: String
		@return: the contents entry corresponding to the given path, or False
			if the file is not owned by this package.
		"""

		filename = _unicode_decode(filename,
			encoding=_encodings['content'], errors='strict')

		if destroot is not None and destroot != self._eroot:
			warnings.warn("The second parameter of the " + \
				"portage.dbapi.vartree.dblink._match_contents()" + \
				" is now unused. Instead " + \
				"self.settings['ROOT'] will be used.",
				DeprecationWarning, stacklevel=2)

		# don't use EROOT here, image already contains EPREFIX
		destroot = self.settings['ROOT']

		# The given filename argument might have a different encoding than the
		# the filenames contained in the contents, so use separate wrapped os
		# modules for each. The basename is more likely to contain non-ascii
		# characters than the directory path, so use os_filename_arg for all
		# operations involving the basename of the filename arg.
		os_filename_arg = _os_merge
		os = _os_merge

		# Probe whether the filename argument round-trips in the merge
		# encoding; if not, fall back to the filesystem encoding.
		try:
			_unicode_encode(filename,
				encoding=_encodings['merge'], errors='strict')
		except UnicodeEncodeError:
			# The package appears to have been merged with a
			# different value of sys.getfilesystemencoding(),
			# so fall back to utf_8 if appropriate.
			try:
				_unicode_encode(filename,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeEncodeError:
				pass
			else:
				os_filename_arg = portage.os

		destfile = normalize_path(
			os_filename_arg.path.join(destroot,
			filename.lstrip(os_filename_arg.path.sep)))

		# Fast path: exact match against the CONTENTS entries.
		pkgfiles = self.getcontents()
		if pkgfiles and destfile in pkgfiles:
			return destfile
		if pkgfiles:
			basename = os_filename_arg.path.basename(destfile)
			if self._contents_basenames is None:

				# Same encoding probe as above, but for the CONTENTS
				# entries themselves; may rebind the local os wrapper.
				try:
					for x in pkgfiles:
						_unicode_encode(x,
							encoding=_encodings['merge'],
							errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['fs'],
								errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

				self._contents_basenames = set(
					os.path.basename(x) for x in pkgfiles)
			if basename not in self._contents_basenames:
				# This is a shortcut that, in most cases, allows us to
				# eliminate this package as an owner without the need
				# to examine inode numbers of parent directories.
				return False

			# Use stat rather than lstat since we want to follow
			# any symlinks to the real parent directory.
			parent_path = os_filename_arg.path.dirname(destfile)
			try:
				parent_stat = os_filename_arg.stat(parent_path)
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
				return False
			if self._contents_inodes is None:

				# Re-run the encoding probe only if the basename pass
				# above did not already rebind os to portage.os.
				if os is _os_merge:
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['merge'],
								errors='strict')
					except UnicodeEncodeError:
						# The package appears to have been merged with a 
						# different value of sys.getfilesystemencoding(),
						# so fall back to utf_8 if appropriate.
						try:
							for x in pkgfiles:
								_unicode_encode(x,
									encoding=_encodings['fs'],
									errors='strict')
						except UnicodeEncodeError:
							pass
						else:
							os = portage.os

				# Index each parent directory's (st_dev, st_ino) so the
				# final lookup is symlink-insensitive.
				self._contents_inodes = {}
				parent_paths = set()
				for x in pkgfiles:
					p_path = os.path.dirname(x)
					if p_path in parent_paths:
						continue
					parent_paths.add(p_path)
					try:
						s = os.stat(p_path)
					except OSError:
						pass
					else:
						inode_key = (s.st_dev, s.st_ino)
						# Use lists of paths in case multiple
						# paths reference the same inode.
						p_path_list = self._contents_inodes.get(inode_key)
						if p_path_list is None:
							p_path_list = []
							self._contents_inodes[inode_key] = p_path_list
						if p_path not in p_path_list:
							p_path_list.append(p_path)

			# Any recorded parent path sharing the argument's parent
			# inode yields a candidate CONTENTS entry to verify.
			p_path_list = self._contents_inodes.get(
				(parent_stat.st_dev, parent_stat.st_ino))
			if p_path_list:
				for p_path in p_path_list:
					x = os_filename_arg.path.join(p_path, basename)
					if x in pkgfiles:
						return x

		return False
2770
2771         def _linkmap_rebuild(self, **kwargs):
2772                 """
2773                 Rebuild the self._linkmap if it's not broken due to missing
2774                 scanelf binary. Also, return early if preserve-libs is disabled
2775                 and the preserve-libs registry is empty.
2776                 """
2777                 if self._linkmap_broken or \
2778                         self.vartree.dbapi._linkmap is None or \
2779                         self.vartree.dbapi._plib_registry is None or \
2780                         ("preserve-libs" not in self.settings.features and \
2781                         not self.vartree.dbapi._plib_registry.hasEntries()):
2782                         return
2783                 try:
2784                         self.vartree.dbapi._linkmap.rebuild(**kwargs)
2785                 except CommandNotFound as e:
2786                         self._linkmap_broken = True
2787                         self._display_merge(_("!!! Disabling preserve-libs " \
2788                                 "due to error: Command Not Found: %s\n") % (e,),
2789                                 level=logging.ERROR, noiselevel=-1)
2790
	def _find_libs_to_preserve(self, unmerge=False):
		"""
		Get set of relative paths for libraries to be preserved. When
		unmerge is False, file paths to preserve are selected from
		self._installed_instance. Otherwise, paths are selected from
		self.

		@param unmerge: if True, candidate paths come from self (the
			package being unmerged); otherwise from
			self._installed_instance (the package being replaced)
		@type unmerge: bool
		@rtype: set
		@return: set of root-relative paths (regular files plus their
			soname symlinks) that still have outside consumers; empty
			set when preserve-libs support is unavailable or disabled
		"""
		# Bail out when linkmap support is broken or disabled, or when
		# there is no installed instance to select paths from.
		if self._linkmap_broken or \
			self.vartree.dbapi._linkmap is None or \
			self.vartree.dbapi._plib_registry is None or \
			(not unmerge and self._installed_instance is None) or \
			"preserve-libs" not in self.settings.features:
			return set()

		os = _os_merge
		linkmap = self.vartree.dbapi._linkmap
		if unmerge:
			installed_instance = self
		else:
			installed_instance = self._installed_instance
		old_contents = installed_instance.getcontents()
		root = self.settings['ROOT']
		# Contents keys are absolute; root_len is used below to strip
		# the root prefix while keeping the leading os.sep.
		root_len = len(root) - 1
		lib_graph = digraph()
		path_node_map = {}

		def path_to_node(path):
			# Map a path to its _LibGraphNode, merging alternative
			# paths that resolve to the same object key into a single
			# node (the node accumulates them in alt_paths).
			node = path_node_map.get(path)
			if node is None:
				node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
				alt_path_node = lib_graph.get(node)
				if alt_path_node is not None:
					node = alt_path_node
				node.alt_paths.add(path)
				path_node_map[path] = node
			return node

		consumer_map = {}
		provider_nodes = set()
		# Create provider nodes and add them to the graph.
		for f_abs in old_contents:

			if os is _os_merge:
				try:
					_unicode_encode(f_abs,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						_unicode_encode(f_abs,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

			f = f_abs[root_len:]
			if not unmerge and self.isowner(f):
				# We have an identically named replacement file,
				# so we don't try to preserve the old copy.
				continue
			try:
				consumers = linkmap.findConsumers(f,
					exclude_providers=(installed_instance.isowner,))
			except KeyError:
				continue
			if not consumers:
				continue
			provider_node = path_to_node(f)
			lib_graph.add(provider_node, None)
			provider_nodes.add(provider_node)
			consumer_map[provider_node] = consumers

		# Create consumer nodes and add them to the graph.
		# Note that consumers can also be providers.
		for provider_node, consumers in consumer_map.items():
			for c in consumers:
				consumer_node = path_to_node(c)
				if installed_instance.isowner(c) and \
					consumer_node not in provider_nodes:
					# This is not a provider, so it will be uninstalled.
					continue
				lib_graph.add(provider_node, consumer_node)

		# Locate nodes which should be preserved. They consist of all
		# providers that are reachable from consumers that are not
		# providers themselves.
		preserve_nodes = set()
		for consumer_node in lib_graph.root_nodes():
			if consumer_node in provider_nodes:
				continue
			# Preserve all providers that are reachable from this consumer.
			node_stack = lib_graph.child_nodes(consumer_node)
			while node_stack:
				provider_node = node_stack.pop()
				if provider_node in preserve_nodes:
					continue
				preserve_nodes.add(provider_node)
				node_stack.extend(lib_graph.child_nodes(provider_node))

		preserve_paths = set()
		for preserve_node in preserve_nodes:
			# Preserve the library itself, and also preserve the
			# soname symlink which is the only symlink that is
			# strictly required.
			hardlinks = set()
			soname_symlinks = set()
			soname = linkmap.getSoname(next(iter(preserve_node.alt_paths)))
			for f in preserve_node.alt_paths:
				f_abs = os.path.join(root, f.lstrip(os.sep))
				try:
					if stat.S_ISREG(os.lstat(f_abs).st_mode):
						hardlinks.add(f)
					elif os.path.basename(f) == soname:
						soname_symlinks.add(f)
				except OSError:
					pass

			# Only preserve symlinks when at least one regular file
			# (hardlink) for this node still exists on disk.
			if hardlinks:
				preserve_paths.update(hardlinks)
				preserve_paths.update(soname_symlinks)

		return preserve_paths
2916
	def _add_preserve_libs_to_contents(self, preserve_paths):
		"""
		Preserve libs returned from _find_libs_to_preserve().

		Copies the CONTENTS entries for the given paths (plus any
		missing parent directory entries) from the installed instance
		into this package's pending CONTENTS file in self.dbtmpdir.
		Paths lacking a contents entry are reported and removed from
		preserve_paths (the argument is mutated in place).

		@param preserve_paths: root-relative paths to preserve
		@type preserve_paths: set
		"""

		if not preserve_paths:
			return

		os = _os_merge
		showMessage = self._display_merge
		root = self.settings['ROOT']

		# Copy contents entries from the old package to the new one.
		new_contents = self.getcontents().copy()
		old_contents = self._installed_instance.getcontents()
		# Iterate a sorted copy so removing from preserve_paths below
		# is safe.
		for f in sorted(preserve_paths):
			f = _unicode_decode(f,
				encoding=_encodings['content'], errors='strict')
			f_abs = os.path.join(root, f.lstrip(os.sep))
			contents_entry = old_contents.get(f_abs)
			if contents_entry is None:
				# This will probably never happen, but it might if one of the
				# paths returned from findConsumers() refers to one of the libs
				# that should be preserved yet the path is not listed in the
				# contents. Such a path might belong to some other package, so
				# it shouldn't be preserved here.
				showMessage(_("!!! File '%s' will not be preserved "
					"due to missing contents entry\n") % (f_abs,),
					level=logging.ERROR, noiselevel=-1)
				preserve_paths.remove(f)
				continue
			new_contents[f_abs] = contents_entry
			obj_type = contents_entry[0]
			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
				noiselevel=-1)
			# Add parent directories to contents if necessary.
			parent_dir = os.path.dirname(f_abs)
			while len(parent_dir) > len(root):
				new_contents[parent_dir] = ["dir"]
				prev = parent_dir
				parent_dir = os.path.dirname(parent_dir)
				if prev == parent_dir:
					break
		# Write the merged CONTENTS atomically and invalidate the
		# cached contents so the new entries are visible.
		outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
		write_contents(new_contents, root, outfile)
		outfile.close()
		self._clear_contents_cache()
2964
	def _find_unused_preserved_libs(self, unmerge_no_replacement):
		"""
		Find preserved libraries that don't have any consumers left.

		@param unmerge_no_replacement: True when the package is being
			unmerged without replacement, in which case edges to
			consumers owned solely by this package are dropped since
			those consumers are about to disappear
		@type unmerge_no_replacement: bool
		@rtype: dict
		@return: {cpv : set([path, ...]), ...} mapping each package in
			the preserved-libs registry to the preserved paths that
			may now be removed; empty dict when preserve-libs support
			is unavailable or the registry is empty
		"""

		if self._linkmap_broken or \
			self.vartree.dbapi._linkmap is None or \
			self.vartree.dbapi._plib_registry is None or \
			not self.vartree.dbapi._plib_registry.hasEntries():
			return {}

		# Since preserved libraries can be consumers of other preserved
		# libraries, use a graph to track consumer relationships.
		plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
		linkmap = self.vartree.dbapi._linkmap
		lib_graph = digraph()
		preserved_nodes = set()
		preserved_paths = set()
		path_cpv_map = {}
		path_node_map = {}
		# NOTE(review): 'root' is assigned but not used in this method.
		root = self.settings['ROOT']

		def path_to_node(path):
			# Map a path to its _LibGraphNode, merging alternative
			# paths that resolve to the same object key into a single
			# node (the node accumulates them in alt_paths).
			node = path_node_map.get(path)
			if node is None:
				node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
				alt_path_node = lib_graph.get(node)
				if alt_path_node is not None:
					node = alt_path_node
				node.alt_paths.add(path)
				path_node_map[path] = node
			return node

		# Populate the graph: each preserved lib is a node, and each
		# existing consumer becomes a parent of the lib it consumes.
		for cpv, plibs in plib_dict.items():
			for f in plibs:
				path_cpv_map[f] = cpv
				preserved_node = path_to_node(f)
				if not preserved_node.file_exists():
					continue
				lib_graph.add(preserved_node, None)
				preserved_paths.add(f)
				preserved_nodes.add(preserved_node)
				for c in self.vartree.dbapi._linkmap.findConsumers(f):
					consumer_node = path_to_node(c)
					if not consumer_node.file_exists():
						continue
					# Note that consumers may also be providers.
					lib_graph.add(preserved_node, consumer_node)

		# Eliminate consumers having providers with the same soname as an
		# installed library that is not preserved. This eliminates
		# libraries that are erroneously preserved due to a move from one
		# directory to another.
		# Also eliminate consumers that are going to be unmerged if
		# unmerge_no_replacement is True.
		provider_cache = {}
		for preserved_node in preserved_nodes:
			soname = linkmap.getSoname(preserved_node)
			for consumer_node in lib_graph.parent_nodes(preserved_node):
				if consumer_node in preserved_nodes:
					continue
				if unmerge_no_replacement:
					will_be_unmerged = True
					for path in consumer_node.alt_paths:
						if not self.isowner(path):
							will_be_unmerged = False
							break
					if will_be_unmerged:
						# This consumer is not preserved and it is
						# being unmerged, so drop this edge.
						lib_graph.remove_edge(preserved_node, consumer_node)
						continue

				providers = provider_cache.get(consumer_node)
				if providers is None:
					providers = linkmap.findProviders(consumer_node)
					provider_cache[consumer_node] = providers
				providers = providers.get(soname)
				if providers is None:
					continue
				for provider in providers:
					if provider in preserved_paths:
						continue
					provider_node = path_to_node(provider)
					if not provider_node.file_exists():
						continue
					if provider_node in preserved_nodes:
						continue
					# An alternative provider seems to be
					# installed, so drop this edge.
					lib_graph.remove_edge(preserved_node, consumer_node)
					break

		# Repeatedly peel off preserved nodes that have no remaining
		# consumers (graph roots); removing them may expose further
		# unconsumed preserved libs on the next iteration.
		cpv_lib_map = {}
		while lib_graph:
			root_nodes = preserved_nodes.intersection(lib_graph.root_nodes())
			if not root_nodes:
				break
			lib_graph.difference_update(root_nodes)
			unlink_list = set()
			for node in root_nodes:
				unlink_list.update(node.alt_paths)
			unlink_list = sorted(unlink_list)
			for obj in unlink_list:
				cpv = path_cpv_map.get(obj)
				if cpv is None:
					# This means that a symlink is in the preserved libs
					# registry, but the actual lib it points to is not.
					self._display_merge(_("!!! symlink to lib is preserved, "
						"but not the lib itself:\n!!! '%s'\n") % (obj,),
						level=logging.ERROR, noiselevel=-1)
					continue
				removed = cpv_lib_map.get(cpv)
				if removed is None:
					removed = set()
					cpv_lib_map[cpv] = removed
				removed.add(obj)

		return cpv_lib_map
3084
3085         def _remove_preserved_libs(self, cpv_lib_map):
3086                 """
3087                 Remove files returned from _find_unused_preserved_libs().
3088                 """
3089
3090                 os = _os_merge
3091
3092                 files_to_remove = set()
3093                 for files in cpv_lib_map.values():
3094                         files_to_remove.update(files)
3095                 files_to_remove = sorted(files_to_remove)
3096                 showMessage = self._display_merge
3097                 root = self.settings['ROOT']
3098
3099                 parent_dirs = set()
3100                 for obj in files_to_remove:
3101                         obj = os.path.join(root, obj.lstrip(os.sep))
3102                         parent_dirs.add(os.path.dirname(obj))
3103                         if os.path.islink(obj):
3104                                 obj_type = _("sym")
3105                         else:
3106                                 obj_type = _("obj")
3107                         try:
3108                                 os.unlink(obj)
3109                         except OSError as e:
3110                                 if e.errno != errno.ENOENT:
3111                                         raise
3112                                 del e
3113                         else:
3114                                 showMessage(_("<<< !needed  %s %s\n") % (obj_type, obj),
3115                                         noiselevel=-1)
3116
3117                 # Remove empty parent directories if possible.
3118                 while parent_dirs:
3119                         x = parent_dirs.pop()
3120                         while True:
3121                                 try:
3122                                         os.rmdir(x)
3123                                 except OSError:
3124                                         break
3125                                 prev = x
3126                                 x = os.path.dirname(x)
3127                                 if x == prev:
3128                                         break
3129
3130                 self.vartree.dbapi._plib_registry.pruneNonExisting()
3131
	def _collision_protect(self, srcroot, destroot, mypkglist,
		file_list, symlink_list):
			"""
			Check the given regular files and symlinks for collisions
			with files already present under ROOT that are not owned
			by any package in mypkglist and not config-protected.
			Collisions matching a COLLISION_IGNORE pattern are skipped.
			Collisions with registered preserved libraries (matched by
			inode) are collected separately, since the current package
			will assume ownership of those.

			NOTE(review): the srcroot parameter is unused in this body,
			and the destroot parameter is shadowed below by
			self.settings['ROOT'].

			@param mypkglist: dblink instances whose ownership excuses
				a collision
			@param file_list: root-relative regular file paths
			@param symlink_list: root-relative symlink paths
			@return: (collisions, symlink_collisions, plib_collisions)
				where the first two are lists of colliding paths
				(symlink_collisions holds symlinks that collide with
				directories, banned by PMS) and plib_collisions maps
				cpv -> set of preserved-lib paths that collided
			"""

			os = _os_merge

			collision_ignore = []
			for x in portage.util.shlex_split(
				self.settings.get("COLLISION_IGNORE", "")):
				if os.path.isdir(os.path.join(self._eroot, x.lstrip(os.sep))):
					# Directory patterns match everything beneath them.
					x = normalize_path(x)
					x += "/*"
				collision_ignore.append(x)

			# For collisions with preserved libraries, the current package
			# will assume ownership and the libraries will be unregistered.
			if self.vartree.dbapi._plib_registry is None:
				# preserve-libs is entirely disabled
				plib_cpv_map = None
				plib_paths = None
				plib_inodes = {}
			else:
				plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
				plib_cpv_map = {}
				plib_paths = set()
				for cpv, paths in plib_dict.items():
					plib_paths.update(paths)
					for f in paths:
						plib_cpv_map[f] = cpv
				plib_inodes = self._lstat_inode_map(plib_paths)

			plib_collisions = {}

			showMessage = self._display_merge
			stopmerge = False
			collisions = []
			symlink_collisions = []
			# NOTE(review): destroot parameter is overridden here, so the
			# check always runs against the configured ROOT.
			destroot = self.settings['ROOT']
			showMessage(_(" %s checking %d files for package collisions\n") % \
				(colorize("GOOD", "*"), len(file_list) + len(symlink_list)))
			for i, (f, f_type) in enumerate(chain(
				((f, "reg") for f in file_list),
				((f, "sym") for f in symlink_list))):
				if i % 1000 == 0 and i != 0:
					showMessage(_("%d files checked ...\n") % i)

				dest_path = normalize_path(
					os.path.join(destroot, f.lstrip(os.path.sep)))
				try:
					dest_lstat = os.lstat(dest_path)
				except EnvironmentError as e:
					if e.errno == errno.ENOENT:
						# Nothing exists at this path, so no collision.
						del e
						continue
					elif e.errno == errno.ENOTDIR:
						del e
						# A non-directory is in a location where this package
						# expects to have a directory.
						dest_lstat = None
						parent_path = dest_path
						while len(parent_path) > len(destroot):
							parent_path = os.path.dirname(parent_path)
							try:
								dest_lstat = os.lstat(parent_path)
								break
							except EnvironmentError as e:
								if e.errno != errno.ENOTDIR:
									raise
								del e
						if not dest_lstat:
							raise AssertionError(
								"unable to find non-directory " + \
								"parent for '%s'" % dest_path)
						dest_path = parent_path
						f = os.path.sep + dest_path[len(destroot):]
						if f in collisions:
							continue
					else:
						raise
				if f[0] != "/":
					f="/"+f

				if stat.S_ISDIR(dest_lstat.st_mode):
					if f_type == "sym":
						# This case is explicitly banned
						# by PMS (see bug #326685).
						symlink_collisions.append(f)
						collisions.append(f)
						continue

				# Match by inode so hardlinked preserved libs are found
				# regardless of which path this entry uses.
				plibs = plib_inodes.get((dest_lstat.st_dev, dest_lstat.st_ino))
				if plibs:
					for path in plibs:
						cpv = plib_cpv_map[path]
						paths = plib_collisions.get(cpv)
						if paths is None:
							paths = set()
							plib_collisions[cpv] = paths
						paths.add(path)
					# The current package will assume ownership and the
					# libraries will be unregistered, so exclude this
					# path from the normal collisions.
					continue

				isowned = False
				full_path = os.path.join(destroot, f.lstrip(os.path.sep))
				for ver in mypkglist:
					if ver.isowner(f):
						isowned = True
						break
				if not isowned and self.isprotected(full_path):
					isowned = True
				if not isowned:
					f_match = full_path[len(self._eroot)-1:]
					stopmerge = True
					for pattern in collision_ignore:
						if fnmatch.fnmatch(f_match, pattern):
							stopmerge = False
							break
					if stopmerge:
						collisions.append(f)
			return collisions, symlink_collisions, plib_collisions
3253
3254         def _lstat_inode_map(self, path_iter):
3255                 """
3256                 Use lstat to create a map of the form:
3257                   {(st_dev, st_ino) : set([path1, path2, ...])}
3258                 Multiple paths may reference the same inode due to hardlinks.
3259                 All lstat() calls are relative to self.myroot.
3260                 """
3261
3262                 os = _os_merge
3263
3264                 root = self.settings['ROOT']
3265                 inode_map = {}
3266                 for f in path_iter:
3267                         path = os.path.join(root, f.lstrip(os.sep))
3268                         try:
3269                                 st = os.lstat(path)
3270                         except OSError as e:
3271                                 if e.errno not in (errno.ENOENT, errno.ENOTDIR):
3272                                         raise
3273                                 del e
3274                                 continue
3275                         key = (st.st_dev, st.st_ino)
3276                         paths = inode_map.get(key)
3277                         if paths is None:
3278                                 paths = set()
3279                                 inode_map[key] = paths
3280                         paths.add(f)
3281                 return inode_map
3282
	def _security_check(self, installed_instances):
		"""
		Scan the contents of the given installed instances for
		setuid/setgid regular files whose hardlink count exceeds the
		number of owned paths sharing the inode. Such extra, unowned
		hardlinks are suspicious (they could keep an old vulnerable
		binary alive after an upgrade). When found, an eerror message
		is emitted for the preinst phase.

		@param installed_instances: dblink objects whose contents
			should be checked
		@rtype: int
		@return: 1 if suspicious hardlinks were found, else 0
		"""
		if not installed_instances:
			return 0

		os = _os_merge

		showMessage = self._display_merge

		file_paths = set()
		for dblnk in installed_instances:
			file_paths.update(dblnk.getcontents())
		inode_map = {}
		real_paths = set()
		for i, path in enumerate(file_paths):

			if os is _os_merge:
				try:
					_unicode_encode(path,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						_unicode_encode(path,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

			try:
				s = os.lstat(path)
			except OSError as e:
				# Skip paths that no longer exist.
				if e.errno not in (errno.ENOENT, errno.ENOTDIR):
					raise
				del e
				continue
			if not stat.S_ISREG(s.st_mode):
				continue
			# Deduplicate via realpath so symlinked duplicates of the
			# same file are only examined once.
			path = os.path.realpath(path)
			if path in real_paths:
				continue
			real_paths.add(path)
			# Only multi-linked suid/sgid files are of interest; group
			# their paths by inode for the ownership comparison below.
			if s.st_nlink > 1 and \
				s.st_mode & (stat.S_ISUID | stat.S_ISGID):
				k = (s.st_dev, s.st_ino)
				inode_map.setdefault(k, []).append((path, s))
		suspicious_hardlinks = []
		for path_list in inode_map.values():
			path, s = path_list[0]
			if len(path_list) == s.st_nlink:
				# All hardlinks seem to be owned by this package.
				continue
			suspicious_hardlinks.append(path_list)
		if not suspicious_hardlinks:
			return 0

		msg = []
		msg.append(_("suid/sgid file(s) "
			"with suspicious hardlink(s):"))
		msg.append("")
		for path_list in suspicious_hardlinks:
			for path, s in path_list:
				msg.append("\t%s" % path)
		msg.append("")
		msg.append(_("See the Gentoo Security Handbook " 
			"guide for advice on how to proceed."))

		self._eerror("preinst", msg)

		return 1
3355
3356         def _eqawarn(self, phase, lines):
3357                 self._elog("eqawarn", phase, lines)
3358
3359         def _eerror(self, phase, lines):
3360                 self._elog("eerror", phase, lines)
3361
3362         def _elog(self, funcname, phase, lines):
3363                 func = getattr(portage.elog.messages, funcname)
3364                 if self._scheduler is None:
3365                         for l in lines:
3366                                 func(l, phase=phase, key=self.mycpv)
3367                 else:
3368                         background = self.settings.get("PORTAGE_BACKGROUND") == "1"
3369                         log_path = None
3370                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
3371                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
3372                         out = io.StringIO()
3373                         for line in lines:
3374                                 func(line, phase=phase, key=self.mycpv, out=out)
3375                         msg = out.getvalue()
3376                         self._scheduler.output(msg,
3377                                 background=background, log_path=log_path)
3378
3379         def _elog_process(self, phasefilter=None):
3380                 cpv = self.mycpv
3381                 if self._pipe is None:
3382                         elog_process(cpv, self.settings, phasefilter=phasefilter)
3383                 else:
3384                         logdir = os.path.join(self.settings["T"], "logging")
3385                         ebuild_logentries = collect_ebuild_messages(logdir)
3386                         py_logentries = collect_messages(key=cpv).get(cpv, {})
3387                         logentries = _merge_logentries(py_logentries, ebuild_logentries)
3388                         funcnames = {
3389                                 "INFO": "einfo",
3390                                 "LOG": "elog",
3391                                 "WARN": "ewarn",
3392                                 "QA": "eqawarn",
3393                                 "ERROR": "eerror"
3394                         }
3395                         str_buffer = []
3396                         for phase, messages in logentries.items():
3397                                 for key, lines in messages:
3398                                         funcname = funcnames[key]
3399                                         if isinstance(lines, basestring):
3400                                                 lines = [lines]
3401                                         for line in lines:
3402                                                 for line in line.split('\n'):
3403                                                         fields = (funcname, phase, cpv, line)
3404                                                         str_buffer.append(' '.join(fields))
3405                                                         str_buffer.append('\n')
3406                         if str_buffer:
3407                                 os.write(self._pipe, _unicode_encode(''.join(str_buffer)))
3408
3409         def _emerge_log(self, msg):
3410                 emergelog(False, msg)
3411
3412         def treewalk(self, srcroot, destroot, inforoot, myebuild, cleanup=0,
3413                 mydbapi=None, prev_mtimes=None, counter=None):
3414                 """
3415                 
3416                 This function does the following:
3417                 
3418                 calls self._preserve_libs if FEATURES=preserve-libs
3419                 calls self._collision_protect if FEATURES=collision-protect
3420                 calls doebuild(mydo=pkg_preinst)
3421                 Merges the package to the livefs
3422                 unmerges old version (if required)
3423                 calls doebuild(mydo=pkg_postinst)
3424                 calls env_update
3425                 
3426                 @param srcroot: Typically this is ${D}
3427                 @type srcroot: String (Path)
3428                 @param destroot: ignored, self.settings['ROOT'] is used instead
3429                 @type destroot: String (Path)
3430                 @param inforoot: root of the vardb entry ?
3431                 @type inforoot: String (Path)
3432                 @param myebuild: path to the ebuild that we are processing
3433                 @type myebuild: String (Path)
3434                 @param mydbapi: dbapi which is handed to doebuild.
3435                 @type mydbapi: portdbapi instance
3436                 @param prev_mtimes: { Filename:mtime } mapping for env_update
3437                 @type prev_mtimes: Dictionary
3438                 @rtype: Boolean
3439                 @return:
3440                 1. 0 on success
3441                 2. 1 on failure
3442                 
3443                 secondhand is a list of symlinks that have been skipped due to their target
3444                 not existing; we will merge these symlinks at a later time.
3445                 """
3446
3447                 os = _os_merge
3448
3449                 srcroot = _unicode_decode(srcroot,
3450                         encoding=_encodings['content'], errors='strict')
3451                 destroot = self.settings['ROOT']
3452                 inforoot = _unicode_decode(inforoot,
3453                         encoding=_encodings['content'], errors='strict')
3454                 myebuild = _unicode_decode(myebuild,
3455                         encoding=_encodings['content'], errors='strict')
3456
3457                 showMessage = self._display_merge
3458                 srcroot = normalize_path(srcroot).rstrip(os.path.sep) + os.path.sep
3459
3460                 if not os.path.isdir(srcroot):
3461                         showMessage(_("!!! Directory Not Found: D='%s'\n") % srcroot,
3462                                 level=logging.ERROR, noiselevel=-1)
3463                         return 1
3464
3465                 slot = ''
3466                 for var_name in ('CHOST', 'SLOT'):
3467                         if var_name == 'CHOST' and self.cat == 'virtual':
3468                                 try:
3469                                         os.unlink(os.path.join(inforoot, var_name))
3470                                 except OSError:
3471                                         pass
3472                                 continue
3473
3474                         f = None
3475                         try:
3476                                 f = io.open(_unicode_encode(
3477                                         os.path.join(inforoot, var_name),
3478                                         encoding=_encodings['fs'], errors='strict'),
3479                                         mode='r', encoding=_encodings['repo.content'],
3480                                         errors='replace')
3481                                 val = f.readline().strip()
3482                         except EnvironmentError as e:
3483                                 if e.errno != errno.ENOENT:
3484                                         raise
3485                                 del e
3486                                 val = ''
3487                         finally:
3488                                 if f is not None:
3489                                         f.close()
3490
3491                         if var_name == 'SLOT':
3492                                 slot = val
3493
3494                                 if not slot.strip():
3495                                         slot = self.settings.get(var_name, '')
3496                                         if not slot.strip():
3497                                                 showMessage(_("!!! SLOT is undefined\n"),
3498                                                         level=logging.ERROR, noiselevel=-1)
3499                                                 return 1
3500                                         write_atomic(os.path.join(inforoot, var_name), slot + '\n')
3501
3502                         if val != self.settings.get(var_name, ''):
3503                                 self._eqawarn('preinst',
3504                                         [_("QA Notice: Expected %(var_name)s='%(expected_value)s', got '%(actual_value)s'\n") % \
3505                                         {"var_name":var_name, "expected_value":self.settings.get(var_name, ''), "actual_value":val}])
3506
3507                 def eerror(lines):
3508                         self._eerror("preinst", lines)
3509
3510                 if not os.path.exists(self.dbcatdir):
3511                         ensure_dirs(self.dbcatdir)
3512
3513                 # NOTE: We use SLOT obtained from the inforoot
3514                 #       directory, in order to support USE=multislot.
3515                 # Use _pkg_str discard the sub-slot part if necessary.
3516                 slot = _pkg_str(self.mycpv, slot=slot).slot
3517                 cp = self.mysplit[0]
3518                 slot_atom = "%s:%s" % (cp, slot)
3519
3520                 # filter any old-style virtual matches
3521                 slot_matches = [cpv for cpv in self.vartree.dbapi.match(slot_atom) \
3522                         if cpv_getkey(cpv) == cp]
3523
3524                 if self.mycpv not in slot_matches and \
3525                         self.vartree.dbapi.cpv_exists(self.mycpv):
3526                         # handle multislot or unapplied slotmove
3527                         slot_matches.append(self.mycpv)
3528
3529                 others_in_slot = []
3530                 from portage import config
3531                 for cur_cpv in slot_matches:
3532                         # Clone the config in case one of these has to be unmerged since
3533                         # we need it to have private ${T} etc... for things like elog.
3534                         settings_clone = config(clone=self.settings)
3535                         settings_clone.pop("PORTAGE_BUILDIR_LOCKED", None)
3536                         settings_clone.reset()
3537                         others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
3538                                 settings=settings_clone,
3539                                 vartree=self.vartree, treetype="vartree",
3540                                 scheduler=self._scheduler, pipe=self._pipe))
3541
3542                 retval = self._security_check(others_in_slot)
3543                 if retval:
3544                         return retval
3545
3546                 if slot_matches:
3547                         # Used by self.isprotected().
3548                         max_dblnk = None
3549                         max_counter = -1
3550                         for dblnk in others_in_slot:
3551                                 cur_counter = self.vartree.dbapi.cpv_counter(dblnk.mycpv)
3552                                 if cur_counter > max_counter:
3553                                         max_counter = cur_counter
3554                                         max_dblnk = dblnk
3555                         self._installed_instance = max_dblnk
3556
3557                 if self.settings.get("INSTALL_MASK") or \
3558                         "nodoc" in self.settings.features or \
3559                         "noinfo" in self.settings.features or \
3560                         "noman" in self.settings.features:
3561                         # Apply INSTALL_MASK before collision-protect, since it may
3562                         # be useful to avoid collisions in some scenarios.
3563                         phase = MiscFunctionsProcess(background=False,
3564                                 commands=["preinst_mask"], phase="preinst",
3565                                 scheduler=self._scheduler, settings=self.settings)
3566                         phase.start()
3567                         phase.wait()
3568
3569                 # We check for unicode encoding issues after src_install. However,
3570                 # the check must be repeated here for binary packages (it's
3571                 # inexpensive since we call os.walk() here anyway).
3572                 unicode_errors = []
3573                 line_ending_re = re.compile('[\n\r]')
3574                 srcroot_len = len(srcroot)
3575                 ed_len = len(self.settings["ED"])
3576
3577                 while True:
3578
3579                         unicode_error = False
3580                         eagain_error = False
3581
3582                         myfilelist = []
3583                         mylinklist = []
3584                         paths_with_newlines = []
3585                         def onerror(e):
3586                                 raise
3587                         walk_iter = os.walk(srcroot, onerror=onerror)
3588                         while True:
3589                                 try:
3590                                         parent, dirs, files = next(walk_iter)
3591                                 except StopIteration:
3592                                         break
3593                                 except OSError as e:
3594                                         if e.errno != errno.EAGAIN:
3595                                                 raise
3596                                         # Observed with PyPy 1.8.
3597                                         eagain_error = True
3598                                         break
3599
3600                                 try:
3601                                         parent = _unicode_decode(parent,
3602                                                 encoding=_encodings['merge'], errors='strict')
3603                                 except UnicodeDecodeError:
3604                                         new_parent = _unicode_decode(parent,
3605                                                 encoding=_encodings['merge'], errors='replace')
3606                                         new_parent = _unicode_encode(new_parent,
3607                                                 encoding='ascii', errors='backslashreplace')
3608                                         new_parent = _unicode_decode(new_parent,
3609                                                 encoding=_encodings['merge'], errors='replace')
3610                                         os.rename(parent, new_parent)
3611                                         unicode_error = True
3612                                         unicode_errors.append(new_parent[ed_len:])
3613                                         break
3614
3615                                 for fname in files:
3616                                         try:
3617                                                 fname = _unicode_decode(fname,
3618                                                         encoding=_encodings['merge'], errors='strict')
3619                                         except UnicodeDecodeError:
3620                                                 fpath = portage._os.path.join(
3621                                                         parent.encode(_encodings['merge']), fname)
3622                                                 new_fname = _unicode_decode(fname,
3623                                                         encoding=_encodings['merge'], errors='replace')
3624                                                 new_fname = _unicode_encode(new_fname,
3625                                                         encoding='ascii', errors='backslashreplace')
3626                                                 new_fname = _unicode_decode(new_fname,
3627                                                         encoding=_encodings['merge'], errors='replace')
3628                                                 new_fpath = os.path.join(parent, new_fname)
3629                                                 os.rename(fpath, new_fpath)
3630                                                 unicode_error = True
3631                                                 unicode_errors.append(new_fpath[ed_len:])
3632                                                 fname = new_fname
3633                                                 fpath = new_fpath
3634                                         else:
3635                                                 fpath = os.path.join(parent, fname)
3636
3637                                         relative_path = fpath[srcroot_len:]
3638
3639                                         if line_ending_re.search(relative_path) is not None:
3640                                                 paths_with_newlines.append(relative_path)
3641
3642                                         file_mode = os.lstat(fpath).st_mode
3643                                         if stat.S_ISREG(file_mode):
3644                                                 myfilelist.append(relative_path)
3645                                         elif stat.S_ISLNK(file_mode):
3646                                                 # Note: os.walk puts symlinks to directories in the "dirs"
3647                                                 # list and it does not traverse them since that could lead
3648                                                 # to an infinite recursion loop.
3649                                                 mylinklist.append(relative_path)
3650
3651                                 if unicode_error:
3652                                         break
3653
3654                         if not (unicode_error or eagain_error):
3655                                 break
3656
3657                 if unicode_errors:
3658                         self._elog("eqawarn", "preinst",
3659                                 _merge_unicode_error(unicode_errors))
3660
3661                 if paths_with_newlines:
3662                         msg = []
3663                         msg.append(_("This package installs one or more files containing line ending characters:"))
3664                         msg.append("")
3665                         paths_with_newlines.sort()
3666                         for f in paths_with_newlines:
3667                                 msg.append("\t/%s" % (f.replace("\n", "\\n").replace("\r", "\\r")))
3668                         msg.append("")
3669                         msg.append(_("package %s NOT merged") % self.mycpv)
3670                         msg.append("")
3671                         eerror(msg)
3672                         return 1
3673
3674                 # If there are no files to merge, and an installed package in the same
3675                 # slot has files, it probably means that something went wrong.
3676                 if self.settings.get("PORTAGE_PACKAGE_EMPTY_ABORT") == "1" and \
3677                         not myfilelist and not mylinklist and others_in_slot:
3678                         installed_files = None
3679                         for other_dblink in others_in_slot:
3680                                 installed_files = other_dblink.getcontents()
3681                                 if not installed_files:
3682                                         continue
3683                                 from textwrap import wrap
3684                                 wrap_width = 72
3685                                 msg = []
3686                                 d = {
3687                                         "new_cpv":self.mycpv,
3688                                         "old_cpv":other_dblink.mycpv
3689                                 }
3690                                 msg.extend(wrap(_("The '%(new_cpv)s' package will not install "
3691                                         "any files, but the currently installed '%(old_cpv)s'"
3692                                         " package has the following files: ") % d, wrap_width))
3693                                 msg.append("")
3694                                 msg.extend(sorted(installed_files))
3695                                 msg.append("")
3696                                 msg.append(_("package %s NOT merged") % self.mycpv)
3697                                 msg.append("")
3698                                 msg.extend(wrap(
3699                                         _("Manually run `emerge --unmerge =%s` if you "
3700                                         "really want to remove the above files. Set "
3701                                         "PORTAGE_PACKAGE_EMPTY_ABORT=\"0\" in "
3702                                         "/etc/make.conf if you do not want to "
3703                                         "abort in cases like this.") % other_dblink.mycpv,
3704                                         wrap_width))
3705                                 eerror(msg)
3706                         if installed_files:
3707                                 return 1
3708
3709                 # Make sure the ebuild environment is initialized and that ${T}/elog
3710                 # exists for logging of collision-protect eerror messages.
3711                 if myebuild is None:
3712                         myebuild = os.path.join(inforoot, self.pkg + ".ebuild")
3713                 doebuild_environment(myebuild, "preinst",
3714                         settings=self.settings, db=mydbapi)
3715                 self.settings["REPLACING_VERSIONS"] = " ".join(
3716                         [portage.versions.cpv_getversion(other.mycpv)
3717                         for other in others_in_slot])
3718                 prepare_build_dirs(settings=self.settings, cleanup=cleanup)
3719
3720                 # check for package collisions
3721                 blockers = self._blockers
3722                 if blockers is None:
3723                         blockers = []
3724                 collisions, symlink_collisions, plib_collisions = \
3725                         self._collision_protect(srcroot, destroot,
3726                         others_in_slot + blockers, myfilelist, mylinklist)
3727
3728                 if symlink_collisions:
3729                         # Symlink collisions need to be distinguished from other types
3730                         # of collisions, in order to avoid confusion (see bug #409359).
3731                         msg = _("Package '%s' has one or more collisions "
3732                                 "between symlinks and directories, which is explicitly "
3733                                 "forbidden by PMS section 13.4 (see bug #326685):") % \
3734                                 (self.settings.mycpv,)
3735                         msg = textwrap.wrap(msg, 70)
3736                         msg.append("")
3737                         for f in symlink_collisions:
3738                                 msg.append("\t%s" % os.path.join(destroot,
3739                                         f.lstrip(os.path.sep)))
3740                         msg.append("")
3741                         self._elog("eerror", "preinst", msg)
3742
3743                 if collisions:
3744                         collision_protect = "collision-protect" in self.settings.features
3745                         protect_owned = "protect-owned" in self.settings.features
3746                         msg = _("This package will overwrite one or more files that"
3747                         " may belong to other packages (see list below).")
3748                         if not (collision_protect or protect_owned):
3749                                 msg += _(" Add either \"collision-protect\" or" 
3750                                 " \"protect-owned\" to FEATURES in"
3751                                 " make.conf if you would like the merge to abort"
3752                                 " in cases like this. See the make.conf man page for"
3753                                 " more information about these features.")
3754                         if self.settings.get("PORTAGE_QUIET") != "1":
3755                                 msg += _(" You can use a command such as"
3756                                 " `portageq owners / <filename>` to identify the"
3757                                 " installed package that owns a file. If portageq"
3758                                 " reports that only one package owns a file then do NOT"
3759                                 " file a bug report. A bug report is only useful if it"
3760                                 " identifies at least two or more packages that are known"
3761                                 " to install the same file(s)."
3762                                 " If a collision occurs and you"
3763                                 " can not explain where the file came from then you"
3764                                 " should simply ignore the collision since there is not"
3765                                 " enough information to determine if a real problem"
3766                                 " exists. Please do NOT file a bug report at"
3767                                 " http://bugs.gentoo.org unless you report exactly which"
3768                                 " two packages install the same file(s). Once again,"
3769                                 " please do NOT file a bug report unless you have"
3770                                 " completely understood the above message.")
3771
3772                         self.settings["EBUILD_PHASE"] = "preinst"
3773                         from textwrap import wrap
3774                         msg = wrap(msg, 70)
3775                         if collision_protect:
3776                                 msg.append("")
3777                                 msg.append(_("package %s NOT merged") % self.settings.mycpv)
3778                         msg.append("")
3779                         msg.append(_("Detected file collision(s):"))
3780                         msg.append("")
3781
3782                         for f in collisions:
3783                                 msg.append("\t%s" % \
3784                                         os.path.join(destroot, f.lstrip(os.path.sep)))
3785
3786                         eerror(msg)
3787
3788                         owners = None
3789                         if collision_protect or protect_owned or symlink_collisions:
3790                                 msg = []
3791                                 msg.append("")
3792                                 msg.append(_("Searching all installed"
3793                                         " packages for file collisions..."))
3794                                 msg.append("")
3795                                 msg.append(_("Press Ctrl-C to Stop"))
3796                                 msg.append("")
3797                                 eerror(msg)
3798
3799                                 if len(collisions) > 20:
3800                                         # get_owners is slow for large numbers of files, so
3801                                         # don't look them all up.
3802                                         collisions = collisions[:20]
3803                                 self.lockdb()
3804                                 try:
3805                                         owners = self.vartree.dbapi._owners.get_owners(collisions)
3806                                         self.vartree.dbapi.flush_cache()
3807                                 finally:
3808                                         self.unlockdb()
3809
3810                                 for pkg, owned_files in owners.items():
3811                                         cpv = pkg.mycpv
3812                                         msg = []
3813                                         msg.append("%s" % cpv)
3814                                         for f in sorted(owned_files):
3815                                                 msg.append("\t%s" % os.path.join(destroot,
3816                                                         f.lstrip(os.path.sep)))
3817                                         msg.append("")
3818                                         eerror(msg)
3819
3820                                 if not owners:
3821                                         eerror([_("None of the installed"
3822                                                 " packages claim the file(s)."), ""])
3823
3824                         symlink_abort_msg =_("Package '%s' NOT merged since it has "
3825                                 "one or more collisions between symlinks and directories, "
3826                                 "which is explicitly forbidden by PMS section 13.4 "
3827                                 "(see bug #326685).")
3828
3829                         # The explanation about the collision and how to solve
3830                         # it may not be visible via a scrollback buffer, especially
3831                         # if the number of file collisions is large. Therefore,
3832                         # show a summary at the end.
3833                         abort = False
3834                         if symlink_collisions:
3835                                 abort = True
3836                                 msg = symlink_abort_msg % (self.settings.mycpv,)
3837                         elif collision_protect:
3838                                 abort = True
3839                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3840                                         self.settings.mycpv
3841                         elif protect_owned and owners:
3842                                 abort = True
3843                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3844                                         self.settings.mycpv
3845                         else:
3846                                 msg = _("Package '%s' merged despite file collisions.") % \
3847                                         self.settings.mycpv
3848                         msg += _(" If necessary, refer to your elog "
3849                                 "messages for the whole content of the above message.")
3850                         eerror(wrap(msg, 70))
3851
3852                         if abort:
3853                                 return 1
3854
3855                 # The merge process may move files out of the image directory,
3856                 # which causes invalidation of the .installed flag.
3857                 try:
3858                         os.unlink(os.path.join(
3859                                 os.path.dirname(normalize_path(srcroot)), ".installed"))
3860                 except OSError as e:
3861                         if e.errno != errno.ENOENT:
3862                                 raise
3863                         del e
3864
3865                 self.dbdir = self.dbtmpdir
3866                 self.delete()
3867                 ensure_dirs(self.dbtmpdir)
3868
3869                 downgrade = False
3870                 if self._installed_instance is not None and \
3871                         vercmp(self.mycpv.version,
3872                         self._installed_instance.mycpv.version) < 0:
3873                         downgrade = True
3874
3875                 if self._installed_instance is not None:
3876                         rval = self._pre_merge_backup(self._installed_instance, downgrade)
3877                         if rval != os.EX_OK:
3878                                 showMessage(_("!!! FAILED preinst: ") +
3879                                         "quickpkg: %s\n" % rval,
3880                                         level=logging.ERROR, noiselevel=-1)
3881                                 return rval
3882
3883                 # run preinst script
3884                 showMessage(_(">>> Merging %(cpv)s to %(destroot)s\n") % \
3885                         {"cpv":self.mycpv, "destroot":destroot})
3886                 phase = EbuildPhase(background=False, phase="preinst",
3887                         scheduler=self._scheduler, settings=self.settings)
3888                 phase.start()
3889                 a = phase.wait()
3890
3891                 # XXX: Decide how to handle failures here.
3892                 if a != os.EX_OK:
3893                         showMessage(_("!!! FAILED preinst: ")+str(a)+"\n",
3894                                 level=logging.ERROR, noiselevel=-1)
3895                         return a
3896
3897                 # copy "info" files (like SLOT, CFLAGS, etc.) into the database
3898                 for x in os.listdir(inforoot):
3899                         self.copyfile(inforoot+"/"+x)
3900
3901                 # write local package counter for recording
3902                 if counter is None:
3903                         counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv)
3904                 f = io.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'),
3905                         encoding=_encodings['fs'], errors='strict'),
3906                         mode='w', encoding=_encodings['repo.content'],
3907                         errors='backslashreplace')
3908                 f.write(_unicode_decode(str(counter)))
3909                 f.close()
3910
3911                 self.updateprotect()
3912
3913                 #if we have a file containing previously-merged config file md5sums, grab it.
3914                 self.vartree.dbapi._fs_lock()
3915                 try:
3916                         # Always behave like --noconfmem is enabled for downgrades
3917                         # so that people who don't know about this option are less
3918                         # likely to get confused when doing upgrade/downgrade cycles.
3919                         cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file)
3920                         if "NOCONFMEM" in self.settings or downgrade:
3921                                 cfgfiledict["IGNORE"]=1
3922                         else:
3923                                 cfgfiledict["IGNORE"]=0
3924
3925                         rval = self._merge_contents(srcroot, destroot, cfgfiledict)
3926                         if rval != os.EX_OK:
3927                                 return rval
3928                 finally:
3929                         self.vartree.dbapi._fs_unlock()
3930
3931                 # These caches are populated during collision-protect and the data
3932                 # they contain is now invalid. It's very important to invalidate
3933                 # the contents_inodes cache so that FEATURES=unmerge-orphans
3934                 # doesn't unmerge anything that belongs to this package that has
3935                 # just been merged.
3936                 for dblnk in others_in_slot:
3937                         dblnk._clear_contents_cache()
3938                 self._clear_contents_cache()
3939
3940                 linkmap = self.vartree.dbapi._linkmap
3941                 plib_registry = self.vartree.dbapi._plib_registry
3942                 # We initialize preserve_paths to an empty set rather
3943                 # than None here because it plays an important role
3944                 # in prune_plib_registry logic by serving to indicate
3945                 # that we have a replacement for a package that's
3946                 # being unmerged.
3947
3948                 preserve_paths = set()
3949                 needed = None
3950                 if not (self._linkmap_broken or linkmap is None or
3951                         plib_registry is None):
3952                         self.vartree.dbapi._fs_lock()
3953                         plib_registry.lock()
3954                         try:
3955                                 plib_registry.load()
3956                                 needed = os.path.join(inforoot, linkmap._needed_aux_key)
3957                                 self._linkmap_rebuild(include_file=needed)
3958
3959                                 # Preserve old libs if they are still in use
3960                                 # TODO: Handle cases where the previous instance
3961                                 # has already been uninstalled but it still has some
3962                                 # preserved libraries in the registry that we may
3963                                 # want to preserve here.
3964                                 preserve_paths = self._find_libs_to_preserve()
3965                         finally:
3966                                 plib_registry.unlock()
3967                                 self.vartree.dbapi._fs_unlock()
3968
3969                         if preserve_paths:
3970                                 self._add_preserve_libs_to_contents(preserve_paths)
3971
3972                 # If portage is reinstalling itself, remove the old
3973                 # version now since we want to use the temporary
3974                 # PORTAGE_BIN_PATH that will be removed when we return.
3975                 reinstall_self = False
3976                 if self.myroot == "/" and \
3977                         match_from_list(PORTAGE_PACKAGE_ATOM, [self.mycpv]):
3978                         reinstall_self = True
3979
3980                 emerge_log = self._emerge_log
3981
3982                 # If we have any preserved libraries then autoclean
3983                 # is forced so that preserve-libs logic doesn't have
3984                 # to account for the additional complexity of the
3985                 # AUTOCLEAN=no mode.
3986                 autoclean = self.settings.get("AUTOCLEAN", "yes") == "yes" \
3987                         or preserve_paths
3988
3989                 if autoclean:
3990                         emerge_log(_(" >>> AUTOCLEAN: %s") % (slot_atom,))
3991
3992                 others_in_slot.append(self)  # self has just been merged
3993                 for dblnk in list(others_in_slot):
3994                         if dblnk is self:
3995                                 continue
3996                         if not (autoclean or dblnk.mycpv == self.mycpv or reinstall_self):
3997                                 continue
3998                         showMessage(_(">>> Safely unmerging already-installed instance...\n"))
3999                         emerge_log(_(" === Unmerging... (%s)") % (dblnk.mycpv,))
4000                         others_in_slot.remove(dblnk) # dblnk will unmerge itself now
4001                         dblnk._linkmap_broken = self._linkmap_broken
4002                         dblnk.settings["REPLACED_BY_VERSION"] = portage.versions.cpv_getversion(self.mycpv)
4003                         dblnk.settings.backup_changes("REPLACED_BY_VERSION")
4004                         unmerge_rval = dblnk.unmerge(ldpath_mtimes=prev_mtimes,
4005                                 others_in_slot=others_in_slot, needed=needed,
4006                                 preserve_paths=preserve_paths)
4007                         dblnk.settings.pop("REPLACED_BY_VERSION", None)
4008
4009                         if unmerge_rval == os.EX_OK:
4010                                 emerge_log(_(" >>> unmerge success: %s") % (dblnk.mycpv,))
4011                         else:
4012                                 emerge_log(_(" !!! unmerge FAILURE: %s") % (dblnk.mycpv,))
4013
4014                         self.lockdb()
4015                         try:
4016                                 # TODO: Check status and abort if necessary.
4017                                 dblnk.delete()
4018                         finally:
4019                                 self.unlockdb()
4020                         showMessage(_(">>> Original instance of package unmerged safely.\n"))
4021
4022                 if len(others_in_slot) > 1:
4023                         showMessage(colorize("WARN", _("WARNING:"))
4024                                 + _(" AUTOCLEAN is disabled.  This can cause serious"
4025                                 " problems due to overlapping packages.\n"),
4026                                 level=logging.WARN, noiselevel=-1)
4027
4028                 # We hold both directory locks.
4029                 self.dbdir = self.dbpkgdir
4030                 self.lockdb()
4031                 try:
4032                         self.delete()
4033                         _movefile(self.dbtmpdir, self.dbpkgdir, mysettings=self.settings)
4034                 finally:
4035                         self.unlockdb()
4036
4037                 # Check for file collisions with blocking packages
4038                 # and remove any colliding files from their CONTENTS
4039                 # since they now belong to this package.
4040                 self._clear_contents_cache()
4041                 contents = self.getcontents()
4042                 destroot_len = len(destroot) - 1
4043                 self.lockdb()
4044                 try:
4045                         for blocker in blockers:
4046                                 self.vartree.dbapi.removeFromContents(blocker, iter(contents),
4047                                         relative_paths=False)
4048                 finally:
4049                         self.unlockdb()
4050
4051                 plib_registry = self.vartree.dbapi._plib_registry
4052                 if plib_registry:
4053                         self.vartree.dbapi._fs_lock()
4054                         plib_registry.lock()
4055                         try:
4056                                 plib_registry.load()
4057
4058                                 if preserve_paths:
4059                                         # keep track of the libs we preserved
4060                                         plib_registry.register(self.mycpv, slot, counter,
4061                                                 sorted(preserve_paths))
4062
4063                                 # Unregister any preserved libs that this package has overwritten
4064                                 # and update the contents of the packages that owned them.
4065                                 plib_dict = plib_registry.getPreservedLibs()
4066                                 for cpv, paths in plib_collisions.items():
4067                                         if cpv not in plib_dict:
4068                                                 continue
4069                                         has_vdb_entry = False
4070                                         if cpv != self.mycpv:
4071                                                 # If we've replaced another instance with the
4072                                                 # same cpv then the vdb entry no longer belongs
4073                                                 # to it, so we'll have to get the slot and counter
4074                                                 # from plib_registry._data instead.
4075                                                 self.vartree.dbapi.lock()
4076                                                 try:
4077                                                         try:
4078                                                                 slot, counter = self.vartree.dbapi.aux_get(
4079                                                                         cpv, ["SLOT", "COUNTER"])
4080                                                         except KeyError:
4081                                                                 pass
4082                                                         else:
4083                                                                 has_vdb_entry = True
4084                                                                 self.vartree.dbapi.removeFromContents(
4085                                                                         cpv, paths)
4086                                                 finally:
4087                                                         self.vartree.dbapi.unlock()
4088
4089                                         if not has_vdb_entry:
4090                                                 # It's possible for previously unmerged packages
4091                                                 # to have preserved libs in the registry, so try
4092                                                 # to retrieve the slot and counter from there.
4093                                                 has_registry_entry = False
4094                                                 for plib_cps, (plib_cpv, plib_counter, plib_paths) in \
4095                                                         plib_registry._data.items():
4096                                                         if plib_cpv != cpv:
4097                                                                 continue
4098                                                         try:
4099                                                                 cp, slot = plib_cps.split(":", 1)
4100                                                         except ValueError:
4101                                                                 continue
4102                                                         counter = plib_counter
4103                                                         has_registry_entry = True
4104                                                         break
4105
4106                                                 if not has_registry_entry:
4107                                                         continue
4108
4109                                         remaining = [f for f in plib_dict[cpv] if f not in paths]
4110                                         plib_registry.register(cpv, slot, counter, remaining)
4111
4112                                 plib_registry.store()
4113                         finally:
4114                                 plib_registry.unlock()
4115                                 self.vartree.dbapi._fs_unlock()
4116
4117                 self.vartree.dbapi._add(self)
4118                 contents = self.getcontents()
4119
4120                 #do postinst script
4121                 self.settings["PORTAGE_UPDATE_ENV"] = \
4122                         os.path.join(self.dbpkgdir, "environment.bz2")
4123                 self.settings.backup_changes("PORTAGE_UPDATE_ENV")
4124                 try:
4125                         phase = EbuildPhase(background=False, phase="postinst",
4126                                 scheduler=self._scheduler, settings=self.settings)
4127                         phase.start()
4128                         a = phase.wait()
4129                         if a == os.EX_OK:
4130                                 showMessage(_(">>> %s merged.\n") % self.mycpv)
4131                 finally:
4132                         self.settings.pop("PORTAGE_UPDATE_ENV", None)
4133
4134                 if a != os.EX_OK:
4135                         # It's stupid to bail out here, so keep going regardless of
4136                         # phase return code.
4137                         showMessage(_("!!! FAILED postinst: ")+str(a)+"\n",
4138                                 level=logging.ERROR, noiselevel=-1)
4139
4140                 #update environment settings, library paths. DO NOT change symlinks.
4141                 env_update(
4142                         target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes,
4143                         contents=contents, env=self.settings,
4144                         writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi)
4145
4146                 # For gcc upgrades, preserved libs have to be removed after the
4147                 # the library path has been updated.
4148                 self._prune_plib_registry()
4149
4150                 return os.EX_OK
4151
4152         def _new_backup_path(self, p):
4153                 """
4154                 The works for any type path, such as a regular file, symlink,
4155                 or directory. The parent directory is assumed to exist.
4156                 The returned filename is of the form p + '.backup.' + x, where
4157                 x guarantees that the returned path does not exist yet.
4158                 """
4159                 os = _os_merge
4160
4161                 x = -1
4162                 while True:
4163                         x += 1
4164                         backup_p = p + '.backup.' + str(x).rjust(4, '0')
4165                         try:
4166                                 os.lstat(backup_p)
4167                         except OSError:
4168                                 break
4169
4170                 return backup_p
4171
4172         def _merge_contents(self, srcroot, destroot, cfgfiledict):
4173
4174                 cfgfiledict_orig = cfgfiledict.copy()
4175
4176                 # open CONTENTS file (possibly overwriting old one) for recording
4177                 # Use atomic_ofstream for automatic coercion of raw bytes to
4178                 # unicode, in order to prevent TypeError when writing raw bytes
4179                 # to TextIOWrapper with python2.
4180                 outfile = atomic_ofstream(_unicode_encode(
4181                         os.path.join(self.dbtmpdir, 'CONTENTS'),
4182                         encoding=_encodings['fs'], errors='strict'),
4183                         mode='w', encoding=_encodings['repo.content'],
4184                         errors='backslashreplace')
4185
4186                 # Don't bump mtimes on merge since some application require
4187                 # preservation of timestamps.  This means that the unmerge phase must
4188                 # check to see if file belongs to an installed instance in the same
4189                 # slot.
4190                 mymtime = None
4191
4192                 # set umask to 0 for merging; back up umask, save old one in prevmask (since this is a global change)
4193                 prevmask = os.umask(0)
4194                 secondhand = []
4195
4196                 # we do a first merge; this will recurse through all files in our srcroot but also build up a
4197                 # "second hand" of symlinks to merge later
4198                 if self.mergeme(srcroot, destroot, outfile, secondhand,
4199                         self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime):
4200                         return 1
4201
4202                 # now, it's time for dealing our second hand; we'll loop until we can't merge anymore.  The rest are
4203                 # broken symlinks.  We'll merge them too.
4204                 lastlen = 0
4205                 while len(secondhand) and len(secondhand)!=lastlen:
4206                         # clear the thirdhand.  Anything from our second hand that
4207                         # couldn't get merged will be added to thirdhand.
4208
4209                         thirdhand = []
4210                         if self.mergeme(srcroot, destroot, outfile, thirdhand,
4211                                 secondhand, cfgfiledict, mymtime):
4212                                 return 1
4213
4214                         #swap hands
4215                         lastlen = len(secondhand)
4216
4217                         # our thirdhand now becomes our secondhand.  It's ok to throw
4218                         # away secondhand since thirdhand contains all the stuff that
4219                         # couldn't be merged.
4220                         secondhand = thirdhand
4221
4222                 if len(secondhand):
4223                         # force merge of remaining symlinks (broken or circular; oh well)
4224                         if self.mergeme(srcroot, destroot, outfile, None,
4225                                 secondhand, cfgfiledict, mymtime):
4226                                 return 1
4227
4228                 #restore umask
4229                 os.umask(prevmask)
4230
4231                 #if we opened it, close it
4232                 outfile.flush()
4233                 outfile.close()
4234
4235                 # write out our collection of md5sums
4236                 if cfgfiledict != cfgfiledict_orig:
4237                         cfgfiledict.pop("IGNORE", None)
4238                         try:
4239                                 writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
4240                         except InvalidLocation:
4241                                 self.settings._init_dirs()
4242                                 writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
4243
4244                 return os.EX_OK
4245
4246         def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime):
4247                 """
4248                 
4249                 This function handles actual merging of the package contents to the livefs.
4250                 It also handles config protection.
4251                 
4252                 @param srcroot: Where are we copying files from (usually ${D})
4253                 @type srcroot: String (Path)
4254                 @param destroot: Typically ${ROOT}
4255                 @type destroot: String (Path)
4256                 @param outfile: File to log operations to
4257                 @type outfile: File Object
4258                 @param secondhand: A set of items to merge in pass two (usually
4259                 or symlinks that point to non-existing files that may get merged later)
4260                 @type secondhand: List
4261                 @param stufftomerge: Either a diretory to merge, or a list of items.
4262                 @type stufftomerge: String or List
4263                 @param cfgfiledict: { File:mtime } mapping for config_protected files
4264                 @type cfgfiledict: Dictionary
4265                 @param thismtime: The current time (typically long(time.time())
4266                 @type thismtime: Long
4267                 @rtype: None or Boolean
4268                 @return:
4269                 1. True on failure
4270                 2. None otherwise
4271                 
4272                 """
4273
4274                 showMessage = self._display_merge
4275                 writemsg = self._display_merge
4276
4277                 os = _os_merge
4278                 sep = os.sep
4279                 join = os.path.join
4280                 srcroot = normalize_path(srcroot).rstrip(sep) + sep
4281                 destroot = normalize_path(destroot).rstrip(sep) + sep
4282                 calc_prelink = "prelink-checksums" in self.settings.features
4283
4284                 protect_if_modified = \
4285                         "config-protect-if-modified" in self.settings.features and \
4286                         self._installed_instance is not None
4287
4288                 # this is supposed to merge a list of files.  There will be 2 forms of argument passing.
4289                 if isinstance(stufftomerge, basestring):
4290                         #A directory is specified.  Figure out protection paths, listdir() it and process it.
4291                         mergelist = os.listdir(join(srcroot, stufftomerge))
4292                         offset = stufftomerge
4293                 else:
4294                         mergelist = stufftomerge
4295                         offset = ""
4296
4297                 for i, x in enumerate(mergelist):
4298
4299                         mysrc = join(srcroot, offset, x)
4300                         mydest = join(destroot, offset, x)
4301                         # myrealdest is mydest without the $ROOT prefix (makes a difference if ROOT!="/")
4302                         myrealdest = join(sep, offset, x)
4303                         # stat file once, test using S_* macros many times (faster that way)
4304                         mystat = os.lstat(mysrc)
4305                         mymode = mystat[stat.ST_MODE]
4306                         # handy variables; mydest is the target object on the live filesystems;
4307                         # mysrc is the source object in the temporary install dir
4308                         try:
4309                                 mydstat = os.lstat(mydest)
4310                                 mydmode = mydstat.st_mode
4311                         except OSError as e:
4312                                 if e.errno != errno.ENOENT:
4313                                         raise
4314                                 del e
4315                                 #dest file doesn't exist
4316                                 mydstat = None
4317                                 mydmode = None
4318
4319                         if stat.S_ISLNK(mymode):
4320                                 # we are merging a symbolic link
4321                                 # The file name of mysrc and the actual file that it points to
4322                                 # will have earlier been forcefully converted to the 'merge'
4323                                 # encoding if necessary, but the content of the symbolic link
4324                                 # may need to be forcefully converted here.
4325                                 myto = _os.readlink(_unicode_encode(mysrc,
4326                                         encoding=_encodings['merge'], errors='strict'))
4327                                 try:
4328                                         myto = _unicode_decode(myto,
4329                                                 encoding=_encodings['merge'], errors='strict')
4330                                 except UnicodeDecodeError:
4331                                         myto = _unicode_decode(myto, encoding=_encodings['merge'],
4332                                                 errors='replace')
4333                                         myto = _unicode_encode(myto, encoding='ascii',
4334                                                 errors='backslashreplace')
4335                                         myto = _unicode_decode(myto, encoding=_encodings['merge'],
4336                                                 errors='replace')
4337                                         os.unlink(mysrc)
4338                                         os.symlink(myto, mysrc)
4339
4340                                 # Pass in the symlink target in order to bypass the
4341                                 # os.readlink() call inside abssymlink(), since that
4342                                 # call is unsafe if the merge encoding is not ascii
4343                                 # or utf_8 (see bug #382021).
4344                                 myabsto = abssymlink(mysrc, target=myto)
4345
4346                                 if myabsto.startswith(srcroot):
4347                                         myabsto = myabsto[len(srcroot):]
4348                                 myabsto = myabsto.lstrip(sep)
4349                                 if self.settings and self.settings["D"]:
4350                                         if myto.startswith(self.settings["D"]):
4351                                                 myto = myto[len(self.settings["D"])-1:]
4352                                 # myrealto contains the path of the real file to which this symlink points.
4353                                 # we can simply test for existence of this file to see if the target has been merged yet
4354                                 myrealto = normalize_path(os.path.join(destroot, myabsto))
4355                                 if mydmode!=None:
4356                                         #destination exists
4357                                         if stat.S_ISDIR(mydmode):
4358                                                 # we can't merge a symlink over a directory
4359                                                 newdest = self._new_backup_path(mydest)
4360                                                 msg = []
4361                                                 msg.append("")
4362                                                 msg.append(_("Installation of a symlink is blocked by a directory:"))
4363                                                 msg.append("  '%s'" % mydest)
4364                                                 msg.append(_("This symlink will be merged with a different name:"))
4365                                                 msg.append("  '%s'" % newdest)
4366                                                 msg.append("")
4367                                                 self._eerror("preinst", msg)
4368                                                 mydest = newdest
4369
4370                                         elif not stat.S_ISLNK(mydmode):
4371                                                 if os.path.exists(mysrc) and stat.S_ISDIR(os.stat(mysrc)[stat.ST_MODE]):
4372                                                         # Kill file blocking installation of symlink to dir #71787
4373                                                         pass
4374                                                 elif self.isprotected(mydest):
4375                                                         # Use md5 of the target in ${D} if it exists...
4376                                                         try:
4377                                                                 newmd5 = perform_md5(join(srcroot, myabsto))
4378                                                         except FileNotFound:
4379                                                                 # Maybe the target is merged already.
4380                                                                 try:
4381                                                                         newmd5 = perform_md5(myrealto)
4382                                                                 except FileNotFound:
4383                                                                         newmd5 = None
4384                                                         mydest = new_protect_filename(mydest, newmd5=newmd5)
4385
4386                                 # if secondhand is None it means we're operating in "force" mode and should not create a second hand.
4387                                 if (secondhand != None) and (not os.path.exists(myrealto)):
4388                                         # either the target directory doesn't exist yet or the target file doesn't exist -- or
4389                                         # the target is a broken symlink.  We will add this file to our "second hand" and merge
4390                                         # it later.
4391                                         secondhand.append(mysrc[len(srcroot):])
4392                                         continue
4393                                 # unlinking no longer necessary; "movefile" will overwrite symlinks atomically and correctly
4394                                 mymtime = movefile(mysrc, mydest, newmtime=thismtime,
4395                                         sstat=mystat, mysettings=self.settings,
4396                                         encoding=_encodings['merge'])
4397                                 if mymtime != None:
4398                                         showMessage(">>> %s -> %s\n" % (mydest, myto))
4399                                         outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
4400                                 else:
4401                                         showMessage(_("!!! Failed to move file.\n"),
4402                                                 level=logging.ERROR, noiselevel=-1)
4403                                         showMessage("!!! %s -> %s\n" % (mydest, myto),
4404                                                 level=logging.ERROR, noiselevel=-1)
4405                                         return 1
4406                         elif stat.S_ISDIR(mymode):
4407                                 # we are merging a directory
4408                                 if mydmode != None:
4409                                         # destination exists
4410
4411                                         if bsd_chflags:
4412                                                 # Save then clear flags on dest.
4413                                                 dflags = mydstat.st_flags
4414                                                 if dflags != 0:
4415                                                         bsd_chflags.lchflags(mydest, 0)
4416
4417                                         if not os.access(mydest, os.W_OK):
4418                                                 pkgstuff = pkgsplit(self.pkg)
4419                                                 writemsg(_("\n!!! Cannot write to '%s'.\n") % mydest, noiselevel=-1)
4420                                                 writemsg(_("!!! Please check permissions and directories for broken symlinks.\n"))
4421                                                 writemsg(_("!!! You may start the merge process again by using ebuild:\n"))
4422                                                 writemsg("!!! ebuild "+self.settings["PORTDIR"]+"/"+self.cat+"/"+pkgstuff[0]+"/"+self.pkg+".ebuild merge\n")
4423                                                 writemsg(_("!!! And finish by running this: env-update\n\n"))
4424                                                 return 1
4425
4426                                         if stat.S_ISDIR(mydmode) or \
4427                                                 (stat.S_ISLNK(mydmode) and os.path.isdir(mydest)):
4428                                                 # a symlink to an existing directory will work for us; keep it:
4429                                                 showMessage("--- %s/\n" % mydest)
4430                                                 if bsd_chflags:
4431                                                         bsd_chflags.lchflags(mydest, dflags)
4432                                         else:
4433                                                 # a non-directory and non-symlink-to-directory.  Won't work for us.  Move out of the way.
4434                                                 backup_dest = self._new_backup_path(mydest)
4435                                                 msg = []
4436                                                 msg.append("")
4437                                                 msg.append(_("Installation of a directory is blocked by a file:"))
4438                                                 msg.append("  '%s'" % mydest)
4439                                                 msg.append(_("This file will be renamed to a different name:"))
4440                                                 msg.append("  '%s'" % backup_dest)
4441                                                 msg.append("")
4442                                                 self._eerror("preinst", msg)
4443                                                 if movefile(mydest, backup_dest,
4444                                                         mysettings=self.settings,
4445                                                         encoding=_encodings['merge']) is None:
4446                                                         return 1
4447                                                 showMessage(_("bak %s %s.backup\n") % (mydest, mydest),
4448                                                         level=logging.ERROR, noiselevel=-1)
4449                                                 #now create our directory
4450                                                 try:
4451                                                         if self.settings.selinux_enabled():
4452                                                                 _selinux_merge.mkdir(mydest, mysrc)
4453                                                         else:
4454                                                                 os.mkdir(mydest)
4455                                                 except OSError as e:
4456                                                         # Error handling should be equivalent to
4457                                                         # portage.util.ensure_dirs() for cases
4458                                                         # like bug #187518.
4459                                                         if e.errno in (errno.EEXIST,):
4460                                                                 pass
4461                                                         elif os.path.isdir(mydest):
4462                                                                 pass
4463                                                         else:
4464                                                                 raise
4465                                                         del e
4466
4467                                                 if bsd_chflags:
4468                                                         bsd_chflags.lchflags(mydest, dflags)
4469                                                 os.chmod(mydest, mystat[0])
4470                                                 os.chown(mydest, mystat[4], mystat[5])
4471                                                 showMessage(">>> %s/\n" % mydest)
4472                                 else:
4473                                         try:
4474                                                 #destination doesn't exist
4475                                                 if self.settings.selinux_enabled():
4476                                                         _selinux_merge.mkdir(mydest, mysrc)
4477                                                 else:
4478                                                         os.mkdir(mydest)
4479                                         except OSError as e:
4480                                                 # Error handling should be equivalent to
4481                                                 # portage.util.ensure_dirs() for cases
4482                                                 # like bug #187518.
4483                                                 if e.errno in (errno.EEXIST,):
4484                                                         pass
4485                                                 elif os.path.isdir(mydest):
4486                                                         pass
4487                                                 else:
4488                                                         raise
4489                                                 del e
4490                                         os.chmod(mydest, mystat[0])
4491                                         os.chown(mydest, mystat[4], mystat[5])
4492                                         showMessage(">>> %s/\n" % mydest)
4493                                 outfile.write("dir "+myrealdest+"\n")
4494                                 # recurse and merge this directory
4495                                 if self.mergeme(srcroot, destroot, outfile, secondhand,
4496                                         join(offset, x), cfgfiledict, thismtime):
4497                                         return 1
4498                         elif stat.S_ISREG(mymode):
4499                                 # we are merging a regular file
4500                                 mymd5 = perform_md5(mysrc, calc_prelink=calc_prelink)
4501                                 # calculate config file protection stuff
4502                                 mydestdir = os.path.dirname(mydest)
4503                                 moveme = 1
4504                                 zing = "!!!"
4505                                 mymtime = None
4506                                 protected = self.isprotected(mydest)
4507                                 if mydmode != None:
4508                                         # destination file exists
4509                                         
4510                                         if stat.S_ISDIR(mydmode):
4511                                                 # install of destination is blocked by an existing directory with the same name
4512                                                 newdest = self._new_backup_path(mydest)
4513                                                 msg = []
4514                                                 msg.append("")
4515                                                 msg.append(_("Installation of a regular file is blocked by a directory:"))
4516                                                 msg.append("  '%s'" % mydest)
4517                                                 msg.append(_("This file will be merged with a different name:"))
4518                                                 msg.append("  '%s'" % newdest)
4519                                                 msg.append("")
4520                                                 self._eerror("preinst", msg)
4521                                                 mydest = newdest
4522
4523                                         elif stat.S_ISREG(mydmode) or (stat.S_ISLNK(mydmode) and os.path.exists(mydest) and stat.S_ISREG(os.stat(mydest)[stat.ST_MODE])):
4524                                                 # install of destination is blocked by an existing regular file,
4525                                                 # or by a symlink to an existing regular file;
4526                                                 # now, config file management may come into play.
4527                                                 # we only need to tweak mydest if cfg file management is in play.
4528                                                 if protected:
4529                                                         destmd5 = perform_md5(mydest, calc_prelink=calc_prelink)
4530                                                         if protect_if_modified:
4531                                                                 contents_key = \
4532                                                                         self._installed_instance._match_contents(myrealdest)
4533                                                                 if contents_key:
4534                                                                         inst_info = self._installed_instance.getcontents()[contents_key]
4535                                                                         if inst_info[0] == "obj" and inst_info[2] == destmd5:
4536                                                                                 protected = False
4537
4538                                                 if protected:
4539                                                         # we have a protection path; enable config file management.
4540                                                         cfgprot = 0
4541                                                         if mymd5 == destmd5:
4542                                                                 #file already in place; simply update mtimes of destination
4543                                                                 moveme = 1
4544                                                         else:
4545                                                                 if mymd5 == cfgfiledict.get(myrealdest, [None])[0]:
4546                                                                         """ An identical update has previously been
4547                                                                         merged.  Skip it unless the user has chosen
4548                                                                         --noconfmem."""
4549                                                                         moveme = cfgfiledict["IGNORE"]
4550                                                                         cfgprot = cfgfiledict["IGNORE"]
4551                                                                         if not moveme:
4552                                                                                 zing = "---"
4553                                                                                 mymtime = mystat[stat.ST_MTIME]
4554                                                                 else:
4555                                                                         moveme = 1
4556                                                                         cfgprot = 1
4557                                                         if moveme:
4558                                                                 # Merging a new file, so update confmem.
4559                                                                 cfgfiledict[myrealdest] = [mymd5]
4560                                                         elif destmd5 == cfgfiledict.get(myrealdest, [None])[0]:
4561                                                                 """A previously remembered update has been
4562                                                                 accepted, so it is removed from confmem."""
4563                                                                 del cfgfiledict[myrealdest]
4564
4565                                                         if cfgprot:
4566                                                                 mydest = new_protect_filename(mydest, newmd5=mymd5)
4567
4568                                 # whether config protection or not, we merge the new file the
4569                                 # same way.  Unless moveme=0 (blocking directory)
4570                                 if moveme:
4571                                         # Create hardlinks only for source files that already exist
4572                                         # as hardlinks (having identical st_dev and st_ino).
4573                                         hardlink_key = (mystat.st_dev, mystat.st_ino)
4574
4575                                         hardlink_candidates = self._hardlink_merge_map.get(hardlink_key)
4576                                         if hardlink_candidates is None:
4577                                                 hardlink_candidates = []
4578                                                 self._hardlink_merge_map[hardlink_key] = hardlink_candidates
4579
4580                                         mymtime = movefile(mysrc, mydest, newmtime=thismtime,
4581                                                 sstat=mystat, mysettings=self.settings,
4582                                                 hardlink_candidates=hardlink_candidates,
4583                                                 encoding=_encodings['merge'])
4584                                         if mymtime is None:
4585                                                 return 1
4586                                         hardlink_candidates.append(mydest)
4587                                         zing = ">>>"
4588
4589                                 if mymtime != None:
4590                                         outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
4591                                 showMessage("%s %s\n" % (zing,mydest))
4592                         else:
4593                                 # we are merging a fifo or device node
4594                                 zing = "!!!"
4595                                 if mydmode is None:
4596                                         # destination doesn't exist
4597                                         if movefile(mysrc, mydest, newmtime=thismtime,
4598                                                 sstat=mystat, mysettings=self.settings,
4599                                                 encoding=_encodings['merge']) is not None:
4600                                                 zing = ">>>"
4601                                         else:
4602                                                 return 1
4603                                 if stat.S_ISFIFO(mymode):
4604                                         outfile.write("fif %s\n" % myrealdest)
4605                                 else:
4606                                         outfile.write("dev %s\n" % myrealdest)
4607                                 showMessage(zing + " " + mydest + "\n")
4608
	def merge(self, mergeroot, inforoot, myroot=None, myebuild=None, cleanup=0,
		mydbapi=None, prev_mtimes=None, counter=None):
		"""
		Merge this package into the live filesystem via treewalk(),
		then run the success/die hooks and (optionally) the clean
		phase.  Locks and cached linkmap state are always released in
		the finally block, even if treewalk() raises.

		@param myroot: ignored, self._eroot is used instead
		"""
		myroot = None
		# -1 signals that the merge did not complete.
		retval = -1
		# With parallel-install the global vardb lock is not taken here;
		# finer-grained locking is assumed to happen elsewhere.
		parallel_install = "parallel-install" in self.settings.features
		if not parallel_install:
			self.lockdb()
		self.vartree.dbapi._bump_mtime(self.mycpv)
		if self._scheduler is None:
			self._scheduler = PollScheduler().sched_iface
		try:
			retval = self.treewalk(mergeroot, myroot, inforoot, myebuild,
				cleanup=cleanup, mydbapi=mydbapi, prev_mtimes=prev_mtimes,
				counter=counter)

			# If PORTAGE_BUILDDIR doesn't exist, then it probably means
			# fail-clean is enabled, and the success/die hooks have
			# already been called by EbuildPhase.
			if os.path.isdir(self.settings['PORTAGE_BUILDDIR']):

				if retval == os.EX_OK:
					phase = 'success_hooks'
				else:
					phase = 'die_hooks'

				# Run the post-merge hook phase synchronously.
				ebuild_phase = MiscFunctionsProcess(
					background=False, commands=[phase],
					scheduler=self._scheduler, settings=self.settings)
				ebuild_phase.start()
				ebuild_phase.wait()
				self._elog_process()

				# Clean the build dir on success, or on failure when
				# fail-clean is enabled, unless noclean suppresses it.
				if 'noclean' not in self.settings.features and \
					(retval == os.EX_OK or \
					'fail-clean' in self.settings.features):
					if myebuild is None:
						myebuild = os.path.join(inforoot, self.pkg + ".ebuild")

					doebuild_environment(myebuild, "clean",
						settings=self.settings, db=mydbapi)
					phase = EbuildPhase(background=False, phase="clean",
						scheduler=self._scheduler, settings=self.settings)
					phase.start()
					phase.wait()
		finally:
			self.settings.pop('REPLACING_VERSIONS', None)
			if self.vartree.dbapi._linkmap is None:
				# preserve-libs is entirely disabled
				pass
			else:
				# Drop stale soname/linkage data cached during the merge.
				self.vartree.dbapi._linkmap._clear_cache()
			self.vartree.dbapi._bump_mtime(self.mycpv)
			if not parallel_install:
				self.unlockdb()
		return retval
4667
4668         def getstring(self,name):
4669                 "returns contents of a file with whitespace converted to spaces"
4670                 if not os.path.exists(self.dbdir+"/"+name):
4671                         return ""
4672                 mydata = io.open(
4673                         _unicode_encode(os.path.join(self.dbdir, name),
4674                         encoding=_encodings['fs'], errors='strict'),
4675                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4676                         ).read().split()
4677                 return " ".join(mydata)
4678
4679         def copyfile(self,fname):
4680                 shutil.copyfile(fname,self.dbdir+"/"+os.path.basename(fname))
4681
4682         def getfile(self,fname):
4683                 if not os.path.exists(self.dbdir+"/"+fname):
4684                         return ""
4685                 return io.open(_unicode_encode(os.path.join(self.dbdir, fname),
4686                         encoding=_encodings['fs'], errors='strict'), 
4687                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4688                         ).read()
4689
4690         def setfile(self,fname,data):
4691                 kwargs = {}
4692                 if fname == 'environment.bz2' or not isinstance(data, basestring):
4693                         kwargs['mode'] = 'wb'
4694                 else:
4695                         kwargs['mode'] = 'w'
4696                         kwargs['encoding'] = _encodings['repo.content']
4697                 write_atomic(os.path.join(self.dbdir, fname), data, **kwargs)
4698
4699         def getelements(self,ename):
4700                 if not os.path.exists(self.dbdir+"/"+ename):
4701                         return []
4702                 mylines = io.open(_unicode_encode(
4703                         os.path.join(self.dbdir, ename),
4704                         encoding=_encodings['fs'], errors='strict'),
4705                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4706                         ).readlines()
4707                 myreturn = []
4708                 for x in mylines:
4709                         for y in x[:-1].split():
4710                                 myreturn.append(y)
4711                 return myreturn
4712
4713         def setelements(self,mylist,ename):
4714                 myelement = io.open(_unicode_encode(
4715                         os.path.join(self.dbdir, ename),
4716                         encoding=_encodings['fs'], errors='strict'),
4717                         mode='w', encoding=_encodings['repo.content'],
4718                         errors='backslashreplace')
4719                 for x in mylist:
4720                         myelement.write(_unicode_decode(x+"\n"))
4721                 myelement.close()
4722
4723         def isregular(self):
4724                 "Is this a regular package (does it have a CATEGORY file?  A dblink can be virtual *and* regular)"
4725                 return os.path.exists(os.path.join(self.dbdir, "CATEGORY"))
4726
4727         def _pre_merge_backup(self, backup_dblink, downgrade):
4728
4729                 if ("unmerge-backup" in self.settings.features or
4730                         (downgrade and "downgrade-backup" in self.settings.features)):
4731                         return self._quickpkg_dblink(backup_dblink, False, None)
4732
4733                 return os.EX_OK
4734
4735         def _pre_unmerge_backup(self, background):
4736
4737                 if "unmerge-backup" in self.settings.features :
4738                         logfile = None
4739                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
4740                                 logfile = self.settings.get("PORTAGE_LOG_FILE")
4741                         return self._quickpkg_dblink(self, background, logfile)
4742
4743                 return os.EX_OK
4744
	def _quickpkg_dblink(self, backup_dblink, background, logfile):
		"""
		Create a binary package of *backup_dblink* by spawning the
		quickpkg script, unless a binary package already exists and is
		not flagged invalid by the bintree.  Returns os.EX_OK when no
		work is needed or the package vanished, otherwise the exit
		status of the quickpkg process.
		"""

		trees = QueryCommand.get_db()[self.settings["EROOT"]]
		bintree = trees["bintree"]
		binpkg_path = bintree.getname(backup_dblink.mycpv)
		# Skip if a binpkg for this cpv already exists and is valid.
		if os.path.exists(binpkg_path) and \
			catsplit(backup_dblink.mycpv)[1] not in bintree.invalids:
			return os.EX_OK

		self.lockdb()
		try:

			if not backup_dblink.exists():
				# It got unmerged by a concurrent process.
				return os.EX_OK

			# Call quickpkg for support of QUICKPKG_DEFAULT_OPTS and stuff.
			quickpkg_binary = os.path.join(self.settings["PORTAGE_BIN_PATH"],
				"quickpkg")

			# Let quickpkg inherit the global vartree config's env.
			env = dict(self.vartree.settings.items())
			# Tell the child that the vardb lock is already held here,
			# so it must not try to re-acquire it.
			env["__PORTAGE_INHERIT_VARDB_LOCK"] = "1"

			# Ensure the child uses the same portage python modules as
			# this process by prepending _pym_path to PYTHONPATH.
			# NOTE(review): os.path.samefile raises if pythonpath[0]
			# does not exist -- presumably it always does here; verify.
			pythonpath = [x for x in env.get('PYTHONPATH', '').split(":") if x]
			if not pythonpath or \
				not os.path.samefile(pythonpath[0], portage._pym_path):
				pythonpath.insert(0, portage._pym_path)
			env['PYTHONPATH'] = ":".join(pythonpath)

			quickpkg_proc = SpawnProcess(
				args=[portage._python_interpreter, quickpkg_binary,
					"=%s" % (backup_dblink.mycpv,)],
				background=background, env=env,
				scheduler=self._scheduler, logfile=logfile)
			quickpkg_proc.start()

			return quickpkg_proc.wait()

		finally:
			self.unlockdb()
4786
def merge(mycat, mypkg, pkgloc, infloc,
	myroot=None, settings=None, myebuild=None,
	mytree=None, mydbapi=None, vartree=None, prev_mtimes=None, blockers=None,
	scheduler=None):
	"""
	Merge a package into the live filesystem by delegating to a
	MergeProcess task.  Returns the task's exit status, or
	errno.EACCES when EROOT is not writable.

	@param myroot: ignored, settings['EROOT'] is used instead
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	eroot = settings['EROOT']
	if not os.access(eroot, os.W_OK):
		writemsg(_("Permission denied: access('%s', W_OK)\n") % eroot,
			noiselevel=-1)
		return errno.EACCES
	background = (settings.get('PORTAGE_BACKGROUND') == '1')
	merge_task = MergeProcess(
		mycat=mycat, mypkg=mypkg, settings=settings,
		treetype=mytree, vartree=vartree,
		scheduler=(scheduler or PollScheduler().sched_iface),
		background=background, blockers=blockers, pkgloc=pkgloc,
		infloc=infloc, myebuild=myebuild, mydbapi=mydbapi,
		prev_mtimes=prev_mtimes, logfile=settings.get('PORTAGE_LOG_FILE'))
	merge_task.start()
	return merge_task.wait()
4812
def unmerge(cat, pkg, myroot=None, settings=None,
	mytrimworld=None, vartree=None,
	ldpath_mtimes=None, scheduler=None):
	"""
	Unmerge an installed package and delete its vardb entry on
	success.  Returns the unmerge exit status, or os.EX_OK when the
	package is not installed.

	@param myroot: ignored, settings['EROOT'] is used instead
	@param mytrimworld: ignored
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	mylink = dblink(cat, pkg, settings=settings, treetype="vartree",
		vartree=vartree, scheduler=scheduler)
	vartree = mylink.vartree
	# Without parallel-install, hold the global vardb lock for the
	# whole operation.
	hold_lock = "parallel-install" not in settings.features
	if hold_lock:
		mylink.lockdb()
	try:
		if not mylink.exists():
			return os.EX_OK
		retval = mylink.unmerge(ldpath_mtimes=ldpath_mtimes)
		if retval == os.EX_OK:
			mylink.lockdb()
			try:
				mylink.delete()
			finally:
				mylink.unlockdb()
		return retval
	finally:
		linkmap = vartree.dbapi._linkmap
		if linkmap is not None:
			# Drop cached soname data; a None linkmap means
			# preserve-libs is entirely disabled.
			linkmap._clear_cache()
		if hold_lock:
			mylink.unlockdb()
4848
def write_contents(contents, root, f):
	"""
	Serialize a CONTENTS mapping to any file-like object, one entry per
	line, sorted by path.  The file is left open.  *root* is expected
	to end with a path separator; it is stripped from each path while
	keeping the leading separator.
	"""
	strip = len(root) - 1
	for path in sorted(contents):
		data = contents[path]
		kind = data[0]
		rel = path[strip:]
		if kind == "obj":
			_ignored, mtime, md5sum = data
			line = "%s %s %s %s\n" % (kind, rel, md5sum, mtime)
		elif kind == "sym":
			_ignored, mtime, target = data
			line = "%s %s -> %s %s\n" % (kind, rel, target, mtime)
		else:
			# dir, dev, fif
			line = "%s %s\n" % (kind, rel)
		f.write(line)
4869
def tar_contents(contents, root, tar, protect=None, onProgress=None):
	"""
	Add each path listed in a CONTENTS-style mapping to an open tarball.

	@param contents: mapping of absolute filesystem paths to CONTENTS
		entries; only the first element of each value (the entry type,
		e.g. "obj", "sym", "dir") is consulted here
	@param root: filesystem root that all paths in contents must live
		under; a ValueError is raised for any path outside of it
	@param tar: an open tarfile.TarFile; it must also expose an
		"inodes" dict attribute, which this function uses to detect
		hardlinks, and its "encoding" attribute is overwritten below
	@param protect: optional callable; when protect(path) is true for a
		regular file, a small placeholder is archived instead of the
		real file contents (used by `quickpkg --include-config=n`)
	@param onProgress: optional callable invoked as
		onProgress(maxval, curval) to report progress
	"""
	os = _os_merge
	encoding = _encodings['merge']

	# Probe whether every path is encodable with the merge encoding.
	try:
		for x in contents:
			_unicode_encode(x,
				encoding=_encodings['merge'],
				errors='strict')
	except UnicodeEncodeError:
		# The package appears to have been merged with a
		# different value of sys.getfilesystemencoding(),
		# so fall back to utf_8 if appropriate.
		try:
			for x in contents:
				_unicode_encode(x,
					encoding=_encodings['fs'],
					errors='strict')
		except UnicodeEncodeError:
			# Neither encoding works for all paths; keep the merge
			# encoding and let per-file operations fail loudly.
			pass
		else:
			# All paths encode cleanly as utf_8: switch both the os
			# module binding and the encoding used for open() below.
			os = portage.os
			encoding = _encodings['fs']

	# Make the tarball encode member names the same way we read paths.
	tar.encoding = encoding
	# Normalize to exactly one trailing separator so the len(root)
	# slice below strips the prefix cleanly.
	root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
	# NOTE(review): id_strings is assigned but never used in this
	# function — looks like dead code left from a refactor.
	id_strings = {}
	maxval = len(contents)
	curval = 0
	if onProgress:
		onProgress(maxval, 0)
	# Archive in sorted path order so parent directories precede their
	# children in the tarball.
	paths = list(contents)
	paths.sort()
	for path in paths:
		curval += 1
		try:
			lst = os.lstat(path)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# The file vanished since it was merged; skip it but
			# still report progress for this entry.
			if onProgress:
				onProgress(maxval, curval)
			continue
		contents_type = contents[path][0]
		if path.startswith(root):
			arcname = "./" + path[len(root):]
		else:
			raise ValueError("invalid root argument: '%s'" % root)
		live_path = path
		if 'dir' == contents_type and \
			not stat.S_ISDIR(lst.st_mode) and \
			os.path.isdir(live_path):
			# Even though this was a directory in the original ${D}, it exists
			# as a symlink to a directory in the live filesystem.  It must be
			# recorded as a real directory in the tar file to ensure that tar
			# can properly extract its children.
			live_path = os.path.realpath(live_path)
			lst = os.lstat(live_path)

		# Since os.lstat() inside TarFile.gettarinfo() can trigger a
		# UnicodeEncodeError when python has something other than utf_8
		# return from sys.getfilesystemencoding() (as in bug #388773),
		# we implement the needed functionality here, using the result
		# of our successful lstat call. An alternative to this would be
		# to pass in the fileobj argument to TarFile.gettarinfo(), so
		# that it could use fstat instead of lstat. However, that would
		# have the unwanted effect of dereferencing symlinks.

		# Build the TarInfo by hand from the lstat result.
		tarinfo = tar.tarinfo()
		tarinfo.name = arcname
		tarinfo.mode = lst.st_mode
		tarinfo.uid = lst.st_uid
		tarinfo.gid = lst.st_gid
		tarinfo.size = 0
		tarinfo.mtime = lst.st_mtime
		tarinfo.linkname = ""
		if stat.S_ISREG(lst.st_mode):
			inode = (lst.st_ino, lst.st_dev)
			if (lst.st_nlink > 1 and
				inode in tar.inodes and
				arcname != tar.inodes[inode]):
				# A regular file whose inode was already archived under
				# another name: store it as a hardlink to that member.
				tarinfo.type = tarfile.LNKTYPE
				tarinfo.linkname = tar.inodes[inode]
			else:
				# First (or only) occurrence of this inode: remember the
				# archive name so later hardlinks can reference it.
				tar.inodes[inode] = arcname
				tarinfo.type = tarfile.REGTYPE
				tarinfo.size = lst.st_size
		elif stat.S_ISDIR(lst.st_mode):
			tarinfo.type = tarfile.DIRTYPE
		elif stat.S_ISLNK(lst.st_mode):
			tarinfo.type = tarfile.SYMTYPE
			tarinfo.linkname = os.readlink(live_path)
		else:
			# Device nodes, fifos, sockets, etc. are not archived.
			continue
		# Map numeric ids to names when the passwd/group databases know
		# them; on KeyError the numeric uid/gid alone is recorded.
		try:
			tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
		except KeyError:
			pass
		try:
			tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
		except KeyError:
			pass

		if stat.S_ISREG(lst.st_mode):
			if protect and protect(path):
				# Create an empty file as a place holder in order to avoid
				# potential collision-protect issues.
				# NOTE(review): if this file was classified as a hardlink
				# (LNKTYPE) above, this branch still writes placeholder
				# data for it — presumably harmless, but worth confirming.
				f = tempfile.TemporaryFile()
				f.write(_unicode_encode(
					"# empty file because --include-config=n " + \
					"when `quickpkg` was used\n"))
				f.flush()
				f.seek(0)
				tarinfo.size = os.fstat(f.fileno()).st_size
				tar.addfile(tarinfo, f)
				f.close()
			else:
				# Open with the same encoding chosen by the probe above,
				# since TarFile.addfile() reads the file object directly.
				f = open(_unicode_encode(path,
					encoding=encoding,
					errors='strict'), 'rb')
				try:
					tar.addfile(tarinfo, f)
				finally:
					f.close()
		else:
			# Directories, symlinks and hardlinks carry no file data.
			tar.addfile(tarinfo)
		if onProgress:
			onProgress(maxval, curval)