Use EROOT instead of ROOT for keys everywhere.
[portage.git] / pym / portage / dbapi / vartree.py
1 # Copyright 1998-2011 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 __all__ = [
5         "vardbapi", "vartree", "dblink"] + \
6         ["write_contents", "tar_contents"]
7
8 import portage
9 portage.proxy.lazyimport.lazyimport(globals(),
10         'portage.checksum:_perform_md5_merge@perform_md5',
11         'portage.data:portage_gid,portage_uid,secpass',
12         'portage.dbapi.dep_expand:dep_expand',
13         'portage.dbapi._MergeProcess:MergeProcess',
14         'portage.dep:dep_getkey,isjustname,match_from_list,' + \
15                 'use_reduce,_slot_re',
16         'portage.elog:collect_ebuild_messages,collect_messages,' + \
17                 'elog_process,_merge_logentries',
18         'portage.locks:lockdir,unlockdir,lockfile,unlockfile',
19         'portage.output:bold,colorize',
20         'portage.package.ebuild.doebuild:doebuild_environment,' + \
21                 '_merge_unicode_error', '_spawn_phase',
22         'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
23         'portage.update:fixdbentries',
24         'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \
25                 'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \
26                 'grabdict,normalize_path,new_protect_filename',
27         'portage.util.digraph:digraph',
28         'portage.util.env_update:env_update',
29         'portage.util.listdir:dircache,listdir',
30         'portage.util._dyn_libs.PreservedLibsRegistry:PreservedLibsRegistry',
31         'portage.util._dyn_libs.LinkageMapELF:LinkageMapELF@LinkageMap',
32         'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,pkgcmp,' + \
33                 '_pkgsplit@pkgsplit',
34 )
35
36 from portage.const import CACHE_PATH, CONFIG_MEMORY_FILE, \
37         PORTAGE_PACKAGE_ATOM, PRIVATE_PATH, VDB_PATH
38 from portage.const import _ENABLE_DYN_LINK_MAP, _ENABLE_PRESERVE_LIBS
39 from portage.dbapi import dbapi
40 from portage.exception import CommandNotFound, \
41         InvalidData, InvalidLocation, InvalidPackageName, \
42         FileNotFound, PermissionDenied, UnsupportedAPIException
43 from portage.localization import _
44 from portage.util.movefile import movefile
45
46 from portage import abssymlink, _movefile, bsd_chflags
47
48 # This is a special version of the os module, wrapped for unicode support.
49 from portage import os
50 from portage import _encodings
51 from portage import _os_merge
52 from portage import _selinux_merge
53 from portage import _unicode_decode
54 from portage import _unicode_encode
55
56 from _emerge.EbuildBuildDir import EbuildBuildDir
57 from _emerge.EbuildPhase import EbuildPhase
58 from _emerge.emergelog import emergelog
59 from _emerge.PollScheduler import PollScheduler
60 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
61
62 import errno
63 import gc
64 import io
65 from itertools import chain
66 import logging
67 import os as _os
68 import re
69 import shutil
70 import stat
71 import sys
72 import tempfile
73 import textwrap
74 import time
75 import warnings
76
# cPickle is the C-accelerated pickler on Python 2; on Python 3 it was
# merged into the plain pickle module.
try:
	import cPickle as pickle
except ImportError:
	import pickle

if sys.hexversion >= 0x3000000:
	# Python 3 compatibility: alias builtins that were removed in py3 so
	# the rest of this module can use them unconditionally.
	basestring = str
	long = int
class vardbapi(dbapi):
	"""dbapi implementation for installed packages (the vdb located at
	EROOT + VDB_PATH)."""

	# Directory names inside the vdb that never contain package data.
	_excluded_dirs = ["CVS", "lost+found"]
	_excluded_dirs = [re.escape(x) for x in _excluded_dirs]
	# Also skip hidden directories and partially merged -MERGING-* entries.
	_excluded_dirs = re.compile(r'^(\..*|-MERGING-.*|' + \
		"|".join(_excluded_dirs) + r')$')

	# Bump these when the corresponding on-disk pickle formats change.
	_aux_cache_version        = "1"
	_owners_cache_version     = "1"

	# Number of uncached packages to trigger cache update, since
	# it's wasteful to update it for every vdb change.
	_aux_cache_threshold = 5

	# Metadata keys matching this pattern are cached in addition to the
	# fixed _aux_cache_keys set (see aux_get).
	_aux_cache_keys_re = re.compile(r'^NEEDED\..*$')
	# Values for these keys may span multiple lines.
	_aux_multi_line_re = re.compile(r'^(CONTENTS|NEEDED\..*)$')
	def __init__(self, _unused_param=None, categories=None, settings=None, vartree=None):
		"""
		Initialize paths, locks, and the various in-memory caches.

		The categories parameter is unused since the dbapi class
		now has a categories property that is generated from the
		available packages.

		@param _unused_param: deprecated; formerly the ROOT path
		@param settings: config instance; defaults to portage.settings
		@param vartree: owning vartree; defaults to the global one for
			this EROOT
		"""

		# Used by emerge to check whether any packages
		# have been added or removed.
		self._pkgs_changed = False

		# The _aux_cache_threshold doesn't work as designed
		# if the cache is flushed from a subprocess, so we
		# use this to avoid wasteful vdb cache updates.
		self._flush_cache_enabled = True

		#cache for category directory mtimes
		self.mtdircache = {}

		#cache for dependency checks
		self.matchcache = {}

		#cache for cp_list results
		self.cpcache = {}

		self.blockers = None
		if settings is None:
			settings = portage.settings
		self.settings = settings

		# The positional root parameter is obsolete; warn if a caller
		# still passes something that disagrees with settings['ROOT'].
		if _unused_param is not None and _unused_param != settings['ROOT']:
			warnings.warn("The first parameter of the "
				"portage.dbapi.vartree.vardbapi"
				" constructor is now unused. Use "
				"settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)

		# All vdb paths are keyed on EROOT (see commit subject).
		self._eroot = settings['EROOT']
		self._dbroot = self._eroot + VDB_PATH
		self._lock = None
		self._lock_count = 0

		self._conf_mem_file = self._eroot + CONFIG_MEMORY_FILE
		self._fs_lock_obj = None
		self._fs_lock_count = 0

		if vartree is None:
			vartree = portage.db[settings['EROOT']]['vartree']
		self.vartree = vartree
		# Metadata keys that aux_get() caches persistently.
		self._aux_cache_keys = set(
			["BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "DESCRIPTION",
			"EAPI", "HOMEPAGE", "IUSE", "KEYWORDS",
			"LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND",
			"repository", "RESTRICT" , "SLOT", "USE", "DEFINED_PHASES",
			"REQUIRED_USE"])
		self._aux_cache_obj = None
		self._aux_cache_filename = os.path.join(self._eroot,
			CACHE_PATH, "vdb_metadata.pickle")
		self._counter_path = os.path.join(self._eroot,
			CACHE_PATH, "counter")

		# Optional subsystems, gated by build-time constants.
		self._plib_registry = None
		if _ENABLE_PRESERVE_LIBS:
			self._plib_registry = PreservedLibsRegistry(settings["ROOT"],
				os.path.join(self._eroot, PRIVATE_PATH,
				"preserved_libs_registry"))

		self._linkmap = None
		if _ENABLE_DYN_LINK_MAP:
			self._linkmap = LinkageMap(self)
		self._owners = self._owners_db(self)

		self._cached_counter = None
176
177         @property
178         def root(self):
179                 warnings.warn("The root attribute of "
180                         "portage.dbapi.vartree.vardbapi"
181                         " is deprecated. Use "
182                         "settings['ROOT'] instead.",
183                         DeprecationWarning, stacklevel=2)
184                 return self.settings['ROOT']
185
186         def getpath(self, mykey, filename=None):
187                 # This is an optimized hotspot, so don't use unicode-wrapped
188                 # os module and don't use os.path.join().
189                 rValue = self._eroot + VDB_PATH + _os.sep + mykey
190                 if filename is not None:
191                         # If filename is always relative, we can do just
192                         # rValue += _os.sep + filename
193                         rValue = _os.path.join(rValue, filename)
194                 return rValue
195
196         def lock(self):
197                 """
198                 Acquire a reentrant lock, blocking, for cooperation with concurrent
199                 processes. State is inherited by subprocesses, allowing subprocesses
200                 to reenter a lock that was acquired by a parent process. However,
201                 a lock can be released only by the same process that acquired it.
202                 """
203                 if self._lock_count:
204                         self._lock_count += 1
205                 else:
206                         if self._lock is not None:
207                                 raise AssertionError("already locked")
208                         # At least the parent needs to exist for the lock file.
209                         ensure_dirs(self._dbroot)
210                         self._lock = lockdir(self._dbroot)
211                         self._lock_count += 1
212
213         def unlock(self):
214                 """
215                 Release a lock, decrementing the recursion level. Each unlock() call
216                 must be matched with a prior lock() call, or else an AssertionError
217                 will be raised if unlock() is called while not locked.
218                 """
219                 if self._lock_count > 1:
220                         self._lock_count -= 1
221                 else:
222                         if self._lock is None:
223                                 raise AssertionError("not locked")
224                         self._lock_count = 0
225                         unlockdir(self._lock)
226                         self._lock = None
227
228         def _fs_lock(self):
229                 """
230                 Acquire a reentrant lock, blocking, for cooperation with concurrent
231                 processes.
232                 """
233                 if self._fs_lock_count < 1:
234                         if self._fs_lock_obj is not None:
235                                 raise AssertionError("already locked")
236                         try:
237                                 self._fs_lock_obj = lockfile(self._conf_mem_file)
238                         except InvalidLocation:
239                                 self.settings._init_dirs()
240                                 self._fs_lock_obj = lockfile(self._conf_mem_file)
241                 self._fs_lock_count += 1
242
243         def _fs_unlock(self):
244                 """
245                 Release a lock, decrementing the recursion level.
246                 """
247                 if self._fs_lock_count <= 1:
248                         if self._fs_lock_obj is None:
249                                 raise AssertionError("not locked")
250                         unlockfile(self._fs_lock_obj)
251                         self._fs_lock_obj = None
252                 self._fs_lock_count -= 1
253
254         def _bump_mtime(self, cpv):
255                 """
256                 This is called before an after any modifications, so that consumers
257                 can use directory mtimes to validate caches. See bug #290428.
258                 """
259                 base = self._eroot + VDB_PATH
260                 cat = catsplit(cpv)[0]
261                 catdir = base + _os.sep + cat
262                 t = time.time()
263                 t = (t, t)
264                 try:
265                         for x in (catdir, base):
266                                 os.utime(x, t)
267                 except OSError:
268                         ensure_dirs(catdir)
269
270         def cpv_exists(self, mykey, myrepo=None):
271                 "Tells us whether an actual ebuild exists on disk (no masking)"
272                 return os.path.exists(self.getpath(mykey))
273
274         def cpv_counter(self, mycpv):
275                 "This method will grab the COUNTER. Returns a counter value."
276                 try:
277                         return long(self.aux_get(mycpv, ["COUNTER"])[0])
278                 except (KeyError, ValueError):
279                         pass
280                 writemsg_level(_("portage: COUNTER for %s was corrupted; " \
281                         "resetting to value of 0\n") % (mycpv,),
282                         level=logging.ERROR, noiselevel=-1)
283                 return 0
284
285         def cpv_inject(self, mycpv):
286                 "injects a real package into our on-disk database; assumes mycpv is valid and doesn't already exist"
287                 ensure_dirs(self.getpath(mycpv))
288                 counter = self.counter_tick(mycpv=mycpv)
289                 # write local package counter so that emerge clean does the right thing
290                 write_atomic(self.getpath(mycpv, filename="COUNTER"), str(counter))
291
292         def isInjected(self, mycpv):
293                 if self.cpv_exists(mycpv):
294                         if os.path.exists(self.getpath(mycpv, filename="INJECTED")):
295                                 return True
296                         if not os.path.exists(self.getpath(mycpv, filename="CONTENTS")):
297                                 return True
298                 return False
299
	def move_ent(self, mylist, repo_match=None):
		"""
		Apply a package move (e.g. from an update entry) to installed
		packages, renaming their vdb directories and fixing up the
		embedded metadata. Returns the number of packages moved.

		@param mylist: update entry; mylist[1] is the old cp and
			mylist[2] the new cp
		@param repo_match: optional predicate limiting the move to
			packages from matching repositories
		"""
		origcp = mylist[1]
		newcp = mylist[2]

		# sanity check
		for atom in (origcp, newcp):
			if not isjustname(atom):
				raise InvalidPackageName(str(atom))
		origmatches = self.match(origcp, use_cache=0)
		moves = 0
		if not origmatches:
			return moves
		for mycpv in origmatches:
			mycpv_cp = cpv_getkey(mycpv)
			if mycpv_cp != origcp:
				# Ignore PROVIDE virtual match.
				continue
			if repo_match is not None \
				and not repo_match(self.aux_get(mycpv, ['repository'])[0]):
				continue
			mynewcpv = mycpv.replace(mycpv_cp, str(newcp), 1)
			mynewcat = catsplit(newcp)[0]
			origpath = self.getpath(mycpv)
			if not os.path.exists(origpath):
				continue
			moves += 1
			if not os.path.exists(self.getpath(mynewcat)):
				#create the directory
				ensure_dirs(self.getpath(mynewcat))
			newpath = self.getpath(mynewcpv)
			if os.path.exists(newpath):
				#dest already exists; keep this puppy where it is.
				continue
			_movefile(origpath, newpath, mysettings=self.settings)
			# Both the old and new locations must be dropped from the
			# in-memory caches.
			self._clear_pkg_cache(self._dblink(mycpv))
			self._clear_pkg_cache(self._dblink(mynewcpv))

			# We need to rename the ebuild now.
			old_pf = catsplit(mycpv)[1]
			new_pf = catsplit(mynewcpv)[1]
			if new_pf != old_pf:
				try:
					os.rename(os.path.join(newpath, old_pf + ".ebuild"),
						os.path.join(newpath, new_pf + ".ebuild"))
				except EnvironmentError as e:
					# A missing ebuild is tolerated; anything else is fatal.
					if e.errno != errno.ENOENT:
						raise
					del e
			# Keep the embedded metadata consistent with the new name.
			write_atomic(os.path.join(newpath, "PF"), new_pf+"\n")
			write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat+"\n")
			fixdbentries([mylist], newpath)
		return moves
352
	def cp_list(self, mycp, use_cache=1):
		"""
		Return the list of installed cpvs for the cat/pkg mycp, sorted
		ascending by version. Results are cached per-cp, keyed on the
		category directory's mtime.
		"""
		mysplit=catsplit(mycp)
		# NOTE(review): this strips the first character when the category
		# is exactly '*' — presumably legacy wildcard handling; confirm.
		if mysplit[0] == '*':
			mysplit[0] = mysplit[0][1:]
		# Category dir mtime is the cache validity token.
		try:
			mystat = os.stat(self.getpath(mysplit[0])).st_mtime
		except OSError:
			mystat = 0
		if use_cache and mycp in self.cpcache:
			cpc = self.cpcache[mycp]
			if cpc[0] == mystat:
				# Cache hit; return a copy so callers can't mutate it.
				return cpc[1][:]
		cat_dir = self.getpath(mysplit[0])
		try:
			dir_list = os.listdir(cat_dir)
		except EnvironmentError as e:
			if e.errno == PermissionDenied.errno:
				raise PermissionDenied(cat_dir)
			del e
			dir_list = []

		returnme = []
		for x in dir_list:
			if self._excluded_dirs.match(x) is not None:
				continue
			ps = pkgsplit(x)
			if not ps:
				# Not a valid pkg-version directory; report and skip.
				self.invalidentry(os.path.join(self.getpath(mysplit[0]), x))
				continue
			if len(mysplit) > 1:
				if ps[0] == mysplit[1]:
					returnme.append(mysplit[0]+"/"+x)
		self._cpv_sort_ascending(returnme)
		if use_cache:
			# Store a copy together with the mtime token.
			self.cpcache[mycp] = [mystat, returnme[:]]
		elif mycp in self.cpcache:
			del self.cpcache[mycp]
		return returnme
391
	def cpv_all(self, use_cache=1):
		"""
		Return a list of all installed cpvs found in the vdb.

		Set use_cache=0 to bypass the portage.cachedir() cache in cases
		when the accuracy of mtime staleness checks should not be trusted
		(generally this is only necessary in critical sections that
		involve merge or unmerge of packages).
		"""
		returnme = []
		basepath = os.path.join(self._eroot, VDB_PATH) + os.path.sep

		if use_cache:
			from portage import listdir
		else:
			# Uncached replacement with the same call signature; extra
			# keyword arguments are accepted and ignored.
			def listdir(p, **kwargs):
				try:
					return [x for x in os.listdir(p) \
						if os.path.isdir(os.path.join(p, x))]
				except EnvironmentError as e:
					if e.errno == PermissionDenied.errno:
						raise PermissionDenied(p)
					del e
					return []

		for x in listdir(basepath, EmptyOnError=1, ignorecvs=1, dirsonly=1):
			if self._excluded_dirs.match(x) is not None:
				continue
			if not self._category_re.match(x):
				continue
			for y in listdir(basepath + x, EmptyOnError=1, dirsonly=1):
				if self._excluded_dirs.match(y) is not None:
					continue
				subpath = x + "/" + y
				# -MERGING- should never be a cpv, nor should files.
				try:
					if catpkgsplit(subpath) is None:
						self.invalidentry(self.getpath(subpath))
						continue
				except InvalidData:
					self.invalidentry(self.getpath(subpath))
					continue
				returnme.append(subpath)

		return returnme
435
436         def cp_all(self, use_cache=1):
437                 mylist = self.cpv_all(use_cache=use_cache)
438                 d={}
439                 for y in mylist:
440                         if y[0] == '*':
441                                 y = y[1:]
442                         try:
443                                 mysplit = catpkgsplit(y)
444                         except InvalidData:
445                                 self.invalidentry(self.getpath(y))
446                                 continue
447                         if not mysplit:
448                                 self.invalidentry(self.getpath(y))
449                                 continue
450                         d[mysplit[0]+"/"+mysplit[1]] = None
451                 return list(d)
452
	def checkblockers(self, origdep):
		# Intentionally a no-op for installed packages; presumably kept
		# for dbapi interface compatibility — TODO confirm.
		pass
455
456         def _clear_cache(self):
457                 self.mtdircache.clear()
458                 self.matchcache.clear()
459                 self.cpcache.clear()
460                 self._aux_cache_obj = None
461
	def _add(self, pkg_dblink):
		# A package was merged: flag the vdb as changed and drop any
		# cache entries that could now be stale.
		self._pkgs_changed = True
		self._clear_pkg_cache(pkg_dblink)
465
	def _remove(self, pkg_dblink):
		# A package was unmerged: flag the vdb as changed and drop any
		# cache entries that could now be stale.
		self._pkgs_changed = True
		self._clear_pkg_cache(pkg_dblink)
469
	def _clear_pkg_cache(self, pkg_dblink):
		# Due to 1 second mtime granularity in <python-2.5, mtime checks
		# are not always sufficient to invalidate vardbapi caches. Therefore,
		# the caches need to be actively invalidated here.
		self.mtdircache.pop(pkg_dblink.cat, None)
		self.matchcache.pop(pkg_dblink.cat, None)
		self.cpcache.pop(pkg_dblink.mysplit[0], None)
		# Also invalidate the shared listdir cache for this package's
		# category directory.
		dircache.pop(pkg_dblink.dbcatdir, None)
478
	def match(self, origdep, use_cache=1):
		"""Caching match function: expand origdep and return the list of
		installed cpvs matching it. Results are cached per category,
		validated against the category directory's mtime."""
		mydep = dep_expand(
			origdep, mydb=self, use_cache=use_cache, settings=self.settings)
		mykey = dep_getkey(mydep)
		mycat = catsplit(mykey)[0]
		if not use_cache:
			# Purge any cached data for this category and match fresh.
			if mycat in self.matchcache:
				del self.mtdircache[mycat]
				del self.matchcache[mycat]
			return list(self._iter_match(mydep,
				self.cp_list(mydep.cp, use_cache=use_cache)))
		# Category dir mtime serves as the cache validity token.
		try:
			curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime
		except (IOError, OSError):
			curmtime=0

		if mycat not in self.matchcache or \
			self.mtdircache[mycat] != curmtime:
			# clear cache entry
			self.mtdircache[mycat] = curmtime
			self.matchcache[mycat] = {}
		if mydep not in self.matchcache[mycat]:
			mymatch = list(self._iter_match(mydep,
				self.cp_list(mydep.cp, use_cache=use_cache)))
			self.matchcache[mycat][mydep] = mymatch
		# Return a copy so callers cannot mutate the cached list.
		return self.matchcache[mycat][mydep][:]
506
507         def findname(self, mycpv, myrepo=None):
508                 return self.getpath(str(mycpv), filename=catsplit(mycpv)[1]+".ebuild")
509
	def flush_cache(self):
		"""If the current user has permission and the internal aux_get cache has
		been updated, save it to disk and mark it unmodified.  This is called
		by emerge after it has loaded the full vdb for use in dependency
		calculations.  Currently, the cache is only written if the user has
		superuser privileges (since that's required to obtain a lock), but all
		users have read access and benefit from faster metadata lookups (as
		long as at least part of the cache is still valid)."""
		if self._flush_cache_enabled and \
			self._aux_cache is not None and \
			len(self._aux_cache["modified"]) >= self._aux_cache_threshold and \
			secpass >= 2:
			self._owners.populate() # index any unindexed contents
			# Drop cache entries for packages no longer installed.
			valid_nodes = set(self.cpv_all())
			for cpv in list(self._aux_cache["packages"]):
				if cpv not in valid_nodes:
					del self._aux_cache["packages"][cpv]
			# "modified" is process-local bookkeeping; remove it before
			# pickling so it never hits the disk, and restore it below.
			del self._aux_cache["modified"]
			try:
				f = atomic_ofstream(self._aux_cache_filename, 'wb')
				pickle.dump(self._aux_cache, f, protocol=2)
				f.close()
				apply_secpass_permissions(
					self._aux_cache_filename, gid=portage_gid, mode=0o644)
			except (IOError, OSError) as e:
				# Best-effort write; failure just means slower lookups.
				pass
			self._aux_cache["modified"] = set()
537
538         @property
539         def _aux_cache(self):
540                 if self._aux_cache_obj is None:
541                         self._aux_cache_init()
542                 return self._aux_cache_obj
543
	def _aux_cache_init(self):
		"""Load the pickled metadata cache from disk, validating its
		structure and version; fall back to a fresh, empty cache on any
		problem (the cache is completely disposable)."""
		aux_cache = None
		open_kwargs = {}
		if sys.hexversion >= 0x3000000:
			# Buffered io triggers extreme performance issues in
			# Unpickler.load() (problem observed with python-3.0.1).
			# Unfortunately, performance is still poor relative to
			# python-2.x, but buffering makes it much worse.
			open_kwargs["buffering"] = 0
		try:
			f = open(_unicode_encode(self._aux_cache_filename,
				encoding=_encodings['fs'], errors='strict'),
				mode='rb', **open_kwargs)
			mypickle = pickle.Unpickler(f)
			try:
				# Disable global lookups to harden unpickling of this
				# (locally produced) file.
				mypickle.find_global = None
			except AttributeError:
				# TODO: If py3k, override Unpickler.find_class().
				pass
			aux_cache = mypickle.load()
			f.close()
			del f
		except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError) as e:
			# A missing or unreadable cache file is normal; anything
			# else is reported but still non-fatal.
			if isinstance(e, EnvironmentError) and \
				getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES):
				pass
			else:
				writemsg(_unicode_decode(_("!!! Error loading '%s': %s\n")) % \
					(self._aux_cache_filename, e), noiselevel=-1)
			del e

		# Discard anything structurally invalid or version-mismatched.
		if not aux_cache or \
			not isinstance(aux_cache, dict) or \
			aux_cache.get("version") != self._aux_cache_version or \
			not aux_cache.get("packages"):
			aux_cache = {"version": self._aux_cache_version}
			aux_cache["packages"] = {}

		# Validate the nested owners cache the same way.
		owners = aux_cache.get("owners")
		if owners is not None:
			if not isinstance(owners, dict):
				owners = None
			elif "version" not in owners:
				owners = None
			elif owners["version"] != self._owners_cache_version:
				owners = None
			elif "base_names" not in owners:
				owners = None
			elif not isinstance(owners["base_names"], dict):
				owners = None

		if owners is None:
			owners = {
				"base_names" : {},
				"version"    : self._owners_cache_version
			}
			aux_cache["owners"] = owners

		# Process-local set of cpvs whose cached data changed; consumed
		# by flush_cache().
		aux_cache["modified"] = set()
		self._aux_cache_obj = aux_cache
604
	def aux_get(self, mycpv, wants, myrepo = None):
		"""This automatically caches selected keys that are frequently needed
		by emerge for dependency calculations.  The cached metadata is
		considered valid if the mtime of the package directory has not changed
		since the data was cached.  The cache is stored in a pickled dict
		object with the following format:

		{version:"1", "packages":{cpv1:(mtime,{k1,v1, k2,v2, ...}), cpv2...}}

		If an error occurs while loading the cache pickle or the version is
		unrecognized, the cache will simply be recreated from scratch (it is
		completely disposable).

		@param mycpv: cpv of an installed package
		@param wants: iterable of metadata key names to retrieve
		@param myrepo: ignored here (accepted for dbapi interface
			compatibility)
		@rtype: list
		@returns: metadata values in the same order as wants
		@raises KeyError: if the package is not installed
		"""
		# Only keys matched by _aux_cache_keys / _aux_cache_keys_re are
		# cacheable; a request containing none of them bypasses the cache.
		cache_these_wants = self._aux_cache_keys.intersection(wants)
		for x in wants:
			if self._aux_cache_keys_re.match(x) is not None:
				cache_these_wants.add(x)

		if not cache_these_wants:
			return self._aux_get(mycpv, wants)

		cache_these = set(self._aux_cache_keys)
		cache_these.update(cache_these_wants)

		mydir = self.getpath(mycpv)
		mydir_stat = None
		try:
			mydir_stat = os.stat(mydir)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
			# A missing package directory means the package is not
			# installed; the dbapi interface reports that as KeyError.
			raise KeyError(mycpv)
		mydir_mtime = mydir_stat[stat.ST_MTIME]
		pkg_data = self._aux_cache["packages"].get(mycpv)
		pull_me = cache_these.union(wants)
		# The package directory mtime doubles as the "_mtime_" pseudo-key.
		mydata = {"_mtime_" : mydir_mtime}
		cache_valid = False
		cache_incomplete = False
		cache_mtime = None
		metadata = None
		# Validate the structure of the cached entry before trusting it,
		# since the pickled cache contents are completely disposable.
		if pkg_data is not None:
			if not isinstance(pkg_data, tuple) or len(pkg_data) != 2:
				pkg_data = None
			else:
				cache_mtime, metadata = pkg_data
				if not isinstance(cache_mtime, (long, int)) or \
					not isinstance(metadata, dict):
					pkg_data = None

		if pkg_data:
			cache_mtime, metadata = pkg_data
			# The entry is valid only if the directory mtime is unchanged
			# since the entry was recorded.
			cache_valid = cache_mtime == mydir_mtime
		if cache_valid:
			# Migrate old metadata to unicode.
			for k, v in metadata.items():
				metadata[k] = _unicode_decode(v,
					encoding=_encodings['repo.content'], errors='replace')

			mydata.update(metadata)
			pull_me.difference_update(mydata)

		if pull_me:
			# pull any needed data and cache it
			aux_keys = list(pull_me)
			for k, v in zip(aux_keys,
				self._aux_get(mycpv, aux_keys, st=mydir_stat)):
				mydata[k] = v
			# Rewrite the cache entry when it was invalid or incomplete.
			if not cache_valid or cache_these.difference(metadata):
				cache_data = {}
				if cache_valid and metadata:
					cache_data.update(metadata)
				for aux_key in cache_these:
					cache_data[aux_key] = mydata[aux_key]
				self._aux_cache["packages"][mycpv] = (mydir_mtime, cache_data)
				self._aux_cache["modified"].add(mycpv)

		if _slot_re.match(mydata['SLOT']) is None:
			# Empty or invalid slot triggers InvalidAtom exceptions when
			# generating slot atoms for packages, so translate it to '0' here.
			mydata['SLOT'] = _unicode_decode('0')

		return [mydata[x] for x in wants]
687
688         def _aux_get(self, mycpv, wants, st=None):
689                 mydir = self.getpath(mycpv)
690                 if st is None:
691                         try:
692                                 st = os.stat(mydir)
693                         except OSError as e:
694                                 if e.errno == errno.ENOENT:
695                                         raise KeyError(mycpv)
696                                 elif e.errno == PermissionDenied.errno:
697                                         raise PermissionDenied(mydir)
698                                 else:
699                                         raise
700                 if not stat.S_ISDIR(st.st_mode):
701                         raise KeyError(mycpv)
702                 results = []
703                 for x in wants:
704                         if x == "_mtime_":
705                                 results.append(st[stat.ST_MTIME])
706                                 continue
707                         try:
708                                 myf = io.open(
709                                         _unicode_encode(os.path.join(mydir, x),
710                                         encoding=_encodings['fs'], errors='strict'),
711                                         mode='r', encoding=_encodings['repo.content'],
712                                         errors='replace')
713                                 try:
714                                         myd = myf.read()
715                                 finally:
716                                         myf.close()
717                                 # Preserve \n for metadata that is known to
718                                 # contain multiple lines.
719                                 if self._aux_multi_line_re.match(x) is None:
720                                         myd = " ".join(myd.split())
721                         except IOError:
722                                 myd = _unicode_decode('')
723                         if x == "EAPI" and not myd:
724                                 results.append(_unicode_decode('0'))
725                         else:
726                                 results.append(myd)
727                 return results
728
729         def aux_update(self, cpv, values):
730                 mylink = self._dblink(cpv)
731                 if not mylink.exists():
732                         raise KeyError(cpv)
733                 self._bump_mtime(cpv)
734                 self._clear_pkg_cache(mylink)
735                 for k, v in values.items():
736                         if v:
737                                 mylink.setfile(k, v)
738                         else:
739                                 try:
740                                         os.unlink(os.path.join(self.getpath(cpv), k))
741                                 except EnvironmentError:
742                                         pass
743                 self._bump_mtime(cpv)
744
	def counter_tick(self, myroot=None, mycpv=None):
		"""
		Increment the global package counter and return the new value.

		@param myroot: ignored, self._eroot is used instead
		@param mycpv: ignored (forwarded to counter_tick_core, which
			also ignores it)
		@rtype: int
		@returns: new counter value
		"""
		return self.counter_tick_core(incrementing=1, mycpv=mycpv)
750
	def get_counter_tick_core(self, myroot=None, mycpv=None):
		"""
		Use this method to retrieve the counter instead
		of having to trust the value of a global counter
		file that can lead to invalid COUNTER
		generation. When cache is valid, the package COUNTER
		files are not read and we rely on the timestamp of
		the package directory to validate cache. The stat
		calls should only take a short time, so performance
		is sufficient without having to rely on a potentially
		corrupt global counter file.

		The global counter file located at
		$CACHE_PATH/counter serves to record the
		counter of the last installed package and
		it also corresponds to the total number of
		installation actions that have occurred in
		the history of this package database.

		@param myroot: ignored, self._eroot is used instead
		@param mycpv: ignored
		@rtype: int
		@returns: one greater than the highest known counter value
		"""
		myroot = None
		new_vdb = False
		counter = -1
		try:
			cfile = io.open(
				_unicode_encode(self._counter_path,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			# An unreadable counter file is only worth reporting when
			# packages are already installed; otherwise this is simply
			# a brand new vdb.
			new_vdb = not bool(self.cpv_all())
			if not new_vdb:
				writemsg(_("!!! Unable to read COUNTER file: '%s'\n") % \
					self._counter_path, noiselevel=-1)
				writemsg("!!! %s\n" % str(e), noiselevel=-1)
			del e
		else:
			try:
				try:
					counter = long(cfile.readline().strip())
				finally:
					cfile.close()
			except (OverflowError, ValueError) as e:
				# Corrupt file: fall through with counter == -1 so the
				# per-package scan below recovers a sane value.
				writemsg(_("!!! COUNTER file is corrupt: '%s'\n") % \
					self._counter_path, noiselevel=-1)
				writemsg("!!! %s\n" % str(e), noiselevel=-1)
				del e

		if self._cached_counter == counter:
			# The in-memory value matches the file, so trust it and skip
			# the expensive scan of every installed package.
			max_counter = counter
		else:
			# We must ensure that we return a counter
			# value that is at least as large as the
			# highest one from the installed packages,
			# since having a corrupt value that is too low
			# can trigger incorrect AUTOCLEAN behavior due
			# to newly installed packages having lower
			# COUNTERs than the previous version in the
			# same slot.
			max_counter = counter
			for cpv in self.cpv_all():
				try:
					pkg_counter = int(self.aux_get(cpv, ["COUNTER"])[0])
				except (KeyError, OverflowError, ValueError):
					# Packages with missing/corrupt COUNTER are skipped.
					continue
				if pkg_counter > max_counter:
					max_counter = pkg_counter

		if counter < 0 and not new_vdb:
			writemsg(_("!!! Initializing COUNTER to " \
				"value of %d\n") % max_counter, noiselevel=-1)

		return max_counter + 1
825
	def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None):
		"""
		This method will grab the next COUNTER value and record it back
		to the global file. Note that every package install must have
		a unique counter, since a slotmove update can move two packages
		into the same SLOT and in that case it's important that both
		packages have different COUNTER metadata.

		@param myroot: ignored, self._eroot is used instead
		@param incrementing: when true (default), increment and persist
			the counter; when false, only read the current value
		@param mycpv: ignored
		@rtype: int
		@returns: new counter value
		"""
		myroot = None
		mycpv = None
		# Hold the vdb lock across the whole read-modify-write cycle so
		# concurrent installs cannot obtain the same counter value.
		self.lock()
		try:
			counter = self.get_counter_tick_core() - 1
			if incrementing:
				#increment counter
				counter += 1
				# update new global counter file
				try:
					write_atomic(self._counter_path, str(counter))
				except InvalidLocation:
					# The target directory may not exist yet; create
					# the needed directories and retry once.
					self.settings._init_dirs()
					write_atomic(self._counter_path, str(counter))
			self._cached_counter = counter

			# Since we hold a lock, this is a good opportunity
			# to flush the cache. Note that this will only
			# flush the cache periodically in the main process
			# when _aux_cache_threshold is exceeded.
			self.flush_cache()
		finally:
			self.unlock()

		return counter
864
865         def _dblink(self, cpv):
866                 category, pf = catsplit(cpv)
867                 return dblink(category, pf, settings=self.settings,
868                         vartree=self.vartree, treetype="vartree")
869
870         def removeFromContents(self, pkg, paths, relative_paths=True):
871                 """
872                 @param pkg: cpv for an installed package
873                 @type pkg: string
874                 @param paths: paths of files to remove from contents
875                 @type paths: iterable
876                 """
877                 if not hasattr(pkg, "getcontents"):
878                         pkg = self._dblink(pkg)
879                 root = self.settings['ROOT']
880                 root_len = len(root) - 1
881                 new_contents = pkg.getcontents().copy()
882                 removed = 0
883
884                 for filename in paths:
885                         filename = _unicode_decode(filename,
886                                 encoding=_encodings['content'], errors='strict')
887                         filename = normalize_path(filename)
888                         if relative_paths:
889                                 relative_filename = filename
890                         else:
891                                 relative_filename = filename[root_len:]
892                         contents_key = pkg._match_contents(relative_filename)
893                         if contents_key:
894                                 del new_contents[contents_key]
895                                 removed += 1
896
897                 if removed:
898                         self._bump_mtime(pkg.mycpv)
899                         f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
900                         write_contents(new_contents, root, f)
901                         f.close()
902                         self._bump_mtime(pkg.mycpv)
903                         pkg._clear_contents_cache()
904
905         class _owners_cache(object):
906                 """
907                 This class maintains an hash table that serves to index package
908                 contents by mapping the basename of file to a list of possible
909                 packages that own it. This is used to optimize owner lookups
910                 by narrowing the search down to a smaller number of packages.
911                 """
912                 try:
913                         from hashlib import md5 as _new_hash
914                 except ImportError:
915                         from md5 import new as _new_hash
916
917                 _hash_bits = 16
918                 _hex_chars = int(_hash_bits / 4)
919
920                 def __init__(self, vardb):
921                         self._vardb = vardb
922
923                 def add(self, cpv):
924                         eroot_len = len(self._vardb._eroot)
925                         contents = self._vardb._dblink(cpv).getcontents()
926                         pkg_hash = self._hash_pkg(cpv)
927                         if not contents:
928                                 # Empty path is a code used to represent empty contents.
929                                 self._add_path("", pkg_hash)
930
931                         for x in contents:
932                                 self._add_path(x[eroot_len:], pkg_hash)
933
934                         self._vardb._aux_cache["modified"].add(cpv)
935
936                 def _add_path(self, path, pkg_hash):
937                         """
938                         Empty path is a code that represents empty contents.
939                         """
940                         if path:
941                                 name = os.path.basename(path.rstrip(os.path.sep))
942                                 if not name:
943                                         return
944                         else:
945                                 name = path
946                         name_hash = self._hash_str(name)
947                         base_names = self._vardb._aux_cache["owners"]["base_names"]
948                         pkgs = base_names.get(name_hash)
949                         if pkgs is None:
950                                 pkgs = {}
951                                 base_names[name_hash] = pkgs
952                         pkgs[pkg_hash] = None
953
954                 def _hash_str(self, s):
955                         h = self._new_hash()
956                         # Always use a constant utf_8 encoding here, since
957                         # the "default" encoding can change.
958                         h.update(_unicode_encode(s,
959                                 encoding=_encodings['repo.content'],
960                                 errors='backslashreplace'))
961                         h = h.hexdigest()
962                         h = h[-self._hex_chars:]
963                         h = int(h, 16)
964                         return h
965
966                 def _hash_pkg(self, cpv):
967                         counter, mtime = self._vardb.aux_get(
968                                 cpv, ["COUNTER", "_mtime_"])
969                         try:
970                                 counter = int(counter)
971                         except ValueError:
972                                 counter = 0
973                         return (cpv, counter, mtime)
974
	class _owners_db(object):
		"""
		Answers "which installed package owns this file" queries, using
		the basename index maintained by _owners_cache to narrow the
		search before CONTENTS files are consulted.
		"""

		def __init__(self, vardb):
			self._vardb = vardb

		def populate(self):
			self._populate()

		def _populate(self):
			"""
			Synchronize the basename index with the set of installed
			packages: drop malformed buckets, index packages missing
			from the cache, and purge entries for packages no longer
			installed.

			@returns: the synchronized _owners_cache instance
			"""
			owners_cache = vardbapi._owners_cache(self._vardb)
			cached_hashes = set()
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			# Take inventory of all cached package hashes.
			for name, hash_values in list(base_names.items()):
				if not isinstance(hash_values, dict):
					# Discard malformed cache buckets.
					del base_names[name]
					continue
				cached_hashes.update(hash_values)

			# Create sets of valid package hashes and uncached packages.
			uncached_pkgs = set()
			hash_pkg = owners_cache._hash_pkg
			valid_pkg_hashes = set()
			for cpv in self._vardb.cpv_all():
				hash_value = hash_pkg(cpv)
				valid_pkg_hashes.add(hash_value)
				if hash_value not in cached_hashes:
					uncached_pkgs.add(cpv)

			# Cache any missing packages.
			for cpv in uncached_pkgs:
				owners_cache.add(cpv)

			# Delete any stale cache.
			stale_hashes = cached_hashes.difference(valid_pkg_hashes)
			if stale_hashes:
				for base_name_hash, bucket in list(base_names.items()):
					for hash_value in stale_hashes.intersection(bucket):
						del bucket[hash_value]
					if not bucket:
						del base_names[base_name_hash]

			return owners_cache

		def get_owners(self, path_iter):
			"""
			@return the owners as a dblink -> set(files) mapping.
			"""
			owners = {}
			for owner, f in self.iter_owners(path_iter):
				owned_files = owners.get(owner)
				if owned_files is None:
					owned_files = set()
					owners[owner] = owned_files
				owned_files.add(f)
			return owners

		def getFileOwnerMap(self, path_iter):
			"""Invert get_owners(): map each file to its set of owning dblinks."""
			owners = self.get_owners(path_iter)
			file_owners = {}
			for pkg_dblink, files in owners.items():
				for f in files:
					owner_set = file_owners.get(f)
					if owner_set is None:
						owner_set = set()
						file_owners[f] = owner_set
					owner_set.add(pkg_dblink)
			return file_owners

		def iter_owners(self, path_iter):
			"""
			Iterate over tuples of (dblink, path). In order to avoid
			consuming too many resources for too much time, resources
			are only allocated for the duration of a given iter_owners()
			call. Therefore, to maximize reuse of resources when searching
			for multiple files, it's best to search for them all in a single
			call.
			"""

			if not isinstance(path_iter, list):
				path_iter = list(path_iter)
			owners_cache = self._populate()
			vardb = self._vardb
			root = vardb._eroot
			hash_pkg = owners_cache._hash_pkg
			hash_str = owners_cache._hash_str
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			dblink_cache = {}

			# NOTE: this local function shadows the module-level dblink
			# class within this scope.
			def dblink(cpv):
				x = dblink_cache.get(cpv)
				if x is None:
					if len(dblink_cache) > 20:
						# Ensure that we don't run out of memory.
						# This StopIteration is caught below so that
						# the low-memory fallback takes over.
						raise StopIteration()
					x = self._vardb._dblink(cpv)
					dblink_cache[cpv] = x
				return x

			while path_iter:

				path = path_iter.pop()
				# A path with no leading separator is a bare basename query.
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))

				if not name:
					continue

				name_hash = hash_str(name)
				pkgs = base_names.get(name_hash)
				owners = []
				if pkgs is not None:
					try:
						for hash_value in pkgs:
							# Validate the cached tuple shape before
							# trusting it (cache is disposable data).
							if not isinstance(hash_value, tuple) or \
								len(hash_value) != 3:
								continue
							cpv, counter, mtime = hash_value
							if not isinstance(cpv, basestring):
								continue
							try:
								current_hash = hash_pkg(cpv)
							except KeyError:
								continue

							# Skip entries for packages whose state
							# changed since they were indexed.
							if current_hash != hash_value:
								continue

							if is_basename:
								for p in dblink(cpv).getcontents():
									if os.path.basename(p) == name:
										owners.append((cpv, p[len(root):]))
							else:
								if dblink(cpv).isowner(path):
									owners.append((cpv, path))

					except StopIteration:
						# Too many dblinks cached: re-queue this path and
						# restart with the low-memory implementation.
						path_iter.append(path)
						del owners[:]
						dblink_cache.clear()
						gc.collect()
						for x in self._iter_owners_low_mem(path_iter):
							yield x
						return
					else:
						for cpv, p in owners:
							yield (dblink(cpv), p)

		def _iter_owners_low_mem(self, path_list):
			"""
			This implementation will make a short-lived dblink instance
			(and parse CONTENTS) for every single installed package. This
			is slower but uses less memory than the method which uses the
			basename cache.
			"""

			if not path_list:
				return

			path_info_list = []
			for path in path_list:
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))
				path_info_list.append((path, name, is_basename))

			root = self._vardb._eroot
			for cpv in self._vardb.cpv_all():
				dblnk =  self._vardb._dblink(cpv)

				for path, name, is_basename in path_info_list:
					if is_basename:
						for p in dblnk.getcontents():
							if os.path.basename(p) == name:
								yield dblnk, p[len(root):]
					else:
						if dblnk.isowner(path):
							yield dblnk, path
1160
class vartree(object):
	"""This tree will scan a var/db/pkg database located at root (passed to init)."""
	def __init__(self, root=None, virtual=None, categories=None,
		settings=None):
		# root, virtual and categories are accepted only for backward
		# compatibility; the effective root comes from settings['ROOT'].

		if settings is None:
			settings = portage.settings

		if root is not None and root != settings['ROOT']:
			warnings.warn("The 'root' parameter of the "
				"portage.dbapi.vartree.vartree"
				" constructor is now unused. Use "
				"settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)

		self.settings = settings
		self.dbapi = vardbapi(settings=settings, vartree=self)
		# There is no lazy population step, so mark as populated now.
		self.populated = 1

	@property
	def root(self):
		# Deprecated accessor kept so legacy callers keep working.
		warnings.warn("The root attribute of "
			"portage.dbapi.vartree.vartree"
			" is deprecated. Use "
			"settings['ROOT'] instead.",
			DeprecationWarning, stacklevel=2)
		return self.settings['ROOT']

	def getpath(self, mykey, filename=None):
		"""Delegate vdb path construction to the underlying vardbapi."""
		return self.dbapi.getpath(mykey, filename=filename)

	def zap(self, mycpv):
		# No-op, kept for interface compatibility with other tree types.
		return

	def inject(self, mycpv):
		# No-op, kept for interface compatibility with other tree types.
		return

	def get_provide(self, mycpv):
		"""
		Return the list of virtual cp values provided by the installed
		package, with USE conditionals in PROVIDE evaluated against the
		package's recorded USE flags.  Returns an empty list (after
		logging the problem) when the metadata cannot be parsed.
		"""
		myprovides = []
		mylines = None
		try:
			mylines, myuse = self.dbapi.aux_get(mycpv, ["PROVIDE", "USE"])
			if mylines:
				myuse = myuse.split()
				mylines = use_reduce(mylines, uselist=myuse, flat=True)
				for myprovide in mylines:
					mys = catpkgsplit(myprovide)
					if not mys:
						# Fall back to a naive split for unparsable atoms.
						mys = myprovide.split("/")
					myprovides += [mys[0] + "/" + mys[1]]
			return myprovides
		except SystemExit as e:
			raise
		except Exception as e:
			# Broad catch is deliberate: a corrupt vdb entry must not
			# abort the whole operation, so report it and return nothing.
			mydir = self.dbapi.getpath(mycpv)
			writemsg(_("\nParse Error reading PROVIDE and USE in '%s'\n") % mydir,
				noiselevel=-1)
			if mylines:
				writemsg(_("Possibly Invalid: '%s'\n") % str(mylines),
					noiselevel=-1)
			writemsg(_("Exception: %s\n\n") % str(e), noiselevel=-1)
			return []

	def get_all_provides(self):
		"""Map each provided virtual cp to the list of cpvs providing it."""
		myprovides = {}
		for node in self.getallcpv():
			for mykey in self.get_provide(node):
				if mykey in myprovides:
					myprovides[mykey] += [node]
				else:
					myprovides[mykey] = [node]
		return myprovides

	def dep_bestmatch(self, mydep, use_cache=1):
		"compatibility method -- all matches, not just visible ones"
		#mymatch=best(match(dep_expand(mydep,self.dbapi),self.dbapi))
		mymatch = best(self.dbapi.match(
			dep_expand(mydep, mydb=self.dbapi, settings=self.settings),
			use_cache=use_cache))
		if mymatch is None:
			return ""
		else:
			return mymatch

	def dep_match(self, mydep, use_cache=1):
		"compatibility method -- we want to see all matches, not just visible ones"
		#mymatch = match(mydep,self.dbapi)
		mymatch = self.dbapi.match(mydep, use_cache=use_cache)
		if mymatch is None:
			return []
		else:
			return mymatch

	def exists_specific(self, cpv):
		"""Return True if the exact cpv is installed."""
		return self.dbapi.cpv_exists(cpv)

	def getallcpv(self):
		"""temporary function, probably to be renamed --- Gets a list of all
		category/package-versions installed on the system."""
		return self.dbapi.cpv_all()

	def getallnodes(self):
		"""new behavior: these are all *unmasked* nodes.  There may or may not be available
		masked package for nodes in this nodes list."""
		return self.dbapi.cp_all()

	def getebuildpath(self, fullpackage):
		"""Return the path of the ebuild stored in the package's vdb entry."""
		cat, package = catsplit(fullpackage)
		return self.getpath(fullpackage, filename=package+".ebuild")

	def getslot(self, mycatpkg):
		"Get a slot for a catpkg; assume it exists."
		try:
			return self.dbapi.aux_get(mycatpkg, ["SLOT"])[0]
		except KeyError:
			# Caller is documented to assume existence; degrade to "".
			return ""

	def populate(self):
		# Nothing to populate; kept for interface compatibility.
		self.populated=1
1280
class dblink(object):
	"""
	This class provides an interface to the installed package database
	At present this is implemented as a text backend in /var/db/pkg.
	"""

	import re
	# Matches paths that still need normalize_path(): doubled slashes,
	# a path that does not start with "/", a trailing slash, or "." /
	# ".." components.
	_normalize_needed = re.compile(r'//|^[^/]|./$|(^|/)\.\.?(/|$)')

	# Parses one CONTENTS line into one of three named groups:
	#   dir -> dev/dir/fif entries (type + path)
	#   obj -> regular files (type + path + md5sum + mtime)
	#   sym -> symlinks (type + path + dest + mtime); the "oldsym"
	#          alternative matches the ancient tuple-style mtime format
	#          (see bug #351814, referenced in getcontents()).
	_contents_re = re.compile(r'^(' + \
		r'(?P<dir>(dev|dir|fif) (.+))|' + \
		r'(?P<obj>(obj) (.+) (\S+) (\d+))|' + \
		r'(?P<sym>(sym) (.+) -> (.+) ((\d+)|(?P<oldsym>(' + \
		r'\(\d+, \d+L, \d+L, \d+, \d+, \d+, \d+L, \d+, (\d+), \d+\)))))' + \
		r')$'
	)
1297
	def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None,
		vartree=None, blockers=None, scheduler=None, pipe=None):
		"""
		Creates a DBlink object for a given CPV.
		The given CPV may not be present in the database already.
		
		@param cat: Category
		@type cat: String
		@param pkg: Package (PV)
		@type pkg: String
		@param myroot: ignored, settings['EROOT'] is used instead
		@type myroot: String (Path)
		@param settings: Typically portage.settings
		@type settings: portage.config
		@param treetype: one of ['porttree','bintree','vartree']
		@type treetype: String
		@param vartree: an instance of vartree corresponding to myroot.
		@type vartree: vartree
		@param blockers: stored as self._blockers; semantics depend on the
			caller (presumably blocker resolution state -- TODO confirm)
		@param scheduler: stored as self._scheduler; used to run phases
		@param pipe: stored as self._pipe
		@raise TypeError: if settings is None
		"""

		if settings is None:
			raise TypeError("settings argument is required")

		mysettings = settings
		# EROOT is used for all keys, including self._hash_key below.
		self._eroot = mysettings['EROOT']
		self.cat = cat
		self.pkg = pkg
		self.mycpv = self.cat + "/" + self.pkg
		self.mysplit = list(catpkgsplit(self.mycpv)[1:])
		self.mysplit[0] = "%s/%s" % (self.cat, self.mysplit[0])
		self.treetype = treetype
		if vartree is None:
			# Fall back to the global vartree registered for this EROOT.
			vartree = portage.db[self._eroot]["vartree"]
		self.vartree = vartree
		self._blockers = blockers
		self._scheduler = scheduler
		self.dbroot = normalize_path(os.path.join(self._eroot, VDB_PATH))
		self.dbcatdir = self.dbroot+"/"+cat
		self.dbpkgdir = self.dbcatdir+"/"+pkg
		# Temporary directory used while this package is being merged.
		self.dbtmpdir = self.dbcatdir+"/-MERGING-"+pkg
		self.dbdir = self.dbpkgdir
		self.settings = mysettings
		self._verbose = self.settings.get("PORTAGE_VERBOSE") == "1"

		self.myroot = self.settings['ROOT']
		self._installed_instance = None
		# CONTENTS caches, populated lazily by getcontents() and
		# invalidated by _clear_contents_cache().
		self.contentscache = None
		self._contents_inodes = None
		self._contents_basenames = None
		self._linkmap_broken = False
		self._md5_merge_map = {}
		# Identity key for __hash__/__eq__: (EROOT, cpv).
		self._hash_key = (self._eroot, self.mycpv)
		self._protect_obj = None
		self._pipe = pipe
1352
1353         def __hash__(self):
1354                 return hash(self._hash_key)
1355
1356         def __eq__(self, other):
1357                 return isinstance(other, dblink) and \
1358                         self._hash_key == other._hash_key
1359
1360         def _get_protect_obj(self):
1361
1362                 if self._protect_obj is None:
1363                         self._protect_obj = ConfigProtect(self._eroot,
1364                         portage.util.shlex_split(
1365                                 self.settings.get("CONFIG_PROTECT", "")),
1366                         portage.util.shlex_split(
1367                                 self.settings.get("CONFIG_PROTECT_MASK", "")))
1368
1369                 return self._protect_obj
1370
1371         def isprotected(self, obj):
1372                 return self._get_protect_obj().isprotected(obj)
1373
1374         def updateprotect(self):
1375                 self._get_protect_obj().updateprotect()
1376
1377         def lockdb(self):
1378                 self.vartree.dbapi.lock()
1379
1380         def unlockdb(self):
1381                 self.vartree.dbapi.unlock()
1382
1383         def getpath(self):
1384                 "return path to location of db information (for >>> informational display)"
1385                 return self.dbdir
1386
1387         def exists(self):
1388                 "does the db entry exist?  boolean."
1389                 return os.path.exists(self.dbdir)
1390
1391         def delete(self):
1392                 """
1393                 Remove this entry from the database
1394                 """
1395                 if not os.path.exists(self.dbdir):
1396                         return
1397
1398                 # Check validity of self.dbdir before attempting to remove it.
1399                 if not self.dbdir.startswith(self.dbroot):
1400                         writemsg(_("portage.dblink.delete(): invalid dbdir: %s\n") % \
1401                                 self.dbdir, noiselevel=-1)
1402                         return
1403
1404                 shutil.rmtree(self.dbdir)
1405                 # If empty, remove parent category directory.
1406                 try:
1407                         os.rmdir(os.path.dirname(self.dbdir))
1408                 except OSError:
1409                         pass
1410                 self.vartree.dbapi._remove(self)
1411
1412         def clearcontents(self):
1413                 """
1414                 For a given db entry (self), erase the CONTENTS values.
1415                 """
1416                 self.lockdb()
1417                 try:
1418                         if os.path.exists(self.dbdir+"/CONTENTS"):
1419                                 os.unlink(self.dbdir+"/CONTENTS")
1420                 finally:
1421                         self.unlockdb()
1422
1423         def _clear_contents_cache(self):
1424                 self.contentscache = None
1425                 self._contents_inodes = None
1426                 self._contents_basenames = None
1427
	def getcontents(self):
		"""
		Get the installed files of a given package (aka what that package installed)

		Parses this package's CONTENTS file and returns a dict mapping
		each absolute path to a data tuple whose first element is the
		entry type:
			dev/dir/fif -> (type,)
			obj         -> (type, mtime, md5sum)
			sym         -> (type, mtime, dest)
		Parent directories are added implicitly as "dir" entries, and the
		result is cached in self.contentscache.  A missing CONTENTS file
		yields (and caches) an empty dict.
		"""
		contents_file = os.path.join(self.dbdir, "CONTENTS")
		# Serve from cache when a previous call already parsed the file.
		if self.contentscache is not None:
			return self.contentscache
		pkgfiles = {}
		try:
			myc = io.open(_unicode_encode(contents_file,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# No CONTENTS file: treat as an empty package and cache that.
			self.contentscache = pkgfiles
			return pkgfiles
		mylines = myc.readlines()
		myc.close()
		null_byte = "\0"
		normalize_needed = self._normalize_needed
		contents_re = self._contents_re
		# Group indices of the named alternatives in _contents_re; the
		# path/mtime/md5 fields are located at fixed offsets from these.
		obj_index = contents_re.groupindex['obj']
		dir_index = contents_re.groupindex['dir']
		sym_index = contents_re.groupindex['sym']
		# The old symlink format may exist on systems that have packages
		# which were installed many years ago (see bug #351814).
		oldsym_index = contents_re.groupindex['oldsym']
		# CONTENTS files already contain EPREFIX
		myroot = self.settings['ROOT']
		if myroot == os.path.sep:
			myroot = None
		# used to generate parent dir entries
		dir_entry = (_unicode_decode("dir"),)
		# Number of path components in EROOT; parent-dir generation stops
		# once the remaining path is no deeper than EROOT itself.
		eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
		pos = 0
		errors = []
		for pos, line in enumerate(mylines):
			if null_byte in line:
				# Null bytes are a common indication of corruption.
				errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
				continue
			line = line.rstrip("\n")
			m = contents_re.match(line)
			if m is None:
				errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
				continue

			if m.group(obj_index) is not None:
				base = obj_index
				#format: type, mtime, md5sum
				data = (m.group(base+1), m.group(base+4), m.group(base+3))
			elif m.group(dir_index) is not None:
				base = dir_index
				#format: type
				data = (m.group(base+1),)
			elif m.group(sym_index) is not None:
				base = sym_index
				if m.group(oldsym_index) is None:
					mtime = m.group(base+5)
				else:
					mtime = m.group(base+8)
				#format: type, mtime, dest
				data = (m.group(base+1), mtime, m.group(base+3))
			else:
				# This won't happen as long the regular expression
				# is written to only match valid entries.
				raise AssertionError(_("required group not found " + \
					"in CONTENTS entry: '%s'") % line)

			path = m.group(base+2)
			if normalize_needed.search(path) is not None:
				path = normalize_path(path)
				if not path.startswith(os.path.sep):
					path = os.path.sep + path

			if myroot is not None:
				path = os.path.join(myroot, path.lstrip(os.path.sep))

			# Implicitly add parent directories, since we can't necessarily
			# assume that they are explicitly listed in CONTENTS, and it's
			# useful for callers if they can rely on parent directory entries
			# being generated here (crucial for things like dblink.isowner()).
			path_split = path.split(os.sep)
			path_split.pop()
			while len(path_split) > eroot_split_len:
				parent = os.sep.join(path_split)
				if parent in pkgfiles:
					break
				pkgfiles[parent] = dir_entry
				path_split.pop()

			pkgfiles[path] = data

		if errors:
			writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
			for pos, e in errors:
				writemsg(_("!!!   line %d: %s\n") % (pos, e), noiselevel=-1)
		self.contentscache = pkgfiles
		return pkgfiles
1530
	def _prune_plib_registry(self, unmerge=False,
		needed=None, preserve_paths=None):
		"""
		Remove preserved libraries that no longer have any consumers,
		updating the PreservedLibsRegistry and affected packages'
		CONTENTS accordingly.  This is a no-op when the linkage map is
		broken or preserved-libs support is unavailable.

		@param unmerge: True when called while unmerging self.mycpv
		@param needed: filename listing libraries still needed after the
			unmerge (passed to _linkmap_rebuild as include_file)
		@param preserve_paths: paths preserved by a replacement package
			currently being merged (not yet in the registry)
		"""
		# remove preserved libraries that don't have any consumers left
		if not (self._linkmap_broken or
			self.vartree.dbapi._linkmap is None or
			self.vartree.dbapi._plib_registry is None):
			# Hold both the filesystem lock and the registry lock for the
			# whole rebuild/unregister/prune sequence.
			self.vartree.dbapi._fs_lock()
			plib_registry = self.vartree.dbapi._plib_registry
			plib_registry.lock()
			try:
				plib_registry.load()

				unmerge_with_replacement = \
					unmerge and preserve_paths is not None
				if unmerge_with_replacement:
					# If self.mycpv is about to be unmerged and we
					# have a replacement package, we want to exclude
					# the irrelevant NEEDED data that belongs to
					# files which are being unmerged now.
					exclude_pkgs = (self.mycpv,)
				else:
					exclude_pkgs = None

				self._linkmap_rebuild(exclude_pkgs=exclude_pkgs,
					include_file=needed, preserve_paths=preserve_paths)

				if unmerge:
					unmerge_preserve = None
					if not unmerge_with_replacement:
						# No replacement package: figure out which of our
						# own libs must be preserved past the unmerge.
						unmerge_preserve = \
							self._find_libs_to_preserve(unmerge=True)
					counter = self.vartree.dbapi.cpv_counter(self.mycpv)
					plib_registry.unregister(self.mycpv,
						self.settings["SLOT"], counter)
					if unmerge_preserve:
						for path in sorted(unmerge_preserve):
							contents_key = self._match_contents(path)
							if not contents_key:
								continue
							obj_type = self.getcontents()[contents_key][0]
							self._display_merge(_(">>> needed   %s %s\n") % \
								(obj_type, contents_key), noiselevel=-1)
						plib_registry.register(self.mycpv,
							self.settings["SLOT"], counter, unmerge_preserve)
						# Remove the preserved files from our contents
						# so that they won't be unmerged.
						self.vartree.dbapi.removeFromContents(self,
							unmerge_preserve)

				unmerge_no_replacement = \
					unmerge and not unmerge_with_replacement
				cpv_lib_map = self._find_unused_preserved_libs(
					unmerge_no_replacement)
				if cpv_lib_map:
					# Delete the unused preserved libs and drop them from
					# the CONTENTS of the packages that owned them.
					self._remove_preserved_libs(cpv_lib_map)
					self.vartree.dbapi.lock()
					try:
						for cpv, removed in cpv_lib_map.items():
							if not self.vartree.dbapi.cpv_exists(cpv):
								continue
							self.vartree.dbapi.removeFromContents(cpv, removed)
					finally:
						self.vartree.dbapi.unlock()

				# Persist any registry changes made above.
				plib_registry.store()
			finally:
				plib_registry.unlock()
				self.vartree.dbapi._fs_unlock()
1599
1600         def unmerge(self, pkgfiles=None, trimworld=None, cleanup=True,
1601                 ldpath_mtimes=None, others_in_slot=None, needed=None,
1602                 preserve_paths=None):
1603                 """
1604                 Calls prerm
1605                 Unmerges a given package (CPV)
1606                 calls postrm
1607                 calls cleanrm
1608                 calls env_update
1609                 
1610                 @param pkgfiles: files to unmerge (generally self.getcontents() )
1611                 @type pkgfiles: Dictionary
1612                 @param trimworld: Unused
1613                 @type trimworld: Boolean
1614                 @param cleanup: cleanup to pass to doebuild (see doebuild)
1615                 @type cleanup: Boolean
1616                 @param ldpath_mtimes: mtimes to pass to env_update (see env_update)
1617                 @type ldpath_mtimes: Dictionary
1618                 @param others_in_slot: all dblink instances in this slot, excluding self
1619                 @type others_in_slot: list
1620                 @param needed: Filename containing libraries needed after unmerge.
1621                 @type needed: String
1622                 @param preserve_paths: Libraries preserved by a package instance that
1623                         is currently being merged. They need to be explicitly passed to the
1624                         LinkageMap, since they are not registered in the
1625                         PreservedLibsRegistry yet.
1626                 @type preserve_paths: set
1627                 @rtype: Integer
1628                 @returns:
1629                 1. os.EX_OK if everything went well.
1630                 2. return code of the failed phase (for prerm, postrm, cleanrm)
1631                 """
1632
1633                 if trimworld is not None:
1634                         warnings.warn("The trimworld parameter of the " + \
1635                                 "portage.dbapi.vartree.dblink.unmerge()" + \
1636                                 " method is now unused.",
1637                                 DeprecationWarning, stacklevel=2)
1638
1639                 background = False
1640                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1641                 if self._scheduler is None:
1642                         # We create a scheduler instance and use it to
1643                         # log unmerge output separately from merge output.
1644                         self._scheduler = PollScheduler().sched_iface
1645                 if self.settings.get("PORTAGE_BACKGROUND") == "subprocess":
1646                         if self.settings.get("PORTAGE_BACKGROUND_UNMERGE") == "1":
1647                                 self.settings["PORTAGE_BACKGROUND"] = "1"
1648                                 self.settings.backup_changes("PORTAGE_BACKGROUND")
1649                                 background = True
1650                         elif self.settings.get("PORTAGE_BACKGROUND_UNMERGE") == "0":
1651                                 self.settings["PORTAGE_BACKGROUND"] = "0"
1652                                 self.settings.backup_changes("PORTAGE_BACKGROUND")
1653                 elif self.settings.get("PORTAGE_BACKGROUND") == "1":
1654                         background = True
1655
1656                 self.vartree.dbapi._bump_mtime(self.mycpv)
1657                 showMessage = self._display_merge
1658                 if self.vartree.dbapi._categories is not None:
1659                         self.vartree.dbapi._categories = None
1660                 # When others_in_slot is supplied, the security check has already been
1661                 # done for this slot, so it shouldn't be repeated until the next
1662                 # replacement or unmerge operation.
1663                 if others_in_slot is None:
1664                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
1665                         slot_matches = self.vartree.dbapi.match(
1666                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
1667                         others_in_slot = []
1668                         for cur_cpv in slot_matches:
1669                                 if cur_cpv == self.mycpv:
1670                                         continue
1671                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
1672                                         settings=self.settings, vartree=self.vartree,
1673                                         treetype="vartree", pipe=self._pipe))
1674
1675                         retval = self._security_check([self] + others_in_slot)
1676                         if retval:
1677                                 return retval
1678
1679                 contents = self.getcontents()
1680                 # Now, don't assume that the name of the ebuild is the same as the
1681                 # name of the dir; the package may have been moved.
1682                 myebuildpath = os.path.join(self.dbdir, self.pkg + ".ebuild")
1683                 failures = 0
1684                 ebuild_phase = "prerm"
1685                 mystuff = os.listdir(self.dbdir)
1686                 for x in mystuff:
1687                         if x.endswith(".ebuild"):
1688                                 if x[:-7] != self.pkg:
1689                                         # Clean up after vardbapi.move_ent() breakage in
1690                                         # portage versions before 2.1.2
1691                                         os.rename(os.path.join(self.dbdir, x), myebuildpath)
1692                                         write_atomic(os.path.join(self.dbdir, "PF"), self.pkg+"\n")
1693                                 break
1694
1695                 if self.mycpv != self.settings.mycpv or \
1696                         "EAPI" not in self.settings.configdict["pkg"]:
1697                         # We avoid a redundant setcpv call here when
1698                         # the caller has already taken care of it.
1699                         self.settings.setcpv(self.mycpv, mydb=self.vartree.dbapi)
1700
1701                 eapi_unsupported = False
1702                 try:
1703                         doebuild_environment(myebuildpath, "prerm",
1704                                 settings=self.settings, db=self.vartree.dbapi)
1705                 except UnsupportedAPIException as e:
1706                         eapi_unsupported = e
1707
1708                 self._prune_plib_registry(unmerge=True, needed=needed,
1709                         preserve_paths=preserve_paths)
1710
1711                 builddir_lock = None
1712                 scheduler = self._scheduler
1713                 retval = os.EX_OK
1714                 try:
1715                         # Only create builddir_lock if the caller
1716                         # has not already acquired the lock.
1717                         if "PORTAGE_BUILDIR_LOCKED" not in self.settings:
1718                                 builddir_lock = EbuildBuildDir(
1719                                         scheduler=scheduler,
1720                                         settings=self.settings)
1721                                 builddir_lock.lock()
1722                                 prepare_build_dirs(settings=self.settings, cleanup=True)
1723                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1724
1725                         # Log the error after PORTAGE_LOG_FILE is initialized
1726                         # by prepare_build_dirs above.
1727                         if eapi_unsupported:
1728                                 # Sometimes this happens due to corruption of the EAPI file.
1729                                 failures += 1
1730                                 showMessage(_("!!! FAILED prerm: %s\n") % \
1731                                         os.path.join(self.dbdir, "EAPI"),
1732                                         level=logging.ERROR, noiselevel=-1)
1733                                 showMessage(_unicode_decode("%s\n") % (eapi_unsupported,),
1734                                         level=logging.ERROR, noiselevel=-1)
1735                         elif os.path.isfile(myebuildpath):
1736                                 phase = EbuildPhase(background=background,
1737                                         phase=ebuild_phase, scheduler=scheduler,
1738                                         settings=self.settings)
1739                                 phase.start()
1740                                 retval = phase.wait()
1741
1742                                 # XXX: Decide how to handle failures here.
1743                                 if retval != os.EX_OK:
1744                                         failures += 1
1745                                         showMessage(_("!!! FAILED prerm: %s\n") % retval,
1746                                                 level=logging.ERROR, noiselevel=-1)
1747
1748                         self.vartree.dbapi._fs_lock()
1749                         try:
1750                                 self._unmerge_pkgfiles(pkgfiles, others_in_slot)
1751                         finally:
1752                                 self.vartree.dbapi._fs_unlock()
1753                         self._clear_contents_cache()
1754
1755                         if not eapi_unsupported and os.path.isfile(myebuildpath):
1756                                 ebuild_phase = "postrm"
1757                                 phase = EbuildPhase(background=background,
1758                                         phase=ebuild_phase, scheduler=scheduler,
1759                                         settings=self.settings)
1760                                 phase.start()
1761                                 retval = phase.wait()
1762
1763                                 # XXX: Decide how to handle failures here.
1764                                 if retval != os.EX_OK:
1765                                         failures += 1
1766                                         showMessage(_("!!! FAILED postrm: %s\n") % retval,
1767                                                 level=logging.ERROR, noiselevel=-1)
1768
1769                 finally:
1770                         self.vartree.dbapi._bump_mtime(self.mycpv)
1771                         try:
1772                                         if not eapi_unsupported and os.path.isfile(myebuildpath):
1773                                                 if retval != os.EX_OK:
1774                                                         msg_lines = []
1775                                                         msg = _("The '%(ebuild_phase)s' "
1776                                                         "phase of the '%(cpv)s' package "
1777                                                         "has failed with exit value %(retval)s.") % \
1778                                                         {"ebuild_phase":ebuild_phase, "cpv":self.mycpv,
1779                                                         "retval":retval}
1780                                                         from textwrap import wrap
1781                                                         msg_lines.extend(wrap(msg, 72))
1782                                                         msg_lines.append("")
1783
1784                                                         ebuild_name = os.path.basename(myebuildpath)
1785                                                         ebuild_dir = os.path.dirname(myebuildpath)
1786                                                         msg = _("The problem occurred while executing "
1787                                                         "the ebuild file named '%(ebuild_name)s' "
1788                                                         "located in the '%(ebuild_dir)s' directory. "
1789                                                         "If necessary, manually remove "
1790                                                         "the environment.bz2 file and/or the "
1791                                                         "ebuild file located in that directory.") % \
1792                                                         {"ebuild_name":ebuild_name, "ebuild_dir":ebuild_dir}
1793                                                         msg_lines.extend(wrap(msg, 72))
1794                                                         msg_lines.append("")
1795
1796                                                         msg = _("Removal "
1797                                                         "of the environment.bz2 file is "
1798                                                         "preferred since it may allow the "
1799                                                         "removal phases to execute successfully. "
1800                                                         "The ebuild will be "
1801                                                         "sourced and the eclasses "
1802                                                         "from the current portage tree will be used "
1803                                                         "when necessary. Removal of "
1804                                                         "the ebuild file will cause the "
1805                                                         "pkg_prerm() and pkg_postrm() removal "
1806                                                         "phases to be skipped entirely.")
1807                                                         msg_lines.extend(wrap(msg, 72))
1808
1809                                                         self._eerror(ebuild_phase, msg_lines)
1810
1811                                         self._elog_process(phasefilter=("prerm", "postrm"))
1812
1813                                         if retval == os.EX_OK:
1814                                                 try:
1815                                                         doebuild_environment(myebuildpath, "cleanrm",
1816                                                                 settings=self.settings, db=self.vartree.dbapi)
1817                                                 except UnsupportedAPIException:
1818                                                         pass
1819                                                 phase = EbuildPhase(background=background,
1820                                                         phase="cleanrm", scheduler=scheduler,
1821                                                         settings=self.settings)
1822                                                 phase.start()
1823                                                 retval = phase.wait()
1824                         finally:
1825                                         if builddir_lock is not None:
1826                                                 builddir_lock.unlock()
1827
1828                 if log_path is not None:
1829
1830                         if not failures and 'unmerge-logs' not in self.settings.features:
1831                                 try:
1832                                         os.unlink(log_path)
1833                                 except OSError:
1834                                         pass
1835
1836                         try:
1837                                 st = os.stat(log_path)
1838                         except OSError:
1839                                 pass
1840                         else:
1841                                 if st.st_size == 0:
1842                                         try:
1843                                                 os.unlink(log_path)
1844                                         except OSError:
1845                                                 pass
1846
1847                 if log_path is not None and os.path.exists(log_path):
1848                         # Restore this since it gets lost somewhere above and it
1849                         # needs to be set for _display_merge() to be able to log.
1850                         # Note that the log isn't necessarily supposed to exist
1851                         # since if PORT_LOGDIR is unset then it's a temp file
1852                         # so it gets cleaned above.
1853                         self.settings["PORTAGE_LOG_FILE"] = log_path
1854                 else:
1855                         self.settings.pop("PORTAGE_LOG_FILE", None)
1856
1857                 env_update(target_root=self.settings['ROOT'],
1858                         prev_mtimes=ldpath_mtimes,
1859                         contents=contents, env=self.settings,
1860                         writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi)
1861
1862                 return os.EX_OK
1863
1864         def _display_merge(self, msg, level=0, noiselevel=0):
1865                 if not self._verbose and noiselevel >= 0 and level < logging.WARN:
1866                         return
1867                 if self._scheduler is None:
1868                         writemsg_level(msg, level=level, noiselevel=noiselevel)
1869                 else:
1870                         log_path = None
1871                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
1872                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1873                         background = self.settings.get("PORTAGE_BACKGROUND") == "1"
1874
1875                         if background and log_path is None:
1876                                 if level >= logging.WARN:
1877                                         writemsg_level(msg, level=level, noiselevel=noiselevel)
1878                         else:
1879                                 self._scheduler.output(msg,
1880                                         log_path=log_path, background=background,
1881                                         level=level, noiselevel=noiselevel)
1882
1883         def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
1884                 """
1885                 
1886                 Unmerges the contents of a package from the liveFS
1887                 Removes the VDB entry for self
1888                 
1889                 @param pkgfiles: typically self.getcontents()
1890                 @type pkgfiles: Dictionary { filename: [ 'type', '?', 'md5sum' ] }
1891                 @param others_in_slot: all dblink instances in this slot, excluding self
1892                 @type others_in_slot: list
1893                 @rtype: None
1894                 """
1895
1896                 os = _os_merge
1897                 perf_md5 = perform_md5
1898                 showMessage = self._display_merge
1899
1900                 if not pkgfiles:
1901                         showMessage(_("No package files given... Grabbing a set.\n"))
1902                         pkgfiles = self.getcontents()
1903
1904                 if others_in_slot is None:
1905                         others_in_slot = []
1906                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
1907                         slot_matches = self.vartree.dbapi.match(
1908                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
1909                         for cur_cpv in slot_matches:
1910                                 if cur_cpv == self.mycpv:
1911                                         continue
1912                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
1913                                         settings=self.settings,
1914                                         vartree=self.vartree, treetype="vartree", pipe=self._pipe))
1915
1916                 dest_root = self._eroot
1917                 dest_root_len = len(dest_root) - 1
1918
1919                 cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file)
1920                 stale_confmem = []
1921                 protected_symlinks = {}
1922
1923                 unmerge_orphans = "unmerge-orphans" in self.settings.features
1924                 calc_prelink = "prelink-checksums" in self.settings.features
1925
1926                 if pkgfiles:
1927                         self.updateprotect()
1928                         mykeys = list(pkgfiles)
1929                         mykeys.sort()
1930                         mykeys.reverse()
1931
1932                         #process symlinks second-to-last, directories last.
1933                         mydirs = set()
1934                         ignored_unlink_errnos = (
1935                                 errno.EBUSY, errno.ENOENT,
1936                                 errno.ENOTDIR, errno.EISDIR)
1937                         ignored_rmdir_errnos = (
1938                                 errno.EEXIST, errno.ENOTEMPTY,
1939                                 errno.EBUSY, errno.ENOENT,
1940                                 errno.ENOTDIR, errno.EISDIR,
1941                                 errno.EPERM)
1942                         modprotect = os.path.join(self._eroot, "lib/modules/")
1943
			def unlink(file_name, lstatobj):
				# Remove a single filesystem entry, tolerating failures so
				# that the unmerge can proceed as far as possible.
				#
				# @param file_name: absolute path of the entry to remove
				# @param lstatobj: result of a prior os.lstat(file_name)
				if bsd_chflags:
					# BSD file flags (e.g. immutable) can prevent removal;
					# clear them on the file and its parent directory first.
					if lstatobj.st_flags != 0:
						bsd_chflags.lchflags(file_name, 0)
					parent_name = os.path.dirname(file_name)
					# Use normal stat/chflags for the parent since we want to
					# follow any symlinks to the real parent directory.
					pflags = os.stat(parent_name).st_flags
					if pflags != 0:
						bsd_chflags.chflags(parent_name, 0)
				try:
					if not stat.S_ISLNK(lstatobj.st_mode):
						# Remove permissions to ensure that any hardlinks to
						# suid/sgid files are rendered harmless.
						os.chmod(file_name, 0)
					os.unlink(file_name)
				except OSError as ose:
					# If the chmod or unlink fails, you are in trouble.
					# With Prefix this can be because the file is owned
					# by someone else (a screwup by root?), on a normal
					# system maybe filesystem corruption.  In any case,
					# if we backtrace and die here, we leave the system
					# in a totally undefined state, hence we just bleed
					# like hell and continue to hopefully finish all our
					# administrative and pkg_postinst stuff.
					self._eerror("postrm", 
						["Could not chmod or unlink '%s': %s" % \
						(file_name, ose)])
				finally:
					# pflags is only bound when bsd_chflags is set above,
					# so the combined guard keeps this safe on non-BSD.
					if bsd_chflags and pflags != 0:
						# Restore the parent flags we saved before unlinking
						bsd_chflags.chflags(parent_name, pflags)
1976
1977                         def show_unmerge(zing, desc, file_type, file_name):
1978                                         showMessage("%s %s %s %s\n" % \
1979                                                 (zing, desc.ljust(8), file_type, file_name))
1980
1981                         unmerge_desc = {}
1982                         unmerge_desc["cfgpro"] = _("cfgpro")
1983                         unmerge_desc["replaced"] = _("replaced")
1984                         unmerge_desc["!dir"] = _("!dir")
1985                         unmerge_desc["!empty"] = _("!empty")
1986                         unmerge_desc["!fif"] = _("!fif")
1987                         unmerge_desc["!found"] = _("!found")
1988                         unmerge_desc["!md5"] = _("!md5")
1989                         unmerge_desc["!mtime"] = _("!mtime")
1990                         unmerge_desc["!obj"] = _("!obj")
1991                         unmerge_desc["!sym"] = _("!sym")
1992                         unmerge_desc["!prefix"] = _("!prefix")
1993
1994                         real_root = self.settings['ROOT']
1995                         real_root_len = len(real_root) - 1
1996                         eroot = self.settings["EROOT"]
1997
1998                         # These files are generated by emerge, so we need to remove
1999                         # them when they are the only thing left in a directory.
2000                         infodir_cleanup = frozenset(["dir", "dir.old"])
2001                         infodirs = frozenset(infodir for infodir in chain(
2002                                 self.settings.get("INFOPATH", "").split(":"),
2003                                 self.settings.get("INFODIR", "").split(":")) if infodir)
2004                         infodirs_inodes = set()
2005                         for infodir in infodirs:
2006                                 infodir = os.path.join(real_root, infodir.lstrip(os.sep))
2007                                 try:
2008                                         statobj = os.stat(infodir)
2009                                 except OSError:
2010                                         pass
2011                                 else:
2012                                         infodirs_inodes.add((statobj.st_dev, statobj.st_ino))
2013
2014                         for i, objkey in enumerate(mykeys):
2015
2016                                 obj = normalize_path(objkey)
2017                                 if os is _os_merge:
2018                                         try:
2019                                                 _unicode_encode(obj,
2020                                                         encoding=_encodings['merge'], errors='strict')
2021                                         except UnicodeEncodeError:
2022                                                 # The package appears to have been merged with a 
2023                                                 # different value of sys.getfilesystemencoding(),
2024                                                 # so fall back to utf_8 if appropriate.
2025                                                 try:
2026                                                         _unicode_encode(obj,
2027                                                                 encoding=_encodings['fs'], errors='strict')
2028                                                 except UnicodeEncodeError:
2029                                                         pass
2030                                                 else:
2031                                                         os = portage.os
2032                                                         perf_md5 = portage.checksum.perform_md5
2033
2034                                 file_data = pkgfiles[objkey]
2035                                 file_type = file_data[0]
2036
2037                                 # don't try to unmerge the prefix offset itself
2038                                 if len(obj) <= len(eroot) or not obj.startswith(eroot):
2039                                         show_unmerge("---", unmerge_desc["!prefix"], file_type, obj)
2040                                         continue
2041
2042                                 statobj = None
2043                                 try:
2044                                         statobj = os.stat(obj)
2045                                 except OSError:
2046                                         pass
2047                                 lstatobj = None
2048                                 try:
2049                                         lstatobj = os.lstat(obj)
2050                                 except (OSError, AttributeError):
2051                                         pass
2052                                 islink = lstatobj is not None and stat.S_ISLNK(lstatobj.st_mode)
2053                                 if lstatobj is None:
2054                                                 show_unmerge("---", unmerge_desc["!found"], file_type, obj)
2055                                                 continue
2056                                 # don't use EROOT, CONTENTS entries already contain EPREFIX
2057                                 if obj.startswith(real_root):
2058                                         relative_path = obj[real_root_len:]
2059                                         is_owned = False
2060                                         for dblnk in others_in_slot:
2061                                                 if dblnk.isowner(relative_path):
2062                                                         is_owned = True
2063                                                         break
2064
2065                                         if file_type == "sym" and is_owned and \
2066                                                 (islink and statobj and stat.S_ISDIR(statobj.st_mode)):
2067                                                 # A new instance of this package claims the file, so
2068                                                 # don't unmerge it. If the file is symlink to a
2069                                                 # directory and the unmerging package installed it as
2070                                                 # a symlink, but the new owner has it listed as a
2071                                                 # directory, then we'll produce a warning since the
2072                                                 # symlink is a sort of orphan in this case (see
2073                                                 # bug #326685).
2074                                                 symlink_orphan = False
2075                                                 for dblnk in others_in_slot:
2076                                                         parent_contents_key = \
2077                                                                 dblnk._match_contents(relative_path)
2078                                                         if not parent_contents_key:
2079                                                                 continue
2080                                                         if not parent_contents_key.startswith(
2081                                                                 real_root):
2082                                                                 continue
2083                                                         if dblnk.getcontents()[
2084                                                                 parent_contents_key][0] == "dir":
2085                                                                 symlink_orphan = True
2086                                                                 break
2087
2088                                                 if symlink_orphan:
2089                                                         protected_symlinks.setdefault(
2090                                                                 (statobj.st_dev, statobj.st_ino),
2091                                                                 []).append(relative_path)
2092
2093                                         if is_owned:
2094                                                 show_unmerge("---", unmerge_desc["replaced"], file_type, obj)
2095                                                 continue
2096                                         elif relative_path in cfgfiledict:
2097                                                 stale_confmem.append(relative_path)
2098                                 # next line includes a tweak to protect modules from being unmerged,
2099                                 # but we don't protect modules from being overwritten if they are
2100                                 # upgraded. We effectively only want one half of the config protection
2101                                 # functionality for /lib/modules. For portage-ng both capabilities
2102                                 # should be able to be independently specified.
2103                                 # TODO: For rebuilds, re-parent previous modules to the new
2104                                 # installed instance (so they are not orphans). For normal
2105                                 # uninstall (not rebuild/reinstall), remove the modules along
2106                                 # with all other files (leave no orphans).
2107                                 if obj.startswith(modprotect):
2108                                         show_unmerge("---", unmerge_desc["cfgpro"], file_type, obj)
2109                                         continue
2110
2111                                 # Don't unlink symlinks to directories here since that can
2112                                 # remove /lib and /usr/lib symlinks.
2113                                 if unmerge_orphans and \
2114                                         lstatobj and not stat.S_ISDIR(lstatobj.st_mode) and \
2115                                         not (islink and statobj and stat.S_ISDIR(statobj.st_mode)) and \
2116                                         not self.isprotected(obj):
2117                                         try:
2118                                                 unlink(obj, lstatobj)
2119                                         except EnvironmentError as e:
2120                                                 if e.errno not in ignored_unlink_errnos:
2121                                                         raise
2122                                                 del e
2123                                         show_unmerge("<<<", "", file_type, obj)
2124                                         continue
2125
2126                                 lmtime = str(lstatobj[stat.ST_MTIME])
2127                                 if (pkgfiles[objkey][0] not in ("dir", "fif", "dev")) and (lmtime != pkgfiles[objkey][1]):
2128                                         show_unmerge("---", unmerge_desc["!mtime"], file_type, obj)
2129                                         continue
2130
2131                                 if pkgfiles[objkey][0] == "dir":
2132                                         if lstatobj is None or not stat.S_ISDIR(lstatobj.st_mode):
2133                                                 show_unmerge("---", unmerge_desc["!dir"], file_type, obj)
2134                                                 continue
2135                                         mydirs.add((obj, (lstatobj.st_dev, lstatobj.st_ino)))
2136                                 elif pkgfiles[objkey][0] == "sym":
2137                                         if not islink:
2138                                                 show_unmerge("---", unmerge_desc["!sym"], file_type, obj)
2139                                                 continue
2140
2141                                         # If this symlink points to a directory then we don't want
2142                                         # to unmerge it if there are any other packages that
2143                                         # installed files into the directory via this symlink
2144                                         # (see bug #326685).
2145                                         # TODO: Resolving a symlink to a directory will require
2146                                         # simulation if $ROOT != / and the link is not relative.
2147                                         if islink and statobj and stat.S_ISDIR(statobj.st_mode) \
2148                                                 and obj.startswith(real_root):
2149
2150                                                 relative_path = obj[real_root_len:]
2151                                                 try:
2152                                                         target_dir_contents = os.listdir(obj)
2153                                                 except OSError:
2154                                                         pass
2155                                                 else:
2156                                                         if target_dir_contents:
2157                                                                 # If all the children are regular files owned
2158                                                                 # by this package, then the symlink should be
2159                                                                 # safe to unmerge.
2160                                                                 all_owned = True
2161                                                                 for child in target_dir_contents:
2162                                                                         child = os.path.join(relative_path, child)
2163                                                                         if not self.isowner(child):
2164                                                                                 all_owned = False
2165                                                                                 break
2166                                                                         try:
2167                                                                                 child_lstat = os.lstat(os.path.join(
2168                                                                                         real_root, child.lstrip(os.sep)))
2169                                                                         except OSError:
2170                                                                                 continue
2171
2172                                                                         if not stat.S_ISREG(child_lstat.st_mode):
2173                                                                                 # Nested symlinks or directories make
2174                                                                                 # the issue very complex, so just
2175                                                                                 # preserve the symlink in order to be
2176                                                                                 # on the safe side.
2177                                                                                 all_owned = False
2178                                                                                 break
2179
2180                                                                 if not all_owned:
2181                                                                         protected_symlinks.setdefault(
2182                                                                                 (statobj.st_dev, statobj.st_ino),
2183                                                                                 []).append(relative_path)
2184                                                                         show_unmerge("---", unmerge_desc["!empty"],
2185                                                                                 file_type, obj)
2186                                                                         continue
2187
2188                                         # Go ahead and unlink symlinks to directories here when
2189                                         # they're actually recorded as symlinks in the contents.
2190                                         # Normally, symlinks such as /lib -> lib64 are not recorded
2191                                         # as symlinks in the contents of a package.  If a package
2192                                         # installs something into ${D}/lib/, it is recorded in the
2193                                         # contents as a directory even if it happens to correspond
2194                                         # to a symlink when it's merged to the live filesystem.
2195                                         try:
2196                                                 unlink(obj, lstatobj)
2197                                                 show_unmerge("<<<", "", file_type, obj)
2198                                         except (OSError, IOError) as e:
2199                                                 if e.errno not in ignored_unlink_errnos:
2200                                                         raise
2201                                                 del e
2202                                                 show_unmerge("!!!", "", file_type, obj)
2203                                 elif pkgfiles[objkey][0] == "obj":
2204                                         if statobj is None or not stat.S_ISREG(statobj.st_mode):
2205                                                 show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
2206                                                 continue
2207                                         mymd5 = None
2208                                         try:
2209                                                 mymd5 = perf_md5(obj, calc_prelink=calc_prelink)
2210                                         except FileNotFound as e:
2211                                                 # the file has disappeared between now and our stat call
2212                                                 show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
2213                                                 continue
2214
2215                                         # string.lower is needed because db entries used to be in upper-case.  The
2216                                         # string.lower allows for backwards compatibility.
2217                                         if mymd5 != pkgfiles[objkey][2].lower():
2218                                                 show_unmerge("---", unmerge_desc["!md5"], file_type, obj)
2219                                                 continue
2220                                         try:
2221                                                 unlink(obj, lstatobj)
2222                                         except (OSError, IOError) as e:
2223                                                 if e.errno not in ignored_unlink_errnos:
2224                                                         raise
2225                                                 del e
2226                                         show_unmerge("<<<", "", file_type, obj)
2227                                 elif pkgfiles[objkey][0] == "fif":
2228                                         if not stat.S_ISFIFO(lstatobj[stat.ST_MODE]):
2229                                                 show_unmerge("---", unmerge_desc["!fif"], file_type, obj)
2230                                                 continue
2231                                         show_unmerge("---", "", file_type, obj)
2232                                 elif pkgfiles[objkey][0] == "dev":
2233                                         show_unmerge("---", "", file_type, obj)
2234
2235                         mydirs = sorted(mydirs)
2236                         mydirs.reverse()
2237
2238                         for obj, inode_key in mydirs:
2239                                 # Treat any directory named "info" as a candidate here,
2240                                 # since it might have been in INFOPATH previously even
2241                                 # though it may not be there now.
2242                                 if inode_key in infodirs_inodes or \
2243                                         os.path.basename(obj) == "info":
2244                                         try:
2245                                                 remaining = os.listdir(obj)
2246                                         except OSError:
2247                                                 pass
2248                                         else:
2249                                                 cleanup_info_dir = ()
2250                                                 if remaining and \
2251                                                         len(remaining) <= len(infodir_cleanup):
2252                                                         if not set(remaining).difference(infodir_cleanup):
2253                                                                 cleanup_info_dir = remaining
2254
2255                                                 for child in cleanup_info_dir:
2256                                                         child = os.path.join(obj, child)
2257                                                         try:
2258                                                                 lstatobj = os.lstat(child)
2259                                                                 if stat.S_ISREG(lstatobj.st_mode):
2260                                                                         unlink(child, lstatobj)
2261                                                                         show_unmerge("<<<", "", "obj", child)
2262                                                         except EnvironmentError as e:
2263                                                                 if e.errno not in ignored_unlink_errnos:
2264                                                                         raise
2265                                                                 del e
2266                                                                 show_unmerge("!!!", "", "obj", child)
2267                                 try:
2268                                         if bsd_chflags:
2269                                                 lstatobj = os.lstat(obj)
2270                                                 if lstatobj.st_flags != 0:
2271                                                         bsd_chflags.lchflags(obj, 0)
2272                                                 parent_name = os.path.dirname(obj)
2273                                                 # Use normal stat/chflags for the parent since we want to
2274                                                 # follow any symlinks to the real parent directory.
2275                                                 pflags = os.stat(parent_name).st_flags
2276                                                 if pflags != 0:
2277                                                         bsd_chflags.chflags(parent_name, 0)
2278                                         try:
2279                                                 os.rmdir(obj)
2280                                         finally:
2281                                                 if bsd_chflags and pflags != 0:
2282                                                         # Restore the parent flags we saved before unlinking
2283                                                         bsd_chflags.chflags(parent_name, pflags)
2284                                         show_unmerge("<<<", "", "dir", obj)
2285                                 except EnvironmentError as e:
2286                                         if e.errno not in ignored_rmdir_errnos:
2287                                                 raise
2288                                         if e.errno != errno.ENOENT:
2289                                                 show_unmerge("---", unmerge_desc["!empty"], "dir", obj)
2290                                         del e
2291                                 else:
2292                                         # When a directory is successfully removed, there's
2293                                         # no need to protect symlinks that point to it.
2294                                         unmerge_syms = protected_symlinks.pop(inode_key, None)
2295                                         if unmerge_syms is not None:
2296                                                 for relative_path in unmerge_syms:
2297                                                         obj = os.path.join(real_root,
2298                                                                 relative_path.lstrip(os.sep))
2299                                                         try:
2300                                                                 unlink(obj, os.lstat(obj))
2301                                                                 show_unmerge("<<<", "", "sym", obj)
2302                                                         except (OSError, IOError) as e:
2303                                                                 if e.errno not in ignored_unlink_errnos:
2304                                                                         raise
2305                                                                 del e
2306                                                                 show_unmerge("!!!", "", "sym", obj)
2307
2308                 if protected_symlinks:
2309                         msg = "One or more symlinks to directories have been " + \
2310                                 "preserved in order to ensure that files installed " + \
2311                                 "via these symlinks remain accessible:"
2312                         lines = textwrap.wrap(msg, 72)
2313                         lines.append("")
2314                         flat_list = set()
2315                         flat_list.update(*protected_symlinks.values())
2316                         flat_list = sorted(flat_list)
2317                         for f in flat_list:
2318                                 lines.append("\t%s" % (os.path.join(real_root,
2319                                         f.lstrip(os.sep))))
2320                         lines.append("")
2321                         self._elog("eerror", "postrm", lines)
2322
2323                 # Remove stale entries from config memory.
2324                 if stale_confmem:
2325                         for filename in stale_confmem:
2326                                 del cfgfiledict[filename]
2327                         writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
2328
2329                 #remove self from vartree database so that our own virtual gets zapped if we're the last node
2330                 self.vartree.zap(self.mycpv)
2331
2332         def isowner(self, filename, destroot=None):
2333                 """ 
2334                 Check if a file belongs to this package. This may
2335                 result in a stat call for the parent directory of
2336                 every installed file, since the inode numbers are
2337                 used to work around the problem of ambiguous paths
2338                 caused by symlinked directories. The results of
2339                 stat calls are cached to optimize multiple calls
2340                 to this method.
2341
2342                 @param filename:
2343                 @type filename:
2344                 @param destroot:
2345                 @type destroot:
2346                 @rtype: Boolean
2347                 @returns:
2348                 1. True if this package owns the file.
2349                 2. False if this package does not own the file.
2350                 """
2351
2352                 if destroot is not None and destroot != self._eroot:
2353                         warnings.warn("The second parameter of the " + \
2354                                 "portage.dbapi.vartree.dblink.isowner()" + \
2355                                 " is now unused. Instead " + \
2356                                 "self.settings['EROOT'] will be used.",
2357                                 DeprecationWarning, stacklevel=2)
2358
2359                 return bool(self._match_contents(filename))
2360
	def _match_contents(self, filename, destroot=None):
		"""
		The matching contents entry is returned, which is useful
		since the path may differ from the one given by the caller,
		due to symlinks.

		@param filename: path of the file to look up, interpreted
			relative to ROOT
		@param destroot: deprecated and unused; self.settings['ROOT']
			is used instead
		@rtype: String
		@return: the contents entry corresponding to the given path, or False
			if the file is not owned by this package.
		"""

		filename = _unicode_decode(filename,
			encoding=_encodings['content'], errors='strict')

		if destroot is not None and destroot != self._eroot:
			warnings.warn("The second parameter of the " + \
				"portage.dbapi.vartree.dblink._match_contents()" + \
				" is now unused. Instead " + \
				"self.settings['ROOT'] will be used.",
				DeprecationWarning, stacklevel=2)

		# don't use EROOT here, image already contains EPREFIX
		destroot = self.settings['ROOT']

		# The given filename argument might have a different encoding than
		# the filenames contained in the contents, so use separate wrapped os
		# modules for each. The basename is more likely to contain non-ascii
		# characters than the directory path, so use os_filename_arg for all
		# operations involving the basename of the filename arg.
		os_filename_arg = _os_merge
		os = _os_merge

		try:
			_unicode_encode(filename,
				encoding=_encodings['merge'], errors='strict')
		except UnicodeEncodeError:
			# The package appears to have been merged with a
			# different value of sys.getfilesystemencoding(),
			# so fall back to utf_8 if appropriate.
			try:
				_unicode_encode(filename,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeEncodeError:
				pass
			else:
				os_filename_arg = portage.os

		# Normalized absolute path used as the CONTENTS lookup key.
		destfile = normalize_path(
			os_filename_arg.path.join(destroot,
			filename.lstrip(os_filename_arg.path.sep)))

		pkgfiles = self.getcontents()
		if pkgfiles and destfile in pkgfiles:
			# Exact match: the file is listed in CONTENTS as-is.
			return destfile
		if pkgfiles:
			basename = os_filename_arg.path.basename(destfile)
			if self._contents_basenames is None:
				# Lazily build the basename cache, probing the contents
				# encoding first so the correct os wrapper is selected.
				try:
					for x in pkgfiles:
						_unicode_encode(x,
							encoding=_encodings['merge'],
							errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['fs'],
								errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

				self._contents_basenames = set(
					os.path.basename(x) for x in pkgfiles)
			if basename not in self._contents_basenames:
				# This is a shortcut that, in most cases, allows us to
				# eliminate this package as an owner without the need
				# to examine inode numbers of parent directories.
				return False

			# Use stat rather than lstat since we want to follow
			# any symlinks to the real parent directory.
			parent_path = os_filename_arg.path.dirname(destfile)
			try:
				parent_stat = os_filename_arg.stat(parent_path)
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
				return False
			if self._contents_inodes is None:
				# Lazily build the (st_dev, st_ino) -> parent-path cache,
				# re-probing the encoding only if it wasn't already
				# resolved while building the basename cache above.
				if os is _os_merge:
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['merge'],
								errors='strict')
					except UnicodeEncodeError:
						# The package appears to have been merged with a
						# different value of sys.getfilesystemencoding(),
						# so fall back to utf_8 if appropriate.
						try:
							for x in pkgfiles:
								_unicode_encode(x,
									encoding=_encodings['fs'],
									errors='strict')
						except UnicodeEncodeError:
							pass
						else:
							os = portage.os

				self._contents_inodes = {}
				parent_paths = set()
				for x in pkgfiles:
					p_path = os.path.dirname(x)
					if p_path in parent_paths:
						continue
					parent_paths.add(p_path)
					try:
						s = os.stat(p_path)
					except OSError:
						pass
					else:
						inode_key = (s.st_dev, s.st_ino)
						# Use lists of paths in case multiple
						# paths reference the same inode.
						p_path_list = self._contents_inodes.get(inode_key)
						if p_path_list is None:
							p_path_list = []
							self._contents_inodes[inode_key] = p_path_list
						if p_path not in p_path_list:
							p_path_list.append(p_path)

			# Resolve symlinked directories: any contents parent dir that
			# shares the inode of the argument's parent dir is a match
			# candidate when joined with the argument's basename.
			p_path_list = self._contents_inodes.get(
				(parent_stat.st_dev, parent_stat.st_ino))
			if p_path_list:
				for p_path in p_path_list:
					x = os_filename_arg.path.join(p_path, basename)
					if x in pkgfiles:
						return x

		return False
2509
2510         def _linkmap_rebuild(self, **kwargs):
2511                 """
2512                 Rebuild the self._linkmap if it's not broken due to missing
2513                 scanelf binary. Also, return early if preserve-libs is disabled
2514                 and the preserve-libs registry is empty.
2515                 """
2516                 if self._linkmap_broken or \
2517                         self.vartree.dbapi._linkmap is None or \
2518                         self.vartree.dbapi._plib_registry is None or \
2519                         ("preserve-libs" not in self.settings.features and \
2520                         not self.vartree.dbapi._plib_registry.hasEntries()):
2521                         return
2522                 try:
2523                         self.vartree.dbapi._linkmap.rebuild(**kwargs)
2524                 except CommandNotFound as e:
2525                         self._linkmap_broken = True
2526                         self._display_merge(_("!!! Disabling preserve-libs " \
2527                                 "due to error: Command Not Found: %s\n") % (e,),
2528                                 level=logging.ERROR, noiselevel=-1)
2529
	def _find_libs_to_preserve(self, unmerge=False):
		"""
		Get set of relative paths for libraries to be preserved. When
		unmerge is False, file paths to preserve are selected from
		self._installed_instance. Otherwise, paths are selected from
		self.

		@param unmerge: True when called for a plain unmerge (no
			replacement package); selects paths from self instead of
			self._installed_instance
		@rtype: set
		@return: set of paths (relative to ROOT) of libraries that
			still have consumers and therefore must be preserved;
			empty set when preserve-libs is unavailable or disabled
		"""
		if self._linkmap_broken or \
			self.vartree.dbapi._linkmap is None or \
			self.vartree.dbapi._plib_registry is None or \
			(not unmerge and self._installed_instance is None) or \
			"preserve-libs" not in self.settings.features:
			return set()

		os = _os_merge
		linkmap = self.vartree.dbapi._linkmap
		if unmerge:
			installed_instance = self
		else:
			installed_instance = self._installed_instance
		old_contents = installed_instance.getcontents()
		root = self.settings['ROOT']
		# Subtract 1 so the leading os.sep is kept when slicing
		# absolute contents paths down to ROOT-relative paths.
		root_len = len(root) - 1
		lib_graph = digraph()
		path_node_map = {}

		def path_to_node(path):
			# Map a path to its graph node, merging alternative paths
			# that resolve to the same object key into a single node.
			node = path_node_map.get(path)
			if node is None:
				node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
				alt_path_node = lib_graph.get(node)
				if alt_path_node is not None:
					node = alt_path_node
				node.alt_paths.add(path)
				path_node_map[path] = node
			return node

		consumer_map = {}
		provider_nodes = set()
		# Create provider nodes and add them to the graph.
		for f_abs in old_contents:

			if os is _os_merge:
				try:
					_unicode_encode(f_abs,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						_unicode_encode(f_abs,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

			f = f_abs[root_len:]
			if not unmerge and self.isowner(f):
				# We have an identically named replacement file,
				# so we don't try to preserve the old copy.
				continue
			try:
				consumers = linkmap.findConsumers(f,
					exclude_providers=(installed_instance.isowner,))
			except KeyError:
				# Not a known ELF object; nothing to preserve.
				continue
			if not consumers:
				continue
			provider_node = path_to_node(f)
			lib_graph.add(provider_node, None)
			provider_nodes.add(provider_node)
			consumer_map[provider_node] = consumers

		# Create consumer nodes and add them to the graph.
		# Note that consumers can also be providers.
		for provider_node, consumers in consumer_map.items():
			for c in consumers:
				consumer_node = path_to_node(c)
				if installed_instance.isowner(c) and \
					consumer_node not in provider_nodes:
					# This is not a provider, so it will be uninstalled.
					continue
				lib_graph.add(provider_node, consumer_node)

		# Locate nodes which should be preserved. They consist of all
		# providers that are reachable from consumers that are not
		# providers themselves.
		preserve_nodes = set()
		for consumer_node in lib_graph.root_nodes():
			if consumer_node in provider_nodes:
				continue
			# Preserve all providers that are reachable from this consumer.
			node_stack = lib_graph.child_nodes(consumer_node)
			while node_stack:
				provider_node = node_stack.pop()
				if provider_node in preserve_nodes:
					continue
				preserve_nodes.add(provider_node)
				node_stack.extend(lib_graph.child_nodes(provider_node))

		preserve_paths = set()
		for preserve_node in preserve_nodes:
			# Preserve the library itself, and also preserve the
			# soname symlink which is the only symlink that is
			# strictly required.
			hardlinks = set()
			soname_symlinks = set()
			soname = linkmap.getSoname(next(iter(preserve_node.alt_paths)))
			for f in preserve_node.alt_paths:
				f_abs = os.path.join(root, f.lstrip(os.sep))
				try:
					if stat.S_ISREG(os.lstat(f_abs).st_mode):
						hardlinks.add(f)
					elif os.path.basename(f) == soname:
						soname_symlinks.add(f)
				except OSError:
					pass

			if hardlinks:
				preserve_paths.update(hardlinks)
				preserve_paths.update(soname_symlinks)

		return preserve_paths
2655
2656         def _add_preserve_libs_to_contents(self, preserve_paths):
2657                 """
2658                 Preserve libs returned from _find_libs_to_preserve().
2659                 """
2660
2661                 if not preserve_paths:
2662                         return
2663
2664                 os = _os_merge
2665                 showMessage = self._display_merge
2666                 root = self.settings['ROOT']
2667
2668                 # Copy contents entries from the old package to the new one.
2669                 new_contents = self.getcontents().copy()
2670                 old_contents = self._installed_instance.getcontents()
2671                 for f in sorted(preserve_paths):
2672                         f = _unicode_decode(f,
2673                                 encoding=_encodings['content'], errors='strict')
2674                         f_abs = os.path.join(root, f.lstrip(os.sep))
2675                         contents_entry = old_contents.get(f_abs)
2676                         if contents_entry is None:
2677                                 # This will probably never happen, but it might if one of the
2678                                 # paths returned from findConsumers() refers to one of the libs
2679                                 # that should be preserved yet the path is not listed in the
2680                                 # contents. Such a path might belong to some other package, so
2681                                 # it shouldn't be preserved here.
2682                                 showMessage(_("!!! File '%s' will not be preserved "
2683                                         "due to missing contents entry\n") % (f_abs,),
2684                                         level=logging.ERROR, noiselevel=-1)
2685                                 preserve_paths.remove(f)
2686                                 continue
2687                         new_contents[f_abs] = contents_entry
2688                         obj_type = contents_entry[0]
2689                         showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
2690                                 noiselevel=-1)
2691                         # Add parent directories to contents if necessary.
2692                         parent_dir = os.path.dirname(f_abs)
2693                         while len(parent_dir) > len(root):
2694                                 new_contents[parent_dir] = ["dir"]
2695                                 prev = parent_dir
2696                                 parent_dir = os.path.dirname(parent_dir)
2697                                 if prev == parent_dir:
2698                                         break
2699                 outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
2700                 write_contents(new_contents, root, outfile)
2701                 outfile.close()
2702                 self._clear_contents_cache()
2703
2704         def _find_unused_preserved_libs(self, unmerge_no_replacement):
2705                 """
2706                 Find preserved libraries that don't have any consumers left.
2707                 """
2708
2709                 if self._linkmap_broken or \
2710                         self.vartree.dbapi._linkmap is None or \
2711                         self.vartree.dbapi._plib_registry is None or \
2712                         not self.vartree.dbapi._plib_registry.hasEntries():
2713                         return {}
2714
2715                 # Since preserved libraries can be consumers of other preserved
2716                 # libraries, use a graph to track consumer relationships.
2717                 plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
2718                 linkmap = self.vartree.dbapi._linkmap
2719                 lib_graph = digraph()
2720                 preserved_nodes = set()
2721                 preserved_paths = set()
2722                 path_cpv_map = {}
2723                 path_node_map = {}
2724                 root = self.settings['ROOT']
2725
2726                 def path_to_node(path):
2727                         node = path_node_map.get(path)
2728                         if node is None:
2729                                 node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
2730                                 alt_path_node = lib_graph.get(node)
2731                                 if alt_path_node is not None:
2732                                         node = alt_path_node
2733                                 node.alt_paths.add(path)
2734                                 path_node_map[path] = node
2735                         return node
2736
2737                 for cpv, plibs in plib_dict.items():
2738                         for f in plibs:
2739                                 path_cpv_map[f] = cpv
2740                                 preserved_node = path_to_node(f)
2741                                 if not preserved_node.file_exists():
2742                                         continue
2743                                 lib_graph.add(preserved_node, None)
2744                                 preserved_paths.add(f)
2745                                 preserved_nodes.add(preserved_node)
2746                                 for c in self.vartree.dbapi._linkmap.findConsumers(f):
2747                                         consumer_node = path_to_node(c)
2748                                         if not consumer_node.file_exists():
2749                                                 continue
2750                                         # Note that consumers may also be providers.
2751                                         lib_graph.add(preserved_node, consumer_node)
2752
2753                 # Eliminate consumers having providers with the same soname as an
2754                 # installed library that is not preserved. This eliminates
2755                 # libraries that are erroneously preserved due to a move from one
2756                 # directory to another.
2757                 # Also eliminate consumers that are going to be unmerged if
2758                 # unmerge_no_replacement is True.
2759                 provider_cache = {}
2760                 for preserved_node in preserved_nodes:
2761                         soname = linkmap.getSoname(preserved_node)
2762                         for consumer_node in lib_graph.parent_nodes(preserved_node):
2763                                 if consumer_node in preserved_nodes:
2764                                         continue
2765                                 if unmerge_no_replacement:
2766                                         will_be_unmerged = True
2767                                         for path in consumer_node.alt_paths:
2768                                                 if not self.isowner(path):
2769                                                         will_be_unmerged = False
2770                                                         break
2771                                         if will_be_unmerged:
2772                                                 # This consumer is not preserved and it is
2773                                                 # being unmerged, so drop this edge.
2774                                                 lib_graph.remove_edge(preserved_node, consumer_node)
2775                                                 continue
2776
2777                                 providers = provider_cache.get(consumer_node)
2778                                 if providers is None:
2779                                         providers = linkmap.findProviders(consumer_node)
2780                                         provider_cache[consumer_node] = providers
2781                                 providers = providers.get(soname)
2782                                 if providers is None:
2783                                         continue
2784                                 for provider in providers:
2785                                         if provider in preserved_paths:
2786                                                 continue
2787                                         provider_node = path_to_node(provider)
2788                                         if not provider_node.file_exists():
2789                                                 continue
2790                                         if provider_node in preserved_nodes:
2791                                                 continue
2792                                         # An alternative provider seems to be
2793                                         # installed, so drop this edge.
2794                                         lib_graph.remove_edge(preserved_node, consumer_node)
2795                                         break
2796
2797                 cpv_lib_map = {}
2798                 while lib_graph:
2799                         root_nodes = preserved_nodes.intersection(lib_graph.root_nodes())
2800                         if not root_nodes:
2801                                 break
2802                         lib_graph.difference_update(root_nodes)
2803                         unlink_list = set()
2804                         for node in root_nodes:
2805                                 unlink_list.update(node.alt_paths)
2806                         unlink_list = sorted(unlink_list)
2807                         for obj in unlink_list:
2808                                 cpv = path_cpv_map.get(obj)
2809                                 if cpv is None:
2810                                         # This means that a symlink is in the preserved libs
2811                                         # registry, but the actual lib it points to is not.
2812                                         self._display_merge(_("!!! symlink to lib is preserved, "
2813                                                 "but not the lib itself:\n!!! '%s'\n") % (obj,),
2814                                                 level=logging.ERROR, noiselevel=-1)
2815                                         continue
2816                                 removed = cpv_lib_map.get(cpv)
2817                                 if removed is None:
2818                                         removed = set()
2819                                         cpv_lib_map[cpv] = removed
2820                                 removed.add(obj)
2821
2822                 return cpv_lib_map
2823
2824         def _remove_preserved_libs(self, cpv_lib_map):
2825                 """
2826                 Remove files returned from _find_unused_preserved_libs().
2827                 """
2828
2829                 os = _os_merge
2830
2831                 files_to_remove = set()
2832                 for files in cpv_lib_map.values():
2833                         files_to_remove.update(files)
2834                 files_to_remove = sorted(files_to_remove)
2835                 showMessage = self._display_merge
2836                 root = self.settings['ROOT']
2837
2838                 parent_dirs = set()
2839                 for obj in files_to_remove:
2840                         obj = os.path.join(root, obj.lstrip(os.sep))
2841                         parent_dirs.add(os.path.dirname(obj))
2842                         if os.path.islink(obj):
2843                                 obj_type = _("sym")
2844                         else:
2845                                 obj_type = _("obj")
2846                         try:
2847                                 os.unlink(obj)
2848                         except OSError as e:
2849                                 if e.errno != errno.ENOENT:
2850                                         raise
2851                                 del e
2852                         else:
2853                                 showMessage(_("<<< !needed  %s %s\n") % (obj_type, obj),
2854                                         noiselevel=-1)
2855
2856                 # Remove empty parent directories if possible.
2857                 while parent_dirs:
2858                         x = parent_dirs.pop()
2859                         while True:
2860                                 try:
2861                                         os.rmdir(x)
2862                                 except OSError:
2863                                         break
2864                                 prev = x
2865                                 x = os.path.dirname(x)
2866                                 if x == prev:
2867                                         break
2868
2869                 self.vartree.dbapi._plib_registry.pruneNonExisting()
2870
2871         def _collision_protect(self, srcroot, destroot, mypkglist,
2872                 file_list, symlink_list):
2873
2874                         os = _os_merge
2875
2876                         collision_ignore = set([normalize_path(myignore) for myignore in \
2877                                 portage.util.shlex_split(
2878                                 self.settings.get("COLLISION_IGNORE", ""))])
2879
2880                         # For collisions with preserved libraries, the current package
2881                         # will assume ownership and the libraries will be unregistered.
2882                         if self.vartree.dbapi._plib_registry is None:
2883                                 # preserve-libs is entirely disabled
2884                                 plib_cpv_map = None
2885                                 plib_paths = None
2886                                 plib_inodes = {}
2887                         else:
2888                                 plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
2889                                 plib_cpv_map = {}
2890                                 plib_paths = set()
2891                                 for cpv, paths in plib_dict.items():
2892                                         plib_paths.update(paths)
2893                                         for f in paths:
2894                                                 plib_cpv_map[f] = cpv
2895                                 plib_inodes = self._lstat_inode_map(plib_paths)
2896
2897                         plib_collisions = {}
2898
2899                         showMessage = self._display_merge
2900                         stopmerge = False
2901                         collisions = []
2902                         symlink_collisions = []
2903                         destroot = self.settings['ROOT']
2904                         showMessage(_(" %s checking %d files for package collisions\n") % \
2905                                 (colorize("GOOD", "*"), len(file_list) + len(symlink_list)))
2906                         for i, (f, f_type) in enumerate(chain(
2907                                 ((f, "reg") for f in file_list),
2908                                 ((f, "sym") for f in symlink_list))):
2909                                 if i % 1000 == 0 and i != 0:
2910                                         showMessage(_("%d files checked ...\n") % i)
2911
2912                                 dest_path = normalize_path(
2913                                         os.path.join(destroot, f.lstrip(os.path.sep)))
2914                                 try:
2915                                         dest_lstat = os.lstat(dest_path)
2916                                 except EnvironmentError as e:
2917                                         if e.errno == errno.ENOENT:
2918                                                 del e
2919                                                 continue
2920                                         elif e.errno == errno.ENOTDIR:
2921                                                 del e
2922                                                 # A non-directory is in a location where this package
2923                                                 # expects to have a directory.
2924                                                 dest_lstat = None
2925                                                 parent_path = dest_path
2926                                                 while len(parent_path) > len(destroot):
2927                                                         parent_path = os.path.dirname(parent_path)
2928                                                         try:
2929                                                                 dest_lstat = os.lstat(parent_path)
2930                                                                 break
2931                                                         except EnvironmentError as e:
2932                                                                 if e.errno != errno.ENOTDIR:
2933                                                                         raise
2934                                                                 del e
2935                                                 if not dest_lstat:
2936                                                         raise AssertionError(
2937                                                                 "unable to find non-directory " + \
2938                                                                 "parent for '%s'" % dest_path)
2939                                                 dest_path = parent_path
2940                                                 f = os.path.sep + dest_path[len(destroot):]
2941                                                 if f in collisions:
2942                                                         continue
2943                                         else:
2944                                                 raise
2945                                 if f[0] != "/":
2946                                         f="/"+f
2947
2948                                 if stat.S_ISDIR(dest_lstat.st_mode):
2949                                         if f_type == "sym":
2950                                                 # This case is explicitly banned
2951                                                 # by PMS (see bug #326685).
2952                                                 symlink_collisions.append(f)
2953                                                 collisions.append(f)
2954                                                 continue
2955
2956                                 plibs = plib_inodes.get((dest_lstat.st_dev, dest_lstat.st_ino))
2957                                 if plibs:
2958                                         for path in plibs:
2959                                                 cpv = plib_cpv_map[path]
2960                                                 paths = plib_collisions.get(cpv)
2961                                                 if paths is None:
2962                                                         paths = set()
2963                                                         plib_collisions[cpv] = paths
2964                                                 paths.add(path)
2965                                         # The current package will assume ownership and the
2966                                         # libraries will be unregistered, so exclude this
2967                                         # path from the normal collisions.
2968                                         continue
2969
2970                                 isowned = False
2971                                 full_path = os.path.join(destroot, f.lstrip(os.path.sep))
2972                                 for ver in mypkglist:
2973                                         if ver.isowner(f):
2974                                                 isowned = True
2975                                                 break
2976                                 if not isowned and self.isprotected(full_path):
2977                                         isowned = True
2978                                 if not isowned:
2979                                         stopmerge = True
2980                                         if collision_ignore:
2981                                                 if f in collision_ignore:
2982                                                         stopmerge = False
2983                                                 else:
2984                                                         for myignore in collision_ignore:
2985                                                                 if f.startswith(myignore + os.path.sep):
2986                                                                         stopmerge = False
2987                                                                         break
2988                                         if stopmerge:
2989                                                 collisions.append(f)
2990                         return collisions, symlink_collisions, plib_collisions
2991
2992         def _lstat_inode_map(self, path_iter):
2993                 """
2994                 Use lstat to create a map of the form:
2995                   {(st_dev, st_ino) : set([path1, path2, ...])}
2996                 Multiple paths may reference the same inode due to hardlinks.
2997                 All lstat() calls are relative to self.myroot.
2998                 """
2999
3000                 os = _os_merge
3001
3002                 root = self.settings['ROOT']
3003                 inode_map = {}
3004                 for f in path_iter:
3005                         path = os.path.join(root, f.lstrip(os.sep))
3006                         try:
3007                                 st = os.lstat(path)
3008                         except OSError as e:
3009                                 if e.errno not in (errno.ENOENT, errno.ENOTDIR):
3010                                         raise
3011                                 del e
3012                                 continue
3013                         key = (st.st_dev, st.st_ino)
3014                         paths = inode_map.get(key)
3015                         if paths is None:
3016                                 paths = set()
3017                                 inode_map[key] = paths
3018                         paths.add(f)
3019                 return inode_map
3020
	def _security_check(self, installed_instances):
		"""
		Scan the contents of the given installed package instances for
		suid/sgid regular files whose hardlink count exceeds the number
		of links accounted for by those packages, and report them via
		eerror in the preinst phase.

		@param installed_instances: dblink instances whose getcontents()
			paths are scanned
		@rtype: int
		@returns: 1 if suspicious hardlinks were found, 0 otherwise
		"""
		if not installed_instances:
			return 0

		os = _os_merge

		showMessage = self._display_merge

		file_paths = set()
		for dblnk in installed_instances:
			file_paths.update(dblnk.getcontents())
		inode_map = {}
		real_paths = set()
		for i, path in enumerate(file_paths):

			if os is _os_merge:
				try:
					_unicode_encode(path,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a 
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						_unicode_encode(path,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						# NOTE: rebinding os here intentionally affects
						# all subsequent iterations of this loop.
						os = portage.os

			try:
				s = os.lstat(path)
			except OSError as e:
				# Missing paths are simply skipped.
				if e.errno not in (errno.ENOENT, errno.ENOTDIR):
					raise
				del e
				continue
			if not stat.S_ISREG(s.st_mode):
				continue
			# Deduplicate by resolved path so the same file is not
			# counted twice through different symlinked locations.
			path = os.path.realpath(path)
			if path in real_paths:
				continue
			real_paths.add(path)
			# Only multi-link suid/sgid files are of interest.
			if s.st_nlink > 1 and \
				s.st_mode & (stat.S_ISUID | stat.S_ISGID):
				k = (s.st_dev, s.st_ino)
				inode_map.setdefault(k, []).append((path, s))
		suspicious_hardlinks = []
		for path_list in inode_map.values():
			path, s = path_list[0]
			if len(path_list) == s.st_nlink:
				# All hardlinks seem to be owned by this package.
				continue
			suspicious_hardlinks.append(path_list)
		if not suspicious_hardlinks:
			return 0

		msg = []
		msg.append(_("suid/sgid file(s) "
			"with suspicious hardlink(s):"))
		msg.append("")
		for path_list in suspicious_hardlinks:
			for path, s in path_list:
				msg.append("\t%s" % path)
		msg.append("")
		msg.append(_("See the Gentoo Security Handbook " 
			"guide for advice on how to proceed."))

		self._eerror("preinst", msg)

		return 1
3093
	def _eqawarn(self, phase, lines):
		"""Log the given lines as QA warnings (eqawarn) for the given phase."""
		self._elog("eqawarn", phase, lines)
3096
	def _eerror(self, phase, lines):
		"""Log the given lines as errors (eerror) for the given phase."""
		self._elog("eerror", phase, lines)
3099
3100         def _elog(self, funcname, phase, lines):
3101                 func = getattr(portage.elog.messages, funcname)
3102                 if self._scheduler is None:
3103                         for l in lines:
3104                                 func(l, phase=phase, key=self.mycpv)
3105                 else:
3106                         background = self.settings.get("PORTAGE_BACKGROUND") == "1"
3107                         log_path = None
3108                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
3109                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
3110                         out = io.StringIO()
3111                         for line in lines:
3112                                 func(line, phase=phase, key=self.mycpv, out=out)
3113                         msg = out.getvalue()
3114                         self._scheduler.output(msg,
3115                                 background=background, log_path=log_path)
3116
3117         def _elog_process(self, phasefilter=None):
3118                 cpv = self.mycpv
3119                 if self._pipe is None:
3120                         elog_process(cpv, self.settings, phasefilter=phasefilter)
3121                 else:
3122                         logdir = os.path.join(self.settings["T"], "logging")
3123                         ebuild_logentries = collect_ebuild_messages(logdir)
3124                         py_logentries = collect_messages(key=cpv).get(cpv, {})
3125                         logentries = _merge_logentries(py_logentries, ebuild_logentries)
3126                         funcnames = {
3127                                 "INFO": "einfo",
3128                                 "LOG": "elog",
3129                                 "WARN": "ewarn",
3130                                 "QA": "eqawarn",
3131                                 "ERROR": "eerror"
3132                         }
3133                         str_buffer = []
3134                         for phase, messages in logentries.items():
3135                                 for key, lines in messages:
3136                                         funcname = funcnames[key]
3137                                         if isinstance(lines, basestring):
3138                                                 lines = [lines]
3139                                         for line in lines:
3140                                                 for line in line.split('\n'):
3141                                                         fields = (funcname, phase, cpv, line)
3142                                                         str_buffer.append(' '.join(fields))
3143                                                         str_buffer.append('\n')
3144                         if str_buffer:
3145                                 os.write(self._pipe, _unicode_encode(''.join(str_buffer)))
3146
	def _emerge_log(self, msg):
		"""Write msg to the emerge log via emergelog.
		NOTE(review): the first positional arg presumably toggles xterm
		title updates -- confirm against the emergelog signature."""
		emergelog(False, msg)
3149
3150         def treewalk(self, srcroot, destroot, inforoot, myebuild, cleanup=0,
3151                 mydbapi=None, prev_mtimes=None, counter=None):
3152                 """
3153                 
3154                 This function does the following:
3155                 
3156                 calls self._preserve_libs if FEATURES=preserve-libs
3157                 calls self._collision_protect if FEATURES=collision-protect
3158                 calls doebuild(mydo=pkg_preinst)
3159                 Merges the package to the livefs
3160                 unmerges old version (if required)
3161                 calls doebuild(mydo=pkg_postinst)
3162                 calls env_update
3163                 
3164                 @param srcroot: Typically this is ${D}
3165                 @type srcroot: String (Path)
3166                 @param destroot: ignored, self.settings['ROOT'] is used instead
3167                 @type destroot: String (Path)
3168                 @param inforoot: root of the vardb entry ?
3169                 @type inforoot: String (Path)
3170                 @param myebuild: path to the ebuild that we are processing
3171                 @type myebuild: String (Path)
3172                 @param mydbapi: dbapi which is handed to doebuild.
3173                 @type mydbapi: portdbapi instance
3174                 @param prev_mtimes: { Filename:mtime } mapping for env_update
3175                 @type prev_mtimes: Dictionary
3176                 @rtype: Boolean
3177                 @returns:
3178                 1. 0 on success
3179                 2. 1 on failure
3180                 
3181                 secondhand is a list of symlinks that have been skipped due to their target
3182                 not existing; we will merge these symlinks at a later time.
3183                 """
3184
3185                 os = _os_merge
3186
3187                 srcroot = _unicode_decode(srcroot,
3188                         encoding=_encodings['content'], errors='strict')
3189                 destroot = self.settings['ROOT']
3190                 inforoot = _unicode_decode(inforoot,
3191                         encoding=_encodings['content'], errors='strict')
3192                 myebuild = _unicode_decode(myebuild,
3193                         encoding=_encodings['content'], errors='strict')
3194
3195                 showMessage = self._display_merge
3196                 srcroot = normalize_path(srcroot).rstrip(os.path.sep) + os.path.sep
3197
3198                 if not os.path.isdir(srcroot):
3199                         showMessage(_("!!! Directory Not Found: D='%s'\n") % srcroot,
3200                                 level=logging.ERROR, noiselevel=-1)
3201                         return 1
3202
3203                 slot = ''
3204                 for var_name in ('CHOST', 'SLOT'):
3205                         if var_name == 'CHOST' and self.cat == 'virtual':
3206                                 try:
3207                                         os.unlink(os.path.join(inforoot, var_name))
3208                                 except OSError:
3209                                         pass
3210                                 continue
3211
3212                         f = None
3213                         try:
3214                                 f = io.open(_unicode_encode(
3215                                         os.path.join(inforoot, var_name),
3216                                         encoding=_encodings['fs'], errors='strict'),
3217                                         mode='r', encoding=_encodings['repo.content'],
3218                                         errors='replace')
3219                                 val = f.readline().strip()
3220                         except EnvironmentError as e:
3221                                 if e.errno != errno.ENOENT:
3222                                         raise
3223                                 del e
3224                                 val = ''
3225                         finally:
3226                                 if f is not None:
3227                                         f.close()
3228
3229                         if var_name == 'SLOT':
3230                                 slot = val
3231
3232                                 if not slot.strip():
3233                                         slot = self.settings.get(var_name, '')
3234                                         if not slot.strip():
3235                                                 showMessage(_("!!! SLOT is undefined\n"),
3236                                                         level=logging.ERROR, noiselevel=-1)
3237                                                 return 1
3238                                         write_atomic(os.path.join(inforoot, var_name), slot + '\n')
3239
3240                         if val != self.settings.get(var_name, ''):
3241                                 self._eqawarn('preinst',
3242                                         [_("QA Notice: Expected %(var_name)s='%(expected_value)s', got '%(actual_value)s'\n") % \
3243                                         {"var_name":var_name, "expected_value":self.settings.get(var_name, ''), "actual_value":val}])
3244
3245                 def eerror(lines):
3246                         self._eerror("preinst", lines)
3247
3248                 if not os.path.exists(self.dbcatdir):
3249                         ensure_dirs(self.dbcatdir)
3250
3251                 cp = self.mysplit[0]
3252                 slot_atom = "%s:%s" % (cp, slot)
3253
3254                 # filter any old-style virtual matches
3255                 slot_matches = [cpv for cpv in self.vartree.dbapi.match(slot_atom) \
3256                         if cpv_getkey(cpv) == cp]
3257
3258                 if self.mycpv not in slot_matches and \
3259                         self.vartree.dbapi.cpv_exists(self.mycpv):
3260                         # handle multislot or unapplied slotmove
3261                         slot_matches.append(self.mycpv)
3262
3263                 others_in_slot = []
3264                 from portage import config
3265                 for cur_cpv in slot_matches:
3266                         # Clone the config in case one of these has to be unmerged since
3267                         # we need it to have private ${T} etc... for things like elog.
3268                         settings_clone = config(clone=self.settings)
3269                         settings_clone.pop("PORTAGE_BUILDIR_LOCKED", None)
3270                         settings_clone.reset()
3271                         others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
3272                                 settings=settings_clone,
3273                                 vartree=self.vartree, treetype="vartree",
3274                                 scheduler=self._scheduler, pipe=self._pipe))
3275
3276                 retval = self._security_check(others_in_slot)
3277                 if retval:
3278                         return retval
3279
3280                 if slot_matches:
3281                         # Used by self.isprotected().
3282                         max_dblnk = None
3283                         max_counter = -1
3284                         for dblnk in others_in_slot:
3285                                 cur_counter = self.vartree.dbapi.cpv_counter(dblnk.mycpv)
3286                                 if cur_counter > max_counter:
3287                                         max_counter = cur_counter
3288                                         max_dblnk = dblnk
3289                         self._installed_instance = max_dblnk
3290
3291                 # We check for unicode encoding issues after src_install. However,
3292                 # the check must be repeated here for binary packages (it's
3293                 # inexpensive since we call os.walk() here anyway).
3294                 unicode_errors = []
3295                 line_ending_re = re.compile('[\n\r]')
3296
3297                 while True:
3298
3299                         unicode_error = False
3300
3301                         myfilelist = []
3302                         mylinklist = []
3303                         paths_with_newlines = []
3304                         srcroot_len = len(srcroot)
3305                         def onerror(e):
3306                                 raise
3307                         for parent, dirs, files in os.walk(srcroot, onerror=onerror):
3308                                 try:
3309                                         parent = _unicode_decode(parent,
3310                                                 encoding=_encodings['merge'], errors='strict')
3311                                 except UnicodeDecodeError:
3312                                         new_parent = _unicode_decode(parent,
3313                                                 encoding=_encodings['merge'], errors='replace')
3314                                         new_parent = _unicode_encode(new_parent,
3315                                                 encoding='ascii', errors='backslashreplace')
3316                                         new_parent = _unicode_decode(new_parent,
3317                                                 encoding=_encodings['merge'], errors='replace')
3318                                         os.rename(parent, new_parent)
3319                                         unicode_error = True
3320                                         unicode_errors.append(new_parent[srcroot_len:])
3321                                         break
3322
3323                                 for fname in files:
3324                                         try:
3325                                                 fname = _unicode_decode(fname,
3326                                                         encoding=_encodings['merge'], errors='strict')
3327                                         except UnicodeDecodeError:
3328                                                 fpath = portage._os.path.join(
3329                                                         parent.encode(_encodings['merge']), fname)
3330                                                 new_fname = _unicode_decode(fname,
3331                                                         encoding=_encodings['merge'], errors='replace')
3332                                                 new_fname = _unicode_encode(new_fname,
3333                                                         encoding='ascii', errors='backslashreplace')
3334                                                 new_fname = _unicode_decode(new_fname,
3335                                                         encoding=_encodings['merge'], errors='replace')
3336                                                 new_fpath = os.path.join(parent, new_fname)
3337                                                 os.rename(fpath, new_fpath)
3338                                                 unicode_error = True
3339                                                 unicode_errors.append(new_fpath[srcroot_len:])
3340                                                 fname = new_fname
3341                                                 fpath = new_fpath
3342                                         else:
3343                                                 fpath = os.path.join(parent, fname)
3344
3345                                         relative_path = fpath[srcroot_len:]
3346
3347                                         if line_ending_re.search(relative_path) is not None:
3348                                                 paths_with_newlines.append(relative_path)
3349
3350                                         file_mode = os.lstat(fpath).st_mode
3351                                         if stat.S_ISREG(file_mode):
3352                                                 myfilelist.append(relative_path)
3353                                         elif stat.S_ISLNK(file_mode):
3354                                                 # Note: os.walk puts symlinks to directories in the "dirs"
3355                                                 # list and it does not traverse them since that could lead
3356                                                 # to an infinite recursion loop.
3357                                                 mylinklist.append(relative_path)
3358
3359                                 if unicode_error:
3360                                         break
3361
3362                         if not unicode_error:
3363                                 break
3364
3365                 if unicode_errors:
3366                         eerror(_merge_unicode_error(unicode_errors))
3367
3368                 if paths_with_newlines:
3369                         msg = []
3370                         msg.append(_("This package installs one or more files containing line ending characters:"))
3371                         msg.append("")
3372                         paths_with_newlines.sort()
3373                         for f in paths_with_newlines:
3374                                 msg.append("\t/%s" % (f.replace("\n", "\\n").replace("\r", "\\r")))
3375                         msg.append("")
3376                         msg.append(_("package %s NOT merged") % self.mycpv)
3377                         msg.append("")
3378                         eerror(msg)
3379                         return 1
3380
3381                 # If there are no files to merge, and an installed package in the same
3382                 # slot has files, it probably means that something went wrong.
3383                 if self.settings.get("PORTAGE_PACKAGE_EMPTY_ABORT") == "1" and \
3384                         not myfilelist and not mylinklist and others_in_slot:
3385                         installed_files = None
3386                         for other_dblink in others_in_slot:
3387                                 installed_files = other_dblink.getcontents()
3388                                 if not installed_files:
3389                                         continue
3390                                 from textwrap import wrap
3391                                 wrap_width = 72
3392                                 msg = []
3393                                 d = {
3394                                         "new_cpv":self.mycpv,
3395                                         "old_cpv":other_dblink.mycpv
3396                                 }
3397                                 msg.extend(wrap(_("The '%(new_cpv)s' package will not install "
3398                                         "any files, but the currently installed '%(old_cpv)s'"
3399                                         " package has the following files: ") % d, wrap_width))
3400                                 msg.append("")
3401                                 msg.extend(sorted(installed_files))
3402                                 msg.append("")
3403                                 msg.append(_("package %s NOT merged") % self.mycpv)
3404                                 msg.append("")
3405                                 msg.extend(wrap(
3406                                         _("Manually run `emerge --unmerge =%s` if you "
3407                                         "really want to remove the above files. Set "
3408                                         "PORTAGE_PACKAGE_EMPTY_ABORT=\"0\" in "
3409                                         "/etc/make.conf if you do not want to "
3410                                         "abort in cases like this.") % other_dblink.mycpv,
3411                                         wrap_width))
3412                                 eerror(msg)
3413                         if installed_files:
3414                                 return 1
3415
3416                 # check for package collisions
3417                 blockers = self._blockers
3418                 if blockers is None:
3419                         blockers = []
3420                 collisions, symlink_collisions, plib_collisions = \
3421                         self._collision_protect(srcroot, destroot,
3422                         others_in_slot + blockers, myfilelist, mylinklist)
3423
3424                 # Make sure the ebuild environment is initialized and that ${T}/elog
3425                 # exists for logging of collision-protect eerror messages.
3426                 if myebuild is None:
3427                         myebuild = os.path.join(inforoot, self.pkg + ".ebuild")
3428                 doebuild_environment(myebuild, "preinst",
3429                         settings=self.settings, db=mydbapi)
3430                 self.settings["REPLACING_VERSIONS"] = " ".join(
3431                         [portage.versions.cpv_getversion(other.mycpv)
3432                         for other in others_in_slot])
3433                 prepare_build_dirs(settings=self.settings, cleanup=cleanup)
3434
3435                 if collisions:
3436                         collision_protect = "collision-protect" in self.settings.features
3437                         protect_owned = "protect-owned" in self.settings.features
3438                         msg = _("This package will overwrite one or more files that"
3439                         " may belong to other packages (see list below).")
3440                         if not (collision_protect or protect_owned):
3441                                 msg += _(" Add either \"collision-protect\" or" 
3442                                 " \"protect-owned\" to FEATURES in"
3443                                 " make.conf if you would like the merge to abort"
3444                                 " in cases like this. See the make.conf man page for"
3445                                 " more information about these features.")
3446                         if self.settings.get("PORTAGE_QUIET") != "1":
3447                                 msg += _(" You can use a command such as"
3448                                 " `portageq owners / <filename>` to identify the"
3449                                 " installed package that owns a file. If portageq"
3450                                 " reports that only one package owns a file then do NOT"
3451                                 " file a bug report. A bug report is only useful if it"
3452                                 " identifies at least two or more packages that are known"
3453                                 " to install the same file(s)."
3454                                 " If a collision occurs and you"
3455                                 " can not explain where the file came from then you"
3456                                 " should simply ignore the collision since there is not"
3457                                 " enough information to determine if a real problem"
3458                                 " exists. Please do NOT file a bug report at"
3459                                 " http://bugs.gentoo.org unless you report exactly which"
3460                                 " two packages install the same file(s). Once again,"
3461                                 " please do NOT file a bug report unless you have"
3462                                 " completely understood the above message.")
3463
3464                         self.settings["EBUILD_PHASE"] = "preinst"
3465                         from textwrap import wrap
3466                         msg = wrap(msg, 70)
3467                         if collision_protect:
3468                                 msg.append("")
3469                                 msg.append(_("package %s NOT merged") % self.settings.mycpv)
3470                         msg.append("")
3471                         msg.append(_("Detected file collision(s):"))
3472                         msg.append("")
3473
3474                         for f in collisions:
3475                                 msg.append("\t%s" % \
3476                                         os.path.join(destroot, f.lstrip(os.path.sep)))
3477
3478                         eerror(msg)
3479
3480                         owners = None
3481                         if collision_protect or protect_owned or symlink_collisions:
3482                                 msg = []
3483                                 msg.append("")
3484                                 msg.append(_("Searching all installed"
3485                                         " packages for file collisions..."))
3486                                 msg.append("")
3487                                 msg.append(_("Press Ctrl-C to Stop"))
3488                                 msg.append("")
3489                                 eerror(msg)
3490
3491                                 if len(collisions) > 20:
3492                                         # get_owners is slow for large numbers of files, so
3493                                         # don't look them all up.
3494                                         collisions = collisions[:20]
3495                                 self.lockdb()
3496                                 try:
3497                                         owners = self.vartree.dbapi._owners.get_owners(collisions)
3498                                         self.vartree.dbapi.flush_cache()
3499                                 finally:
3500                                         self.unlockdb()
3501
3502                                 for pkg, owned_files in owners.items():
3503                                         cpv = pkg.mycpv
3504                                         msg = []
3505                                         msg.append("%s" % cpv)
3506                                         for f in sorted(owned_files):
3507                                                 msg.append("\t%s" % os.path.join(destroot,
3508                                                         f.lstrip(os.path.sep)))
3509                                         msg.append("")
3510                                         eerror(msg)
3511
3512                                 if not owners:
3513                                         eerror([_("None of the installed"
3514                                                 " packages claim the file(s)."), ""])
3515
3516                         # The explanation about the collision and how to solve
3517                         # it may not be visible via a scrollback buffer, especially
3518                         # if the number of file collisions is large. Therefore,
3519                         # show a summary at the end.
3520                         abort = False
3521                         if collision_protect:
3522                                 abort = True
3523                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3524                                         self.settings.mycpv
3525                         elif protect_owned and owners:
3526                                 abort = True
3527                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3528                                         self.settings.mycpv
3529                         elif symlink_collisions:
3530                                 abort = True
3531                                 msg = _("Package '%s' NOT merged due to collision " + \
3532                                 "between a symlink and a directory which is explicitly " + \
3533                                 "forbidden by PMS (see bug #326685).") % \
3534                                 (self.settings.mycpv,)
3535                         else:
3536                                 msg = _("Package '%s' merged despite file collisions.") % \
3537                                         self.settings.mycpv
3538                         msg += _(" If necessary, refer to your elog "
3539                                 "messages for the whole content of the above message.")
3540                         eerror(wrap(msg, 70))
3541
3542                         if abort:
3543                                 return 1
3544
3545                 # The merge process may move files out of the image directory,
3546                 # which causes invalidation of the .installed flag.
3547                 try:
3548                         os.unlink(os.path.join(
3549                                 os.path.dirname(normalize_path(srcroot)), ".installed"))
3550                 except OSError as e:
3551                         if e.errno != errno.ENOENT:
3552                                 raise
3553                         del e
3554
3555                 self.dbdir = self.dbtmpdir
3556                 self.delete()
3557                 ensure_dirs(self.dbtmpdir)
3558
3559                 # run preinst script
3560                 showMessage(_(">>> Merging %(cpv)s to %(destroot)s\n") % \
3561                         {"cpv":self.mycpv, "destroot":destroot})
3562                 phase = EbuildPhase(background=False, phase="preinst",
3563                         scheduler=self._scheduler, settings=self.settings)
3564                 phase.start()
3565                 a = phase.wait()
3566
3567                 # XXX: Decide how to handle failures here.
3568                 if a != os.EX_OK:
3569                         showMessage(_("!!! FAILED preinst: ")+str(a)+"\n",
3570                                 level=logging.ERROR, noiselevel=-1)
3571                         return a
3572
3573                 # copy "info" files (like SLOT, CFLAGS, etc.) into the database
3574                 for x in os.listdir(inforoot):
3575                         self.copyfile(inforoot+"/"+x)
3576
3577                 # write local package counter for recording
3578                 if counter is None:
3579                         counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv)
3580                 f = io.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'),
3581                         encoding=_encodings['fs'], errors='strict'),
3582                         mode='w', encoding=_encodings['repo.content'],
3583                         errors='backslashreplace')
3584                 f.write(_unicode_decode(str(counter)))
3585                 f.close()
3586
3587                 self.updateprotect()
3588
3589                 #if we have a file containing previously-merged config file md5sums, grab it.
3590                 self.vartree.dbapi._fs_lock()
3591                 try:
3592                         cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file)
3593                         if "NOCONFMEM" in self.settings:
3594                                 cfgfiledict["IGNORE"]=1
3595                         else:
3596                                 cfgfiledict["IGNORE"]=0
3597
3598                         # Always behave like --noconfmem is enabled for downgrades
3599                         # so that people who don't know about this option are less
3600                         # likely to get confused when doing upgrade/downgrade cycles.
3601                         pv_split = catpkgsplit(self.mycpv)[1:]
3602                         for other in others_in_slot:
3603                                 if pkgcmp(pv_split, catpkgsplit(other.mycpv)[1:]) < 0:
3604                                         cfgfiledict["IGNORE"] = 1
3605                                         break
3606
3607                         rval = self._merge_contents(srcroot, destroot, cfgfiledict)
3608                         if rval != os.EX_OK:
3609                                 return rval
3610                 finally:
3611                         self.vartree.dbapi._fs_unlock()
3612
3613                 # These caches are populated during collision-protect and the data
3614                 # they contain is now invalid. It's very important to invalidate
3615                 # the contents_inodes cache so that FEATURES=unmerge-orphans
3616                 # doesn't unmerge anything that belongs to this package that has
3617                 # just been merged.
3618                 for dblnk in others_in_slot:
3619                         dblnk._clear_contents_cache()
3620                 self._clear_contents_cache()
3621
3622                 linkmap = self.vartree.dbapi._linkmap
3623                 plib_registry = self.vartree.dbapi._plib_registry
3624                 # We initialize preserve_paths to an empty set rather
3625                 # than None here because it plays an important role
3626                 # in prune_plib_registry logic by serving to indicate
3627                 # that we have a replacement for a package that's
3628                 # being unmerged.
3629
3630                 preserve_paths = set()
3631                 needed = None
3632                 if not (self._linkmap_broken or linkmap is None or
3633                         plib_registry is None):
3634                         self.vartree.dbapi._fs_lock()
3635                         plib_registry.lock()
3636                         try:
3637                                 plib_registry.load()
3638                                 needed = os.path.join(inforoot, linkmap._needed_aux_key)
3639                                 self._linkmap_rebuild(include_file=needed)
3640
3641                                 # Preserve old libs if they are still in use
3642                                 # TODO: Handle cases where the previous instance
3643                                 # has already been uninstalled but it still has some
3644                                 # preserved libraries in the registry that we may
3645                                 # want to preserve here.
3646                                 preserve_paths = self._find_libs_to_preserve()
3647                         finally:
3648                                 plib_registry.unlock()
3649                                 self.vartree.dbapi._fs_unlock()
3650
3651                         if preserve_paths:
3652                                 self._add_preserve_libs_to_contents(preserve_paths)
3653
3654                 # If portage is reinstalling itself, remove the old
3655                 # version now since we want to use the temporary
3656                 # PORTAGE_BIN_PATH that will be removed when we return.
3657                 reinstall_self = False
3658                 if self.myroot == "/" and \
3659                         match_from_list(PORTAGE_PACKAGE_ATOM, [self.mycpv]):
3660                         reinstall_self = True
3661
3662                 emerge_log = self._emerge_log
3663
3664                 # If we have any preserved libraries then autoclean
3665                 # is forced so that preserve-libs logic doesn't have
3666                 # to account for the additional complexity of the
3667                 # AUTOCLEAN=no mode.
3668                 autoclean = self.settings.get("AUTOCLEAN", "yes") == "yes" \
3669                         or preserve_paths
3670
3671                 if autoclean:
3672                         emerge_log(_(" >>> AUTOCLEAN: %s") % (slot_atom,))
3673
3674                 others_in_slot.append(self)  # self has just been merged
3675                 for dblnk in list(others_in_slot):
3676                         if dblnk is self:
3677                                 continue
3678                         if not (autoclean or dblnk.mycpv == self.mycpv or reinstall_self):
3679                                 continue
3680                         showMessage(_(">>> Safely unmerging already-installed instance...\n"))
3681                         emerge_log(_(" === Unmerging... (%s)") % (dblnk.mycpv,))
3682                         others_in_slot.remove(dblnk) # dblnk will unmerge itself now
3683                         dblnk._linkmap_broken = self._linkmap_broken
3684                         dblnk.settings["REPLACED_BY_VERSION"] = portage.versions.cpv_getversion(self.mycpv)
3685                         dblnk.settings.backup_changes("REPLACED_BY_VERSION")
3686                         unmerge_rval = dblnk.unmerge(ldpath_mtimes=prev_mtimes,
3687                                 others_in_slot=others_in_slot, needed=needed,
3688                                 preserve_paths=preserve_paths)
3689                         dblnk.settings.pop("REPLACED_BY_VERSION", None)
3690
3691                         if unmerge_rval == os.EX_OK:
3692                                 emerge_log(_(" >>> unmerge success: %s") % (dblnk.mycpv,))
3693                         else:
3694                                 emerge_log(_(" !!! unmerge FAILURE: %s") % (dblnk.mycpv,))
3695
3696                         self.lockdb()
3697                         try:
3698                                 # TODO: Check status and abort if necessary.
3699                                 dblnk.delete()
3700                         finally:
3701                                 self.unlockdb()
3702                         showMessage(_(">>> Original instance of package unmerged safely.\n"))
3703
3704                 if len(others_in_slot) > 1:
3705                         showMessage(colorize("WARN", _("WARNING:"))
3706                                 + _(" AUTOCLEAN is disabled.  This can cause serious"
3707                                 " problems due to overlapping packages.\n"),
3708                                 level=logging.WARN, noiselevel=-1)
3709
3710                 # We hold both directory locks.
3711                 self.dbdir = self.dbpkgdir
3712                 self.lockdb()
3713                 try:
3714                         self.delete()
3715                         _movefile(self.dbtmpdir, self.dbpkgdir, mysettings=self.settings)
3716                 finally:
3717                         self.unlockdb()
3718
3719                 # Check for file collisions with blocking packages
3720                 # and remove any colliding files from their CONTENTS
3721                 # since they now belong to this package.
3722                 self._clear_contents_cache()
3723                 contents = self.getcontents()
3724                 destroot_len = len(destroot) - 1
3725                 self.lockdb()
3726                 try:
3727                         for blocker in blockers:
3728                                 self.vartree.dbapi.removeFromContents(blocker, iter(contents),
3729                                         relative_paths=False)
3730                 finally:
3731                         self.unlockdb()
3732
3733                 plib_registry = self.vartree.dbapi._plib_registry
3734                 if plib_registry:
3735                         self.vartree.dbapi._fs_lock()
3736                         plib_registry.lock()
3737                         try:
3738                                 plib_registry.load()
3739
3740                                 if preserve_paths:
3741                                         # keep track of the libs we preserved
3742                                         plib_registry.register(self.mycpv, slot, counter,
3743                                                 sorted(preserve_paths))
3744
3745                                 # Unregister any preserved libs that this package has overwritten
3746                                 # and update the contents of the packages that owned them.
3747                                 plib_dict = plib_registry.getPreservedLibs()
3748                                 for cpv, paths in plib_collisions.items():
3749                                         if cpv not in plib_dict:
3750                                                 continue
3751                                         has_vdb_entry = False
3752                                         if cpv != self.mycpv:
3753                                                 # If we've replaced another instance with the
3754                                                 # same cpv then the vdb entry no longer belongs
3755                                                 # to it, so we'll have to get the slot and counter
3756                                                 # from plib_registry._data instead.
3757                                                 self.vartree.dbapi.lock()
3758                                                 try:
3759                                                         try:
3760                                                                 slot, counter = self.vartree.dbapi.aux_get(
3761                                                                         cpv, ["SLOT", "COUNTER"])
3762                                                         except KeyError:
3763                                                                 pass
3764                                                         else:
3765                                                                 has_vdb_entry = True
3766                                                                 self.vartree.dbapi.removeFromContents(
3767                                                                         cpv, paths)
3768                                                 finally:
3769                                                         self.vartree.dbapi.unlock()
3770
3771                                         if not has_vdb_entry:
3772                                                 # It's possible for previously unmerged packages
3773                                                 # to have preserved libs in the registry, so try
3774                                                 # to retrieve the slot and counter from there.
3775                                                 has_registry_entry = False
3776                                                 for plib_cps, (plib_cpv, plib_counter, plib_paths) in \
3777                                                         plib_registry._data.items():
3778                                                         if plib_cpv != cpv:
3779                                                                 continue
3780                                                         try:
3781                                                                 cp, slot = plib_cps.split(":", 1)
3782                                                         except ValueError:
3783                                                                 continue
3784                                                         counter = plib_counter
3785                                                         has_registry_entry = True
3786                                                         break
3787
3788                                                 if not has_registry_entry:
3789                                                         continue
3790
3791                                         remaining = [f for f in plib_dict[cpv] if f not in paths]
3792                                         plib_registry.register(cpv, slot, counter, remaining)
3793
3794                                 plib_registry.store()
3795                         finally:
3796                                 plib_registry.unlock()
3797                                 self.vartree.dbapi._fs_unlock()
3798
3799                 self.vartree.dbapi._add(self)
3800                 contents = self.getcontents()
3801
3802                 #do postinst script
3803                 self.settings["PORTAGE_UPDATE_ENV"] = \
3804                         os.path.join(self.dbpkgdir, "environment.bz2")
3805                 self.settings.backup_changes("PORTAGE_UPDATE_ENV")
3806                 try:
3807                         phase = EbuildPhase(background=False, phase="postinst",
3808                                 scheduler=self._scheduler, settings=self.settings)
3809                         phase.start()
3810                         a = phase.wait()
3811                         if a == os.EX_OK:
3812                                 showMessage(_(">>> %s merged.\n") % self.mycpv)
3813                 finally:
3814                         self.settings.pop("PORTAGE_UPDATE_ENV", None)
3815
3816                 if a != os.EX_OK:
3817                         # It's stupid to bail out here, so keep going regardless of
3818                         # phase return code.
3819                         showMessage(_("!!! FAILED postinst: ")+str(a)+"\n",
3820                                 level=logging.ERROR, noiselevel=-1)
3821
3822                 #update environment settings, library paths. DO NOT change symlinks.
3823                 env_update(
3824                         target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes,
3825                         contents=contents, env=self.settings,
3826                         writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi)
3827
3828                 # For gcc upgrades, preserved libs have to be removed after the
3829                 # the library path has been updated.
3830                 self._prune_plib_registry()
3831
3832                 return os.EX_OK
3833
3834         def _new_backup_path(self, p):
3835                 """
3836                 The works for any type path, such as a regular file, symlink,
3837                 or directory. The parent directory is assumed to exist.
3838                 The returned filename is of the form p + '.backup.' + x, where
3839                 x guarantees that the returned path does not exist yet.
3840                 """
3841                 os = _os_merge
3842
3843                 x = -1
3844                 while True:
3845                         x += 1
3846                         backup_p = p + '.backup.' + str(x).rjust(4, '0')
3847                         try:
3848                                 os.lstat(backup_p)
3849                         except OSError:
3850                                 break
3851
3852                 return backup_p
3853
3854         def _merge_contents(self, srcroot, destroot, cfgfiledict):
3855
3856                 cfgfiledict_orig = cfgfiledict.copy()
3857
3858                 # open CONTENTS file (possibly overwriting old one) for recording
3859                 # Use atomic_ofstream for automatic coercion of raw bytes to
3860                 # unicode, in order to prevent TypeError when writing raw bytes
3861                 # to TextIOWrapper with python2.
3862                 outfile = atomic_ofstream(_unicode_encode(
3863                         os.path.join(self.dbtmpdir, 'CONTENTS'),
3864                         encoding=_encodings['fs'], errors='strict'),
3865                         mode='w', encoding=_encodings['repo.content'],
3866                         errors='backslashreplace')
3867
3868                 # Don't bump mtimes on merge since some application require
3869                 # preservation of timestamps.  This means that the unmerge phase must
3870                 # check to see if file belongs to an installed instance in the same
3871                 # slot.
3872                 mymtime = None
3873
3874                 # set umask to 0 for merging; back up umask, save old one in prevmask (since this is a global change)
3875                 prevmask = os.umask(0)
3876                 secondhand = []
3877
3878                 # we do a first merge; this will recurse through all files in our srcroot but also build up a
3879                 # "second hand" of symlinks to merge later
3880                 if self.mergeme(srcroot, destroot, outfile, secondhand,
3881                         self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime):
3882                         return 1
3883
3884                 # now, it's time for dealing our second hand; we'll loop until we can't merge anymore.  The rest are
3885                 # broken symlinks.  We'll merge them too.
3886                 lastlen = 0
3887                 while len(secondhand) and len(secondhand)!=lastlen:
3888                         # clear the thirdhand.  Anything from our second hand that
3889                         # couldn't get merged will be added to thirdhand.
3890
3891                         thirdhand = []
3892                         if self.mergeme(srcroot, destroot, outfile, thirdhand,
3893                                 secondhand, cfgfiledict, mymtime):
3894                                 return 1
3895
3896                         #swap hands
3897                         lastlen = len(secondhand)
3898
3899                         # our thirdhand now becomes our secondhand.  It's ok to throw
3900                         # away secondhand since thirdhand contains all the stuff that
3901                         # couldn't be merged.
3902                         secondhand = thirdhand
3903
3904                 if len(secondhand):
3905                         # force merge of remaining symlinks (broken or circular; oh well)
3906                         if self.mergeme(srcroot, destroot, outfile, None,
3907                                 secondhand, cfgfiledict, mymtime):
3908                                 return 1
3909
3910                 #restore umask
3911                 os.umask(prevmask)
3912
3913                 #if we opened it, close it
3914                 outfile.flush()
3915                 outfile.close()
3916
3917                 # write out our collection of md5sums
3918                 if cfgfiledict != cfgfiledict_orig:
3919                         cfgfiledict.pop("IGNORE", None)
3920                         try:
3921                                 writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
3922                         except InvalidLocation:
3923                                 self.settings._init_dirs()
3924                                 writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
3925
3926                 return os.EX_OK
3927
	def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime):
		"""
		This function handles actual merging of the package contents to the livefs.
		It also handles config protection.

		@param srcroot: Where are we copying files from (usually ${D})
		@type srcroot: String (Path)
		@param destroot: Typically ${ROOT}
		@type destroot: String (Path)
		@param outfile: File to log operations to
		@type outfile: File Object
		@param secondhand: A list of symlinks that point to non-existing files
		that may get merged later; None means "force" mode (merge symlinks
		even if their targets don't exist yet)
		@type secondhand: List or None
		@param stufftomerge: Either a directory to merge, or a list of items.
		@type stufftomerge: String or List
		@param cfgfiledict: { File:mtime } mapping for config_protected files
		@type cfgfiledict: Dictionary
		@param thismtime: The current time (typically long(time.time()))
		@type thismtime: Long
		@rtype: None or Boolean
		@returns:
		1. True on failure
		2. None otherwise

		"""

		showMessage = self._display_merge
		writemsg = self._display_merge

		# Use the merge-encoding-aware os wrapper so that filenames with
		# problematic encodings are handled consistently.
		os = _os_merge
		sep = os.sep
		join = os.path.join
		srcroot = normalize_path(srcroot).rstrip(sep) + sep
		destroot = normalize_path(destroot).rstrip(sep) + sep
		calc_prelink = "prelink-checksums" in self.settings.features

		# this is supposed to merge a list of files.  There will be 2 forms of argument passing.
		if isinstance(stufftomerge, basestring):
			#A directory is specified.  Figure out protection paths, listdir() it and process it.
			mergelist = os.listdir(join(srcroot, stufftomerge))
			offset = stufftomerge
		else:
			mergelist = stufftomerge
			offset = ""

		for i, x in enumerate(mergelist):

			mysrc = join(srcroot, offset, x)
			mydest = join(destroot, offset, x)
			# myrealdest is mydest without the $ROOT prefix (makes a difference if ROOT!="/")
			myrealdest = join(sep, offset, x)
			# stat file once, test using S_* macros many times (faster that way)
			mystat = os.lstat(mysrc)
			mymode = mystat[stat.ST_MODE]
			# handy variables; mydest is the target object on the live filesystems;
			# mysrc is the source object in the temporary install dir
			try:
				mydstat = os.lstat(mydest)
				mydmode = mydstat.st_mode
			except OSError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
				#dest file doesn't exist
				mydstat = None
				mydmode = None

			if stat.S_ISLNK(mymode):
				# we are merging a symbolic link
				# The file name of mysrc and the actual file that it points to
				# will have earlier been forcefully converted to the 'merge'
				# encoding if necessary, but the content of the symbolic link
				# may need to be forcefully converted here.
				myto = _os.readlink(_unicode_encode(mysrc,
					encoding=_encodings['merge'], errors='strict'))
				try:
					myto = _unicode_decode(myto,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeDecodeError:
					# Undecodable link target: round-trip through ascii with
					# backslashreplace so the on-disk symlink is rewritten to
					# a representable (escaped) form.
					myto = _unicode_decode(myto, encoding=_encodings['merge'],
						errors='replace')
					myto = _unicode_encode(myto, encoding='ascii',
						errors='backslashreplace')
					myto = _unicode_decode(myto, encoding=_encodings['merge'],
						errors='replace')
					os.unlink(mysrc)
					os.symlink(myto, mysrc)

				# Pass in the symlink target in order to bypass the
				# os.readlink() call inside abssymlink(), since that
				# call is unsafe if the merge encoding is not ascii
				# or utf_8 (see bug #382021).
				myabsto = abssymlink(mysrc, target=myto)

				if myabsto.startswith(srcroot):
					myabsto = myabsto[len(srcroot):]
				myabsto = myabsto.lstrip(sep)
				# Strip a leading ${D} from the literal link target, since
				# the image directory prefix is meaningless on the livefs.
				if self.settings and self.settings["D"]:
					if myto.startswith(self.settings["D"]):
						myto = myto[len(self.settings["D"]):]
				# myrealto contains the path of the real file to which this symlink points.
				# we can simply test for existence of this file to see if the target has been merged yet
				myrealto = normalize_path(os.path.join(destroot, myabsto))
				if mydmode!=None:
					#destination exists
					if stat.S_ISDIR(mydmode):
						# we can't merge a symlink over a directory
						newdest = self._new_backup_path(mydest)
						msg = []
						msg.append("")
						msg.append(_("Installation of a symlink is blocked by a directory:"))
						msg.append("  '%s'" % mydest)
						msg.append(_("This symlink will be merged with a different name:"))
						msg.append("  '%s'" % newdest)
						msg.append("")
						self._eerror("preinst", msg)
						mydest = newdest

					elif not stat.S_ISLNK(mydmode):
						if os.path.exists(mysrc) and stat.S_ISDIR(os.stat(mysrc)[stat.ST_MODE]):
							# Kill file blocking installation of symlink to dir #71787
							pass
						elif self.isprotected(mydest):
							# Use md5 of the target in ${D} if it exists...
							try:
								newmd5 = perform_md5(join(srcroot, myabsto))
							except FileNotFound:
								# Maybe the target is merged already.
								try:
									newmd5 = perform_md5(myrealto)
								except FileNotFound:
									newmd5 = None
							mydest = new_protect_filename(mydest, newmd5=newmd5)

				# if secondhand is None it means we're operating in "force" mode and should not create a second hand.
				if (secondhand != None) and (not os.path.exists(myrealto)):
					# either the target directory doesn't exist yet or the target file doesn't exist -- or
					# the target is a broken symlink.  We will add this file to our "second hand" and merge
					# it later.
					secondhand.append(mysrc[len(srcroot):])
					continue
				# unlinking no longer necessary; "movefile" will overwrite symlinks atomically and correctly
				mymtime = movefile(mysrc, mydest, newmtime=thismtime,
					sstat=mystat, mysettings=self.settings,
					encoding=_encodings['merge'])
				if mymtime != None:
					showMessage(">>> %s -> %s\n" % (mydest, myto))
					outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
				else:
					showMessage(_("!!! Failed to move file.\n"),
						level=logging.ERROR, noiselevel=-1)
					showMessage("!!! %s -> %s\n" % (mydest, myto),
						level=logging.ERROR, noiselevel=-1)
					return 1
			elif stat.S_ISDIR(mymode):
				# we are merging a directory
				if mydmode != None:
					# destination exists

					if bsd_chflags:
						# Save then clear flags on dest.
						dflags = mydstat.st_flags
						if dflags != 0:
							bsd_chflags.lchflags(mydest, 0)

					if not os.access(mydest, os.W_OK):
						pkgstuff = pkgsplit(self.pkg)
						writemsg(_("\n!!! Cannot write to '%s'.\n") % mydest, noiselevel=-1)
						writemsg(_("!!! Please check permissions and directories for broken symlinks.\n"))
						writemsg(_("!!! You may start the merge process again by using ebuild:\n"))
						writemsg("!!! ebuild "+self.settings["PORTDIR"]+"/"+self.cat+"/"+pkgstuff[0]+"/"+self.pkg+".ebuild merge\n")
						writemsg(_("!!! And finish by running this: env-update\n\n"))
						return 1

					if stat.S_ISDIR(mydmode) or \
						(stat.S_ISLNK(mydmode) and os.path.isdir(mydest)):
						# a symlink to an existing directory will work for us; keep it:
						showMessage("--- %s/\n" % mydest)
						if bsd_chflags:
							bsd_chflags.lchflags(mydest, dflags)
					else:
						# a non-directory and non-symlink-to-directory.  Won't work for us.  Move out of the way.
						backup_dest = self._new_backup_path(mydest)
						msg = []
						msg.append("")
						msg.append(_("Installation of a directory is blocked by a file:"))
						msg.append("  '%s'" % mydest)
						msg.append(_("This file will be renamed to a different name:"))
						msg.append("  '%s'" % backup_dest)
						msg.append("")
						self._eerror("preinst", msg)
						if movefile(mydest, backup_dest,
							mysettings=self.settings,
							encoding=_encodings['merge']) is None:
							return 1
						showMessage(_("bak %s %s.backup\n") % (mydest, mydest),
							level=logging.ERROR, noiselevel=-1)
						#now create our directory
						try:
							if self.settings.selinux_enabled():
								_selinux_merge.mkdir(mydest, mysrc)
							else:
								os.mkdir(mydest)
						except OSError as e:
							# Error handling should be equivalent to
							# portage.util.ensure_dirs() for cases
							# like bug #187518.
							if e.errno in (errno.EEXIST,):
								pass
							elif os.path.isdir(mydest):
								pass
							else:
								raise
							del e

						if bsd_chflags:
							bsd_chflags.lchflags(mydest, dflags)
						# Copy mode and ownership from the source directory.
						os.chmod(mydest, mystat[0])
						os.chown(mydest, mystat[4], mystat[5])
						showMessage(">>> %s/\n" % mydest)
				else:
					try:
						#destination doesn't exist
						if self.settings.selinux_enabled():
							_selinux_merge.mkdir(mydest, mysrc)
						else:
							os.mkdir(mydest)
					except OSError as e:
						# Error handling should be equivalent to
						# portage.util.ensure_dirs() for cases
						# like bug #187518.
						if e.errno in (errno.EEXIST,):
							pass
						elif os.path.isdir(mydest):
							pass
						else:
							raise
						del e
					os.chmod(mydest, mystat[0])
					os.chown(mydest, mystat[4], mystat[5])
					showMessage(">>> %s/\n" % mydest)
				outfile.write("dir "+myrealdest+"\n")
				# recurse and merge this directory
				if self.mergeme(srcroot, destroot, outfile, secondhand,
					join(offset, x), cfgfiledict, thismtime):
					return 1
			elif stat.S_ISREG(mymode):
				# we are merging a regular file
				mymd5 = perform_md5(mysrc, calc_prelink=calc_prelink)
				# calculate config file protection stuff
				mydestdir = os.path.dirname(mydest)
				moveme = 1
				zing = "!!!"
				mymtime = None
				protected = self.isprotected(mydest)
				if mydmode != None:
					# destination file exists
					
					if stat.S_ISDIR(mydmode):
						# install of destination is blocked by an existing directory with the same name
						newdest = self._new_backup_path(mydest)
						msg = []
						msg.append("")
						msg.append(_("Installation of a regular file is blocked by a directory:"))
						msg.append("  '%s'" % mydest)
						msg.append(_("This file will be merged with a different name:"))
						msg.append("  '%s'" % newdest)
						msg.append("")
						self._eerror("preinst", msg)
						mydest = newdest

					elif stat.S_ISREG(mydmode) or (stat.S_ISLNK(mydmode) and os.path.exists(mydest) and stat.S_ISREG(os.stat(mydest)[stat.ST_MODE])):
						# install of destination is blocked by an existing regular file,
						# or by a symlink to an existing regular file;
						# now, config file management may come into play.
						# we only need to tweak mydest if cfg file management is in play.
						if protected:
							# we have a protection path; enable config file management.
							cfgprot = 0
							destmd5 = perform_md5(mydest, calc_prelink=calc_prelink)
							if mymd5 == destmd5:
								#file already in place; simply update mtimes of destination
								moveme = 1
							else:
								if mymd5 == cfgfiledict.get(myrealdest, [None])[0]:
									""" An identical update has previously been
									merged.  Skip it unless the user has chosen
									--noconfmem."""
									moveme = cfgfiledict["IGNORE"]
									cfgprot = cfgfiledict["IGNORE"]
									if not moveme:
										zing = "---"
										mymtime = mystat[stat.ST_MTIME]
								else:
									moveme = 1
									cfgprot = 1
							if moveme:
								# Merging a new file, so update confmem.
								cfgfiledict[myrealdest] = [mymd5]
							elif destmd5 == cfgfiledict.get(myrealdest, [None])[0]:
								"""A previously remembered update has been
								accepted, so it is removed from confmem."""
								del cfgfiledict[myrealdest]

							if cfgprot:
								mydest = new_protect_filename(mydest, newmd5=mymd5)

				# whether config protection or not, we merge the new file the
				# same way.  Unless moveme=0 (blocking directory)
				if moveme:
					# Create hardlinks only for source files that already exist
					# as hardlinks (having identical st_dev and st_ino).
					hardlink_key = (mystat.st_dev, mystat.st_ino)

					hardlink_candidates = self._md5_merge_map.get(hardlink_key)
					if hardlink_candidates is None:
						hardlink_candidates = []
						self._md5_merge_map[hardlink_key] = hardlink_candidates

					mymtime = movefile(mysrc, mydest, newmtime=thismtime,
						sstat=mystat, mysettings=self.settings,
						hardlink_candidates=hardlink_candidates,
						encoding=_encodings['merge'])
					if mymtime is None:
						return 1
					if hardlink_candidates is not None:
						hardlink_candidates.append(mydest)
					zing = ">>>"

				if mymtime != None:
					outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
				showMessage("%s %s\n" % (zing,mydest))
			else:
				# we are merging a fifo or device node
				zing = "!!!"
				if mydmode is None:
					# destination doesn't exist
					if movefile(mysrc, mydest, newmtime=thismtime,
						sstat=mystat, mysettings=self.settings,
						encoding=_encodings['merge']) is not None:
						zing = ">>>"
					else:
						return 1
				if stat.S_ISFIFO(mymode):
					outfile.write("fif %s\n" % myrealdest)
				else:
					outfile.write("dev %s\n" % myrealdest)
				showMessage(zing + " " + mydest + "\n")
4278
	def merge(self, mergeroot, inforoot, myroot=None, myebuild=None, cleanup=0,
		mydbapi=None, prev_mtimes=None, counter=None):
		"""
		Merge this package instance into the live filesystem and the
		vardb via treewalk(), then run the success/die hooks and,
		unless FEATURES=noclean blocks it, the clean phase.

		@param mergeroot: path of the image directory to merge from
		@param inforoot: path of the build-info directory for this package
		@param myroot: ignored, self._eroot is used instead
		@param myebuild: path to the ebuild; defaults to the copy inside
			inforoot when the clean phase needs it
		@param cleanup: passed through to treewalk()
		@param mydbapi: dbapi of the source tree, passed to treewalk()
			and to doebuild_environment() for the clean phase
		@param prev_mtimes: passed through to treewalk()
		@param counter: install counter, passed through to treewalk()
		@rtype: int
		@return: the return value of treewalk() (os.EX_OK on success)
		"""
		myroot = None
		retval = -1
		parallel_install = "parallel-install" in self.settings.features
		# Without parallel-install the whole merge runs under the vardb
		# lock; with it, locking is deferred to finer-grained operations.
		if not parallel_install:
			self.lockdb()
		self.vartree.dbapi._bump_mtime(self.mycpv)
		if self._scheduler is None:
			self._scheduler = PollScheduler().sched_iface
		try:
			retval = self.treewalk(mergeroot, myroot, inforoot, myebuild,
				cleanup=cleanup, mydbapi=mydbapi, prev_mtimes=prev_mtimes,
				counter=counter)

			# If PORTAGE_BUILDDIR doesn't exist, then it probably means
			# fail-clean is enabled, and the success/die hooks have
			# already been called by EbuildPhase.
			if os.path.isdir(self.settings['PORTAGE_BUILDDIR']):

				if retval == os.EX_OK:
					phase = 'success_hooks'
				else:
					phase = 'die_hooks'

				ebuild_phase = MiscFunctionsProcess(
					background=False, commands=[phase],
					scheduler=self._scheduler, settings=self.settings)
				ebuild_phase.start()
				ebuild_phase.wait()
				self._elog_process()

				# Run the clean phase unless noclean is set, but with
				# fail-clean also clean up after a failed merge.
				if 'noclean' not in self.settings.features and \
					(retval == os.EX_OK or \
					'fail-clean' in self.settings.features):
					if myebuild is None:
						myebuild = os.path.join(inforoot, self.pkg + ".ebuild")

					doebuild_environment(myebuild, "clean",
						settings=self.settings, db=mydbapi)
					phase = EbuildPhase(background=False, phase="clean",
						scheduler=self._scheduler, settings=self.settings)
					phase.start()
					phase.wait()
		finally:
			self.settings.pop('REPLACING_VERSIONS', None)
			if self.vartree.dbapi._linkmap is None:
				# preserve-libs is entirely disabled
				pass
			else:
				self.vartree.dbapi._linkmap._clear_cache()
			self.vartree.dbapi._bump_mtime(self.mycpv)
			if not parallel_install:
				self.unlockdb()
		return retval
4337
4338         def getstring(self,name):
4339                 "returns contents of a file with whitespace converted to spaces"
4340                 if not os.path.exists(self.dbdir+"/"+name):
4341                         return ""
4342                 mydata = io.open(
4343                         _unicode_encode(os.path.join(self.dbdir, name),
4344                         encoding=_encodings['fs'], errors='strict'),
4345                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4346                         ).read().split()
4347                 return " ".join(mydata)
4348
4349         def copyfile(self,fname):
4350                 shutil.copyfile(fname,self.dbdir+"/"+os.path.basename(fname))
4351
4352         def getfile(self,fname):
4353                 if not os.path.exists(self.dbdir+"/"+fname):
4354                         return ""
4355                 return io.open(_unicode_encode(os.path.join(self.dbdir, fname),
4356                         encoding=_encodings['fs'], errors='strict'), 
4357                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4358                         ).read()
4359
4360         def setfile(self,fname,data):
4361                 kwargs = {}
4362                 if fname == 'environment.bz2' or not isinstance(data, basestring):
4363                         kwargs['mode'] = 'wb'
4364                 else:
4365                         kwargs['mode'] = 'w'
4366                         kwargs['encoding'] = _encodings['repo.content']
4367                 write_atomic(os.path.join(self.dbdir, fname), data, **kwargs)
4368
4369         def getelements(self,ename):
4370                 if not os.path.exists(self.dbdir+"/"+ename):
4371                         return []
4372                 mylines = io.open(_unicode_encode(
4373                         os.path.join(self.dbdir, ename),
4374                         encoding=_encodings['fs'], errors='strict'),
4375                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4376                         ).readlines()
4377                 myreturn = []
4378                 for x in mylines:
4379                         for y in x[:-1].split():
4380                                 myreturn.append(y)
4381                 return myreturn
4382
4383         def setelements(self,mylist,ename):
4384                 myelement = io.open(_unicode_encode(
4385                         os.path.join(self.dbdir, ename),
4386                         encoding=_encodings['fs'], errors='strict'),
4387                         mode='w', encoding=_encodings['repo.content'],
4388                         errors='backslashreplace')
4389                 for x in mylist:
4390                         myelement.write(_unicode_decode(x+"\n"))
4391                 myelement.close()
4392
4393         def isregular(self):
4394                 "Is this a regular package (does it have a CATEGORY file?  A dblink can be virtual *and* regular)"
4395                 return os.path.exists(os.path.join(self.dbdir, "CATEGORY"))
4396
def merge(mycat, mypkg, pkgloc, infloc,
	myroot=None, settings=None, myebuild=None,
	mytree=None, mydbapi=None, vartree=None, prev_mtimes=None, blockers=None,
	scheduler=None):
	"""
	Merge a built package into the live filesystem by spawning a
	MergeProcess task and waiting for it to finish.

	@param myroot: ignored, settings['EROOT'] is used instead
	@raise TypeError: if settings is None
	@rtype: int
	@return: the exit code of the merge task, or errno.EACCES when
		EROOT is not writable
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	eroot = settings['EROOT']
	if not os.access(eroot, os.W_OK):
		writemsg(_("Permission denied: access('%s', W_OK)\n") % eroot,
			noiselevel=-1)
		return errno.EACCES
	merge_task = MergeProcess(
		mycat=mycat, mypkg=mypkg, settings=settings,
		treetype=mytree, vartree=vartree,
		scheduler=(scheduler or PollScheduler().sched_iface),
		background=(settings.get('PORTAGE_BACKGROUND') == '1'),
		blockers=blockers, pkgloc=pkgloc,
		infloc=infloc, myebuild=myebuild, mydbapi=mydbapi,
		prev_mtimes=prev_mtimes, logfile=settings.get('PORTAGE_LOG_FILE'))
	merge_task.start()
	return merge_task.wait()
4422
def unmerge(cat, pkg, myroot=None, settings=None,
	mytrimworld=None, vartree=None,
	ldpath_mtimes=None, scheduler=None):
	"""
	Unmerge the given package from the live filesystem and, on
	success, delete its vardb entry.

	@param cat: package category
	@param pkg: package name-version string
	@param myroot: ignored, settings['EROOT'] is used instead
	@param settings: config instance (required)
	@param mytrimworld: ignored
	@param vartree: vartree instance to unmerge from
	@param ldpath_mtimes: passed through to dblink.unmerge()
	@param scheduler: polling scheduler interface for the dblink
	@raise TypeError: if settings is None
	@rtype: int
	@return: os.EX_OK if the package is not installed or was removed
		successfully, otherwise the return value of dblink.unmerge()
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	mylink = dblink(cat, pkg, settings=settings, treetype="vartree",
		vartree=vartree, scheduler=scheduler)
	vartree = mylink.vartree
	parallel_install = "parallel-install" in settings.features
	# Without parallel-install the whole unmerge runs under the vardb
	# lock; with it, locking is deferred to finer-grained operations.
	if not parallel_install:
		mylink.lockdb()
	try:
		if mylink.exists():
			retval = mylink.unmerge(ldpath_mtimes=ldpath_mtimes)
			if retval == os.EX_OK:
				# NOTE(review): lockdb() may already be held here when
				# parallel-install is disabled — assumes the lock nests
				# safely (reference counted); confirm in lockdb().
				mylink.lockdb()
				try:
					mylink.delete()
				finally:
					mylink.unlockdb()
			return retval
		return os.EX_OK
	finally:
		if vartree.dbapi._linkmap is None:
			# preserve-libs is entirely disabled
			pass
		else:
			vartree.dbapi._linkmap._clear_cache()
		if not parallel_install:
			mylink.unlockdb()
4458
def write_contents(contents, root, f):
	"""
	Write contents to any file like object. The file will be left open.

	Entries are emitted sorted by path, in CONTENTS format:
	"obj <path> <md5> <mtime>", "sym <path> -> <target> <mtime>",
	or "<type> <path>" for dir/dev/fif entries.
	"""
	# Strip the root prefix but keep its trailing separator as the
	# leading slash of each recorded path.
	prefix_len = len(root) - 1
	for path in sorted(contents):
		entry = contents[path]
		node_type = entry[0]
		rel_path = path[prefix_len:]
		if node_type == "obj":
			_, mtime, md5sum = entry
			line = "%s %s %s %s\n" % (node_type, rel_path, md5sum, mtime)
		elif node_type == "sym":
			_, mtime, link = entry
			line = "%s %s -> %s %s\n" % (node_type, rel_path, link, mtime)
		else: # dir, dev, fif
			line = "%s %s\n" % (node_type, rel_path)
		f.write(line)
4479
def tar_contents(contents, root, tar, protect=None, onProgress=None):
	"""
	Add the files listed in a CONTENTS dict to an open tar archive.

	@param contents: dict mapping absolute merged paths to CONTENTS
		entries (first element of each value is the entry type)
	@param root: filesystem root that the contents paths live under;
		it is stripped from each path to form the archive name
	@param tar: open tarfile.TarFile object to add entries to
	@param protect: optional callable; when it returns True for a
		regular file's path, an empty placeholder is archived instead
		of the file's contents
	@param onProgress: optional callback invoked as
		onProgress(maxval, curval) to report progress
	@raise ValueError: if a contents path does not start with root
	"""
	os = _os_merge
	encoding = _encodings['merge']

	# Verify that every path is encodable with the merge encoding.
	try:
		for x in contents:
			_unicode_encode(x,
				encoding=_encodings['merge'],
				errors='strict')
	except UnicodeEncodeError:
		# The package appears to have been merged with a
		# different value of sys.getfilesystemencoding(),
		# so fall back to utf_8 if appropriate.
		try:
			for x in contents:
				_unicode_encode(x,
					encoding=_encodings['fs'],
					errors='strict')
		except UnicodeEncodeError:
			pass
		else:
			os = portage.os
			encoding = _encodings['fs']

	# Normalize root so it always carries exactly one trailing separator.
	root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
	# NOTE(review): id_strings appears unused in this function — confirm
	# whether it is leftover from an earlier implementation.
	id_strings = {}
	maxval = len(contents)
	curval = 0
	if onProgress:
		onProgress(maxval, 0)
	paths = list(contents)
	paths.sort()
	for path in paths:
		curval += 1
		try:
			lst = os.lstat(path)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# The file no longer exists in the live filesystem;
			# skip it but still report progress.
			if onProgress:
				onProgress(maxval, curval)
			continue
		contents_type = contents[path][0]
		if path.startswith(root):
			arcname = path[len(root):]
		else:
			raise ValueError("invalid root argument: '%s'" % root)
		live_path = path
		if 'dir' == contents_type and \
			not stat.S_ISDIR(lst.st_mode) and \
			os.path.isdir(live_path):
			# Even though this was a directory in the original ${D}, it exists
			# as a symlink to a directory in the live filesystem.  It must be
			# recorded as a real directory in the tar file to ensure that tar
			# can properly extract it's children.
			live_path = os.path.realpath(live_path)
		tarinfo = tar.gettarinfo(live_path, arcname)

		if stat.S_ISREG(lst.st_mode):
			if protect and protect(path):
				# Create an empty file as a place holder in order to avoid
				# potential collision-protect issues.
				f = tempfile.TemporaryFile()
				f.write(_unicode_encode(
					"# empty file because --include-config=n " + \
					"when `quickpkg` was used\n"))
				f.flush()
				f.seek(0)
				tarinfo.size = os.fstat(f.fileno()).st_size
				tar.addfile(tarinfo, f)
				f.close()
			else:
				f = open(_unicode_encode(path,
					encoding=encoding,
					errors='strict'), 'rb')
				try:
					tar.addfile(tarinfo, f)
				finally:
					f.close()
		else:
			# Non-regular entries (dir, sym, dev, fif) carry no payload.
			tar.addfile(tarinfo)
		if onProgress:
			onProgress(maxval, curval)