Add a _pkg_str class to cache catpkgsplit results
[portage.git] / pym / portage / dbapi / vartree.py
1 # Copyright 1998-2012 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 __all__ = [
5         "vardbapi", "vartree", "dblink"] + \
6         ["write_contents", "tar_contents"]
7
8 import portage
9 portage.proxy.lazyimport.lazyimport(globals(),
10         'portage.checksum:_perform_md5_merge@perform_md5',
11         'portage.data:portage_gid,portage_uid,secpass',
12         'portage.dbapi.dep_expand:dep_expand',
13         'portage.dbapi._MergeProcess:MergeProcess',
14         'portage.dep:dep_getkey,isjustname,match_from_list,' + \
15                 'use_reduce,_slot_re',
16         'portage.elog:collect_ebuild_messages,collect_messages,' + \
17                 'elog_process,_merge_logentries',
18         'portage.locks:lockdir,unlockdir,lockfile,unlockfile',
19         'portage.output:bold,colorize',
20         'portage.package.ebuild.doebuild:doebuild_environment,' + \
21                 '_merge_unicode_error', '_spawn_phase',
22         'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
23         'portage.update:fixdbentries',
24         'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \
25                 'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \
26                 'grabdict,normalize_path,new_protect_filename',
27         'portage.util.digraph:digraph',
28         'portage.util.env_update:env_update',
29         'portage.util.listdir:dircache,listdir',
30         'portage.util.movefile:movefile',
31         'portage.util._dyn_libs.PreservedLibsRegistry:PreservedLibsRegistry',
32         'portage.util._dyn_libs.LinkageMapELF:LinkageMapELF@LinkageMap',
33         'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,pkgcmp,' + \
34                 '_pkgsplit@pkgsplit,_pkg_str',
35         'subprocess',
36         'tarfile',
37 )
38
39 from portage.const import CACHE_PATH, CONFIG_MEMORY_FILE, \
40         PORTAGE_PACKAGE_ATOM, PRIVATE_PATH, VDB_PATH
41 from portage.const import _ENABLE_DYN_LINK_MAP, _ENABLE_PRESERVE_LIBS
42 from portage.dbapi import dbapi
43 from portage.exception import CommandNotFound, \
44         InvalidData, InvalidLocation, InvalidPackageName, \
45         FileNotFound, PermissionDenied, UnsupportedAPIException
46 from portage.localization import _
47
48 from portage import abssymlink, _movefile, bsd_chflags
49
50 # This is a special version of the os module, wrapped for unicode support.
51 from portage import os
52 from portage import shutil
53 from portage import _encodings
54 from portage import _os_merge
55 from portage import _selinux_merge
56 from portage import _unicode_decode
57 from portage import _unicode_encode
58
59 from _emerge.EbuildBuildDir import EbuildBuildDir
60 from _emerge.EbuildPhase import EbuildPhase
61 from _emerge.emergelog import emergelog
62 from _emerge.PollScheduler import PollScheduler
63 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
64
65 import errno
66 import fnmatch
67 import gc
68 import grp
69 import io
70 from itertools import chain
71 import logging
72 import os as _os
73 import pwd
74 import re
75 import stat
76 import sys
77 import tempfile
78 import textwrap
79 import time
80 import warnings
81
# Prefer the C pickle implementation on python2; on python3 the plain
# pickle module already uses the accelerated version when available.
try:
	import cPickle as pickle
except ImportError:
	import pickle

# python2/python3 compatibility aliases used throughout this module.
if sys.hexversion >= 0x3000000:
	basestring = str
	long = int
	_unicode = str
else:
	_unicode = unicode
93
class vardbapi(dbapi):

	# Directory names inside a category dir that can never be package
	# directories.  The list is folded into a single anchored regex that
	# also rejects hidden entries and interrupted -MERGING- merges.
	_excluded_dirs = ["CVS", "lost+found"]
	_excluded_dirs = [re.escape(x) for x in _excluded_dirs]
	_excluded_dirs = re.compile(r'^(\..*|-MERGING-.*|' + \
		"|".join(_excluded_dirs) + r')$')

	# Bump these whenever the corresponding on-disk cache format changes.
	_aux_cache_version        = "1"
	_owners_cache_version     = "1"

	# Number of uncached packages to trigger cache update, since
	# it's wasteful to update it for every vdb change.
	_aux_cache_threshold = 5

	# Metadata keys matching this pattern are cached in addition to the
	# fixed _aux_cache_keys set (per-ABI NEEDED.* linkage data).
	_aux_cache_keys_re = re.compile(r'^NEEDED\..*$')
	# Metadata values that may legitimately span multiple lines.
	_aux_multi_line_re = re.compile(r'^(CONTENTS|NEEDED\..*)$')
110
	def __init__(self, _unused_param=None, categories=None, settings=None, vartree=None):
		"""
		The categories parameter is unused since the dbapi class
		now has a categories property that is generated from the
		available packages.

		@param _unused_param: deprecated; a DeprecationWarning is issued
			when it is passed and differs from settings['ROOT']
		@param settings: config instance; defaults to portage.settings
		@param vartree: owning vartree; defaults to the global
			portage.db entry for this EROOT
		"""

		# Used by emerge to check whether any packages
		# have been added or removed.
		self._pkgs_changed = False

		# The _aux_cache_threshold doesn't work as designed
		# if the cache is flushed from a subprocess, so we
		# use this to avoid wasteful vdb cache updates.
		self._flush_cache_enabled = True

		# cache for category directory mtimes
		self.mtdircache = {}

		# cache for dependency checks (match() results, per category)
		self.matchcache = {}

		# cache for cp_list results
		self.cpcache = {}

		self.blockers = None
		if settings is None:
			settings = portage.settings
		self.settings = settings

		if _unused_param is not None and _unused_param != settings['ROOT']:
			warnings.warn("The first parameter of the "
				"portage.dbapi.vartree.vardbapi"
				" constructor is now unused. Use "
				"settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)

		self._eroot = settings['EROOT']
		self._dbroot = self._eroot + VDB_PATH
		# Reentrant vdb directory lock state (see lock()/unlock()).
		self._lock = None
		self._lock_count = 0

		self._conf_mem_file = self._eroot + CONFIG_MEMORY_FILE
		# Reentrant config-memory file lock state (see _fs_lock()).
		self._fs_lock_obj = None
		self._fs_lock_count = 0

		if vartree is None:
			vartree = portage.db[settings['EROOT']]['vartree']
		self.vartree = vartree
		# Metadata keys that aux_get() caches persistently on disk.
		self._aux_cache_keys = set(
			["BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "DESCRIPTION",
			"EAPI", "HOMEPAGE", "IUSE", "KEYWORDS",
			"LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND",
			"repository", "RESTRICT" , "SLOT", "USE", "DEFINED_PHASES",
			])
		# Lazily populated by the _aux_cache property.
		self._aux_cache_obj = None
		self._aux_cache_filename = os.path.join(self._eroot,
			CACHE_PATH, "vdb_metadata.pickle")
		self._counter_path = os.path.join(self._eroot,
			CACHE_PATH, "counter")

		self._plib_registry = None
		if _ENABLE_PRESERVE_LIBS:
			self._plib_registry = PreservedLibsRegistry(settings["ROOT"],
				os.path.join(self._eroot, PRIVATE_PATH,
				"preserved_libs_registry"))

		self._linkmap = None
		if _ENABLE_DYN_LINK_MAP:
			self._linkmap = LinkageMap(self)
		self._owners = self._owners_db(self)

		self._cached_counter = None
184
185         @property
186         def root(self):
187                 warnings.warn("The root attribute of "
188                         "portage.dbapi.vartree.vardbapi"
189                         " is deprecated. Use "
190                         "settings['ROOT'] instead.",
191                         DeprecationWarning, stacklevel=3)
192                 return self.settings['ROOT']
193
194         def getpath(self, mykey, filename=None):
195                 # This is an optimized hotspot, so don't use unicode-wrapped
196                 # os module and don't use os.path.join().
197                 rValue = self._eroot + VDB_PATH + _os.sep + mykey
198                 if filename is not None:
199                         # If filename is always relative, we can do just
200                         # rValue += _os.sep + filename
201                         rValue = _os.path.join(rValue, filename)
202                 return rValue
203
204         def lock(self):
205                 """
206                 Acquire a reentrant lock, blocking, for cooperation with concurrent
207                 processes. State is inherited by subprocesses, allowing subprocesses
208                 to reenter a lock that was acquired by a parent process. However,
209                 a lock can be released only by the same process that acquired it.
210                 """
211                 if self._lock_count:
212                         self._lock_count += 1
213                 else:
214                         if self._lock is not None:
215                                 raise AssertionError("already locked")
216                         # At least the parent needs to exist for the lock file.
217                         ensure_dirs(self._dbroot)
218                         self._lock = lockdir(self._dbroot)
219                         self._lock_count += 1
220
221         def unlock(self):
222                 """
223                 Release a lock, decrementing the recursion level. Each unlock() call
224                 must be matched with a prior lock() call, or else an AssertionError
225                 will be raised if unlock() is called while not locked.
226                 """
227                 if self._lock_count > 1:
228                         self._lock_count -= 1
229                 else:
230                         if self._lock is None:
231                                 raise AssertionError("not locked")
232                         self._lock_count = 0
233                         unlockdir(self._lock)
234                         self._lock = None
235
236         def _fs_lock(self):
237                 """
238                 Acquire a reentrant lock, blocking, for cooperation with concurrent
239                 processes.
240                 """
241                 if self._fs_lock_count < 1:
242                         if self._fs_lock_obj is not None:
243                                 raise AssertionError("already locked")
244                         try:
245                                 self._fs_lock_obj = lockfile(self._conf_mem_file)
246                         except InvalidLocation:
247                                 self.settings._init_dirs()
248                                 self._fs_lock_obj = lockfile(self._conf_mem_file)
249                 self._fs_lock_count += 1
250
251         def _fs_unlock(self):
252                 """
253                 Release a lock, decrementing the recursion level.
254                 """
255                 if self._fs_lock_count <= 1:
256                         if self._fs_lock_obj is None:
257                                 raise AssertionError("not locked")
258                         unlockfile(self._fs_lock_obj)
259                         self._fs_lock_obj = None
260                 self._fs_lock_count -= 1
261
262         def _bump_mtime(self, cpv):
263                 """
264                 This is called before an after any modifications, so that consumers
265                 can use directory mtimes to validate caches. See bug #290428.
266                 """
267                 base = self._eroot + VDB_PATH
268                 cat = catsplit(cpv)[0]
269                 catdir = base + _os.sep + cat
270                 t = time.time()
271                 t = (t, t)
272                 try:
273                         for x in (catdir, base):
274                                 os.utime(x, t)
275                 except OSError:
276                         ensure_dirs(catdir)
277
278         def cpv_exists(self, mykey, myrepo=None):
279                 "Tells us whether an actual ebuild exists on disk (no masking)"
280                 return os.path.exists(self.getpath(mykey))
281
282         def cpv_counter(self, mycpv):
283                 "This method will grab the COUNTER. Returns a counter value."
284                 try:
285                         return long(self.aux_get(mycpv, ["COUNTER"])[0])
286                 except (KeyError, ValueError):
287                         pass
288                 writemsg_level(_("portage: COUNTER for %s was corrupted; " \
289                         "resetting to value of 0\n") % (mycpv,),
290                         level=logging.ERROR, noiselevel=-1)
291                 return 0
292
293         def cpv_inject(self, mycpv):
294                 "injects a real package into our on-disk database; assumes mycpv is valid and doesn't already exist"
295                 ensure_dirs(self.getpath(mycpv))
296                 counter = self.counter_tick(mycpv=mycpv)
297                 # write local package counter so that emerge clean does the right thing
298                 write_atomic(self.getpath(mycpv, filename="COUNTER"), str(counter))
299
300         def isInjected(self, mycpv):
301                 if self.cpv_exists(mycpv):
302                         if os.path.exists(self.getpath(mycpv, filename="INJECTED")):
303                                 return True
304                         if not os.path.exists(self.getpath(mycpv, filename="CONTENTS")):
305                                 return True
306                 return False
307
	def move_ent(self, mylist, repo_match=None):
		"""
		Apply a package-move update to the installed-package database:
		mylist[1] is the old cp and mylist[2] the new cp.  repo_match,
		when given, is a predicate on the repository name that limits
		which installed entries are moved.  Returns the number of
		entries moved.  Raises InvalidPackageName if either cp is not
		a plain category/package name.
		"""
		origcp = mylist[1]
		newcp = mylist[2]

		# sanity check
		for atom in (origcp, newcp):
			if not isjustname(atom):
				raise InvalidPackageName(str(atom))
		origmatches = self.match(origcp, use_cache=0)
		moves = 0
		if not origmatches:
			return moves
		for mycpv in origmatches:
			mycpv_cp = cpv_getkey(mycpv)
			if mycpv_cp != origcp:
				# Ignore PROVIDE virtual match.
				continue
			if repo_match is not None \
				and not repo_match(self.aux_get(mycpv, ['repository'])[0]):
				continue
			mynewcpv = mycpv.replace(mycpv_cp, str(newcp), 1)
			mynewcat = catsplit(newcp)[0]
			origpath = self.getpath(mycpv)
			if not os.path.exists(origpath):
				continue
			moves += 1
			if not os.path.exists(self.getpath(mynewcat)):
				#create the directory
				ensure_dirs(self.getpath(mynewcat))
			newpath = self.getpath(mynewcpv)
			if os.path.exists(newpath):
				#dest already exists; keep this puppy where it is.
				continue
			_movefile(origpath, newpath, mysettings=self.settings)
			# Invalidate caches for both the old and the new entry.
			self._clear_pkg_cache(self._dblink(mycpv))
			self._clear_pkg_cache(self._dblink(mynewcpv))

			# We need to rename the ebuild now.
			old_pf = catsplit(mycpv)[1]
			new_pf = catsplit(mynewcpv)[1]
			if new_pf != old_pf:
				try:
					os.rename(os.path.join(newpath, old_pf + ".ebuild"),
						os.path.join(newpath, new_pf + ".ebuild"))
				except EnvironmentError as e:
					# A missing ebuild is tolerated; anything else is fatal.
					if e.errno != errno.ENOENT:
						raise
					del e
			# Keep PF and CATEGORY in sync with the renamed entry.
			write_atomic(os.path.join(newpath, "PF"), new_pf+"\n")
			write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat+"\n")
			fixdbentries([mylist], newpath)
		return moves
360
	def cp_list(self, mycp, use_cache=1):
		"""
		Return the installed cpvs for the given cp (category/package),
		as _pkg_str instances, sorted ascending.  Results are cached in
		self.cpcache and validated against the category directory mtime
		unless use_cache is false.
		"""
		mysplit=catsplit(mycp)
		if mysplit[0] == '*':
			# A bare '*' category is reduced to the empty string.
			mysplit[0] = mysplit[0][1:]
		try:
			mystat = os.stat(self.getpath(mysplit[0])).st_mtime
		except OSError:
			mystat = 0
		if use_cache and mycp in self.cpcache:
			cpc = self.cpcache[mycp]
			# Cached result is valid only while the dir mtime is unchanged.
			if cpc[0] == mystat:
				return cpc[1][:]
		cat_dir = self.getpath(mysplit[0])
		try:
			dir_list = os.listdir(cat_dir)
		except EnvironmentError as e:
			if e.errno == PermissionDenied.errno:
				raise PermissionDenied(cat_dir)
			del e
			dir_list = []

		returnme = []
		for x in dir_list:
			if self._excluded_dirs.match(x) is not None:
				continue
			ps = pkgsplit(x)
			if not ps:
				self.invalidentry(os.path.join(self.getpath(mysplit[0]), x))
				continue
			if len(mysplit) > 1:
				if ps[0] == mysplit[1]:
					# _pkg_str caches catpkgsplit results on the string.
					returnme.append(_pkg_str(mysplit[0]+"/"+x))
		self._cpv_sort_ascending(returnme)
		if use_cache:
			self.cpcache[mycp] = [mystat, returnme[:]]
		elif mycp in self.cpcache:
			del self.cpcache[mycp]
		return returnme
399
	def cpv_all(self, use_cache=1):
		"""
		Return a list of all installed cpvs ("cat/pkg-ver" strings).

		Set use_cache=0 to bypass the portage.cachedir() cache in cases
		when the accuracy of mtime staleness checks should not be trusted
		(generally this is only necessary in critical sections that
		involve merge or unmerge of packages).
		"""
		returnme = []
		basepath = os.path.join(self._eroot, VDB_PATH) + os.path.sep

		if use_cache:
			from portage import listdir
		else:
			# Uncached fallback that shadows portage.listdir: list only
			# subdirectories, ignoring the keyword arguments.
			def listdir(p, **kwargs):
				try:
					return [x for x in os.listdir(p) \
						if os.path.isdir(os.path.join(p, x))]
				except EnvironmentError as e:
					if e.errno == PermissionDenied.errno:
						raise PermissionDenied(p)
					del e
					return []

		for x in listdir(basepath, EmptyOnError=1, ignorecvs=1, dirsonly=1):
			if self._excluded_dirs.match(x) is not None:
				continue
			if not self._category_re.match(x):
				continue
			for y in listdir(basepath + x, EmptyOnError=1, dirsonly=1):
				if self._excluded_dirs.match(y) is not None:
					continue
				subpath = x + "/" + y
				# -MERGING- should never be a cpv, nor should files.
				try:
					if catpkgsplit(subpath) is None:
						self.invalidentry(self.getpath(subpath))
						continue
				except InvalidData:
					self.invalidentry(self.getpath(subpath))
					continue
				returnme.append(subpath)

		return returnme
443
444         def cp_all(self, use_cache=1):
445                 mylist = self.cpv_all(use_cache=use_cache)
446                 d={}
447                 for y in mylist:
448                         if y[0] == '*':
449                                 y = y[1:]
450                         try:
451                                 mysplit = catpkgsplit(y)
452                         except InvalidData:
453                                 self.invalidentry(self.getpath(y))
454                                 continue
455                         if not mysplit:
456                                 self.invalidentry(self.getpath(y))
457                                 continue
458                         d[mysplit[0]+"/"+mysplit[1]] = None
459                 return list(d)
460
	def checkblockers(self, origdep):
		# Intentional no-op: vardbapi performs no blocker checking of its
		# own here; origdep is ignored.
		pass
463
464         def _clear_cache(self):
465                 self.mtdircache.clear()
466                 self.matchcache.clear()
467                 self.cpcache.clear()
468                 self._aux_cache_obj = None
469
470         def _add(self, pkg_dblink):
471                 self._pkgs_changed = True
472                 self._clear_pkg_cache(pkg_dblink)
473
474         def _remove(self, pkg_dblink):
475                 self._pkgs_changed = True
476                 self._clear_pkg_cache(pkg_dblink)
477
478         def _clear_pkg_cache(self, pkg_dblink):
479                 # Due to 1 second mtime granularity in <python-2.5, mtime checks
480                 # are not always sufficient to invalidate vardbapi caches. Therefore,
481                 # the caches need to be actively invalidated here.
482                 self.mtdircache.pop(pkg_dblink.cat, None)
483                 self.matchcache.pop(pkg_dblink.cat, None)
484                 self.cpcache.pop(pkg_dblink.mysplit[0], None)
485                 dircache.pop(pkg_dblink.dbcatdir, None)
486
487         def match(self, origdep, use_cache=1):
488                 "caching match function"
489                 mydep = dep_expand(
490                         origdep, mydb=self, use_cache=use_cache, settings=self.settings)
491                 cache_key = (mydep, mydep.unevaluated_atom)
492                 mykey = dep_getkey(mydep)
493                 mycat = catsplit(mykey)[0]
494                 if not use_cache:
495                         if mycat in self.matchcache:
496                                 del self.mtdircache[mycat]
497                                 del self.matchcache[mycat]
498                         return list(self._iter_match(mydep,
499                                 self.cp_list(mydep.cp, use_cache=use_cache)))
500                 try:
501                         curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime
502                 except (IOError, OSError):
503                         curmtime=0
504
505                 if mycat not in self.matchcache or \
506                         self.mtdircache[mycat] != curmtime:
507                         # clear cache entry
508                         self.mtdircache[mycat] = curmtime
509                         self.matchcache[mycat] = {}
510                 if mydep not in self.matchcache[mycat]:
511                         mymatch = list(self._iter_match(mydep,
512                                 self.cp_list(mydep.cp, use_cache=use_cache)))
513                         self.matchcache[mycat][cache_key] = mymatch
514                 return self.matchcache[mycat][cache_key][:]
515
516         def findname(self, mycpv, myrepo=None):
517                 return self.getpath(str(mycpv), filename=catsplit(mycpv)[1]+".ebuild")
518
519         def flush_cache(self):
520                 """If the current user has permission and the internal aux_get cache has
521                 been updated, save it to disk and mark it unmodified.  This is called
522                 by emerge after it has loaded the full vdb for use in dependency
523                 calculations.  Currently, the cache is only written if the user has
524                 superuser privileges (since that's required to obtain a lock), but all
525                 users have read access and benefit from faster metadata lookups (as
526                 long as at least part of the cache is still valid)."""
527                 if self._flush_cache_enabled and \
528                         self._aux_cache is not None and \
529                         len(self._aux_cache["modified"]) >= self._aux_cache_threshold and \
530                         secpass >= 2:
531                         self._owners.populate() # index any unindexed contents
532                         valid_nodes = set(self.cpv_all())
533                         for cpv in list(self._aux_cache["packages"]):
534                                 if cpv not in valid_nodes:
535                                         del self._aux_cache["packages"][cpv]
536                         del self._aux_cache["modified"]
537                         try:
538                                 f = atomic_ofstream(self._aux_cache_filename, 'wb')
539                                 pickle.dump(self._aux_cache, f, protocol=2)
540                                 f.close()
541                                 apply_secpass_permissions(
542                                         self._aux_cache_filename, gid=portage_gid, mode=0o644)
543                         except (IOError, OSError) as e:
544                                 pass
545                         self._aux_cache["modified"] = set()
546
547         @property
548         def _aux_cache(self):
549                 if self._aux_cache_obj is None:
550                         self._aux_cache_init()
551                 return self._aux_cache_obj
552
	def _aux_cache_init(self):
		"""
		Load the pickled aux_get metadata cache from disk into
		self._aux_cache_obj, falling back to a fresh empty cache when
		the file is missing, unreadable, or of an unexpected
		structure/version (the cache is completely disposable).
		"""
		aux_cache = None
		open_kwargs = {}
		if sys.hexversion >= 0x3000000:
			# Buffered io triggers extreme performance issues in
			# Unpickler.load() (problem observed with python-3.0.1).
			# Unfortunately, performance is still poor relative to
			# python-2.x, but buffering makes it much worse.
			open_kwargs["buffering"] = 0
		try:
			f = open(_unicode_encode(self._aux_cache_filename,
				encoding=_encodings['fs'], errors='strict'),
				mode='rb', **open_kwargs)
			mypickle = pickle.Unpickler(f)
			try:
				# Restrict unpickling of global objects (python2 only).
				mypickle.find_global = None
			except AttributeError:
				# TODO: If py3k, override Unpickler.find_class().
				pass
			aux_cache = mypickle.load()
			f.close()
			del f
		except (AttributeError, EOFError, EnvironmentError, ValueError, pickle.UnpicklingError) as e:
			if isinstance(e, EnvironmentError) and \
				getattr(e, 'errno', None) in (errno.ENOENT, errno.EACCES):
				# A missing or unreadable cache file is normal; stay quiet.
				pass
			else:
				writemsg(_unicode_decode(_("!!! Error loading '%s': %s\n")) % \
					(self._aux_cache_filename, e), noiselevel=-1)
			del e

		# Discard a cache with the wrong shape or version and start fresh.
		if not aux_cache or \
			not isinstance(aux_cache, dict) or \
			aux_cache.get("version") != self._aux_cache_version or \
			not aux_cache.get("packages"):
			aux_cache = {"version": self._aux_cache_version}
			aux_cache["packages"] = {}

		# Validate the embedded owners cache; rebuild it when invalid.
		owners = aux_cache.get("owners")
		if owners is not None:
			if not isinstance(owners, dict):
				owners = None
			elif "version" not in owners:
				owners = None
			elif owners["version"] != self._owners_cache_version:
				owners = None
			elif "base_names" not in owners:
				owners = None
			elif not isinstance(owners["base_names"], dict):
				owners = None

		if owners is None:
			owners = {
				"base_names" : {},
				"version"    : self._owners_cache_version
			}
			aux_cache["owners"] = owners

		# Track which entries change in memory so flush_cache() can decide
		# whether writing the cache back to disk is worthwhile.
		aux_cache["modified"] = set()
		self._aux_cache_obj = aux_cache
613
614         def aux_get(self, mycpv, wants, myrepo = None):
615                 """This automatically caches selected keys that are frequently needed
616                 by emerge for dependency calculations.  The cached metadata is
617                 considered valid if the mtime of the package directory has not changed
618                 since the data was cached.  The cache is stored in a pickled dict
619                 object with the following format:
620
621                 {version:"1", "packages":{cpv1:(mtime,{k1,v1, k2,v2, ...}), cpv2...}}
622
623                 If an error occurs while loading the cache pickle or the version is
624                 unrecognized, the cache will simple be recreated from scratch (it is
625                 completely disposable).
626                 """
627                 cache_these_wants = self._aux_cache_keys.intersection(wants)
628                 for x in wants:
629                         if self._aux_cache_keys_re.match(x) is not None:
630                                 cache_these_wants.add(x)
631
632                 if not cache_these_wants:
633                         mydata = self._aux_get(mycpv, wants)
634                         return [mydata[x] for x in wants]
635
636                 cache_these = set(self._aux_cache_keys)
637                 cache_these.update(cache_these_wants)
638
639                 mydir = self.getpath(mycpv)
640                 mydir_stat = None
641                 try:
642                         mydir_stat = os.stat(mydir)
643                 except OSError as e:
644                         if e.errno != errno.ENOENT:
645                                 raise
646                         raise KeyError(mycpv)
647                 mydir_mtime = mydir_stat[stat.ST_MTIME]
648                 pkg_data = self._aux_cache["packages"].get(mycpv)
649                 pull_me = cache_these.union(wants)
650                 mydata = {"_mtime_" : mydir_mtime}
651                 cache_valid = False
652                 cache_incomplete = False
653                 cache_mtime = None
654                 metadata = None
655                 if pkg_data is not None:
656                         if not isinstance(pkg_data, tuple) or len(pkg_data) != 2:
657                                 pkg_data = None
658                         else:
659                                 cache_mtime, metadata = pkg_data
660                                 if not isinstance(cache_mtime, (long, int)) or \
661                                         not isinstance(metadata, dict):
662                                         pkg_data = None
663
664                 if pkg_data:
665                         cache_mtime, metadata = pkg_data
666                         cache_valid = cache_mtime == mydir_mtime
667                 if cache_valid:
668                         # Migrate old metadata to unicode.
669                         for k, v in metadata.items():
670                                 metadata[k] = _unicode_decode(v,
671                                         encoding=_encodings['repo.content'], errors='replace')
672
673                         mydata.update(metadata)
674                         pull_me.difference_update(mydata)
675
676                 if pull_me:
677                         # pull any needed data and cache it
678                         aux_keys = list(pull_me)
679                         mydata.update(self._aux_get(mycpv, aux_keys, st=mydir_stat))
680                         if not cache_valid or cache_these.difference(metadata):
681                                 cache_data = {}
682                                 if cache_valid and metadata:
683                                         cache_data.update(metadata)
684                                 for aux_key in cache_these:
685                                         cache_data[aux_key] = mydata[aux_key]
686                                 self._aux_cache["packages"][_unicode(mycpv)] = \
687                                         (mydir_mtime, cache_data)
688                                 self._aux_cache["modified"].add(mycpv)
689
690                 if _slot_re.match(mydata['SLOT']) is None:
691                         # Empty or invalid slot triggers InvalidAtom exceptions when
692                         # generating slot atoms for packages, so translate it to '0' here.
693                         mydata['SLOT'] = _unicode_decode('0')
694
695                 return [mydata[x] for x in wants]
696
697         def _aux_get(self, mycpv, wants, st=None):
698                 mydir = self.getpath(mycpv)
699                 if st is None:
700                         try:
701                                 st = os.stat(mydir)
702                         except OSError as e:
703                                 if e.errno == errno.ENOENT:
704                                         raise KeyError(mycpv)
705                                 elif e.errno == PermissionDenied.errno:
706                                         raise PermissionDenied(mydir)
707                                 else:
708                                         raise
709                 if not stat.S_ISDIR(st.st_mode):
710                         raise KeyError(mycpv)
711                 results = {}
712                 env_keys = []
713                 for x in wants:
714                         if x == "_mtime_":
715                                 results[x] = st[stat.ST_MTIME]
716                                 continue
717                         try:
718                                 myf = io.open(
719                                         _unicode_encode(os.path.join(mydir, x),
720                                         encoding=_encodings['fs'], errors='strict'),
721                                         mode='r', encoding=_encodings['repo.content'],
722                                         errors='replace')
723                                 try:
724                                         myd = myf.read()
725                                 finally:
726                                         myf.close()
727                         except IOError:
728                                 if x not in self._aux_cache_keys and \
729                                         self._aux_cache_keys_re.match(x) is None:
730                                         env_keys.append(x)
731                                         continue
732                                 myd = _unicode_decode('')
733
734                         # Preserve \n for metadata that is known to
735                         # contain multiple lines.
736                         if self._aux_multi_line_re.match(x) is None:
737                                 myd = " ".join(myd.split())
738
739                         results[x] = myd
740
741                 if env_keys:
742                         env_results = self._aux_env_search(mycpv, env_keys)
743                         for k in env_keys:
744                                 v = env_results.get(k)
745                                 if v is None:
746                                         v = _unicode_decode('')
747                                 if self._aux_multi_line_re.match(k) is None:
748                                         v = " ".join(v.split())
749                                 results[k] = v
750
751                 if results.get("EAPI") == "":
752                         results[_unicode_decode("EAPI")] = _unicode_decode('0')
753
754                 return results
755
	def _aux_env_search(self, cpv, variables):
		"""
		Search environment.bz2 for the specified variables. Returns
		a dict mapping variables to values, and any variables not
		found in the environment will not be included in the dict.
		This is useful for querying variables like ${SRC_URI} and
		${A}, which are not saved in separate files but are available
		in environment.bz2 (see bug #395463).
		"""
		env_file = self.getpath(cpv, filename="environment.bz2")
		if not os.path.isfile(env_file):
			return {}
		# Prefer PORTAGE_BUNZIP2_COMMAND; otherwise fall back to
		# PORTAGE_BZIP2_COMMAND with -d appended for decompression.
		bunzip2_cmd = portage.util.shlex_split(
			self.settings.get("PORTAGE_BUNZIP2_COMMAND", ""))
		if not bunzip2_cmd:
			bunzip2_cmd = portage.util.shlex_split(
				self.settings["PORTAGE_BZIP2_COMMAND"])
			bunzip2_cmd.append("-d")
		args = bunzip2_cmd + ["-c", env_file]
		try:
			proc = subprocess.Popen(args, stdout=subprocess.PIPE)
		except EnvironmentError as e:
			if e.errno != errno.ENOENT:
				raise
			raise portage.exception.CommandNotFound(args[0])

		# Parts of the following code are borrowed from
		# filter-bash-environment.py (keep them in sync).
		var_assign_re = re.compile(r'(^|^declare\s+-\S+\s+|^declare\s+|^export\s+)([^=\s]+)=("|\')?(.*)$')
		close_quote_re = re.compile(r'(\\"|"|\')\s*$')
		def have_end_quote(quote, line):
			# True if the line ends with the same quote character
			# that opened the value.
			close_quote_match = close_quote_re.search(line)
			return close_quote_match is not None and \
				close_quote_match.group(1) == quote

		variables = frozenset(variables)
		results = {}
		for line in proc.stdout:
			line = _unicode_decode(line,
				encoding=_encodings['content'], errors='replace')
			var_assign_match = var_assign_re.match(line)
			if var_assign_match is not None:
				key = var_assign_match.group(2)
				quote = var_assign_match.group(3)
				if quote is not None:
					if have_end_quote(quote,
						line[var_assign_match.end(2)+2:]):
						value = var_assign_match.group(4)
					else:
						# Multi-line value: keep reading from the
						# shared proc.stdout iterator until the
						# closing quote, which also advances the
						# outer loop past these lines.
						value = [var_assign_match.group(4)]
						for line in proc.stdout:
							line = _unicode_decode(line,
								encoding=_encodings['content'],
								errors='replace')
							value.append(line)
							if have_end_quote(quote, line):
								break
						value = ''.join(value)
					# remove trailing quote and whitespace
					value = value.rstrip()[:-1]
				else:
					value = var_assign_match.group(4).rstrip()

				if key in variables:
					results[key] = value

		proc.wait()
		proc.stdout.close()
		return results
825
826         def aux_update(self, cpv, values):
827                 mylink = self._dblink(cpv)
828                 if not mylink.exists():
829                         raise KeyError(cpv)
830                 self._bump_mtime(cpv)
831                 self._clear_pkg_cache(mylink)
832                 for k, v in values.items():
833                         if v:
834                                 mylink.setfile(k, v)
835                         else:
836                                 try:
837                                         os.unlink(os.path.join(self.getpath(cpv), k))
838                                 except EnvironmentError:
839                                         pass
840                 self._bump_mtime(cpv)
841
842         def counter_tick(self, myroot=None, mycpv=None):
843                 """
844                 @param myroot: ignored, self._eroot is used instead
845                 """
846                 return self.counter_tick_core(incrementing=1, mycpv=mycpv)
847
848         def get_counter_tick_core(self, myroot=None, mycpv=None):
849                 """
850                 Use this method to retrieve the counter instead
851                 of having to trust the value of a global counter
852                 file that can lead to invalid COUNTER
853                 generation. When cache is valid, the package COUNTER
854                 files are not read and we rely on the timestamp of
855                 the package directory to validate cache. The stat
856                 calls should only take a short time, so performance
857                 is sufficient without having to rely on a potentially
858                 corrupt global counter file.
859
860                 The global counter file located at
861                 $CACHE_PATH/counter serves to record the
862                 counter of the last installed package and
863                 it also corresponds to the total number of
864                 installation actions that have occurred in
865                 the history of this package database.
866
867                 @param myroot: ignored, self._eroot is used instead
868                 """
869                 del myroot
870                 counter = -1
871                 try:
872                         cfile = io.open(
873                                 _unicode_encode(self._counter_path,
874                                 encoding=_encodings['fs'], errors='strict'),
875                                 mode='r', encoding=_encodings['repo.content'],
876                                 errors='replace')
877                 except EnvironmentError as e:
878                         # Silently allow ENOENT since files under
879                         # /var/cache/ are allowed to disappear.
880                         if e.errno != errno.ENOENT:
881                                 writemsg(_("!!! Unable to read COUNTER file: '%s'\n") % \
882                                         self._counter_path, noiselevel=-1)
883                                 writemsg("!!! %s\n" % str(e), noiselevel=-1)
884                         del e
885                 else:
886                         try:
887                                 try:
888                                         counter = long(cfile.readline().strip())
889                                 finally:
890                                         cfile.close()
891                         except (OverflowError, ValueError) as e:
892                                 writemsg(_("!!! COUNTER file is corrupt: '%s'\n") % \
893                                         self._counter_path, noiselevel=-1)
894                                 writemsg("!!! %s\n" % str(e), noiselevel=-1)
895                                 del e
896
897                 if self._cached_counter == counter:
898                         max_counter = counter
899                 else:
900                         # We must ensure that we return a counter
901                         # value that is at least as large as the
902                         # highest one from the installed packages,
903                         # since having a corrupt value that is too low
904                         # can trigger incorrect AUTOCLEAN behavior due
905                         # to newly installed packages having lower
906                         # COUNTERs than the previous version in the
907                         # same slot.
908                         max_counter = counter
909                         for cpv in self.cpv_all():
910                                 try:
911                                         pkg_counter = int(self.aux_get(cpv, ["COUNTER"])[0])
912                                 except (KeyError, OverflowError, ValueError):
913                                         continue
914                                 if pkg_counter > max_counter:
915                                         max_counter = pkg_counter
916
917                 return max_counter + 1
918
	def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None):
		"""
		This method will grab the next COUNTER value and record it back
		to the global file. Note that every package install must have
		a unique counter, since a slotmove update can move two packages
		into the same SLOT and in that case it's important that both
		packages have different COUNTER metadata.

		@param myroot: ignored, self._eroot is used instead
		@param mycpv: ignored
		@rtype: int
		@return: new counter value
		"""
		# Both parameters are documented as ignored; clobber them so
		# they cannot be used by accident below.
		myroot = None
		mycpv = None
		# Hold the vardb lock so that concurrent processes cannot hand
		# out the same counter value.
		self.lock()
		try:
			counter = self.get_counter_tick_core() - 1
			if incrementing:
				#increment counter
				counter += 1
				# update new global counter file
				try:
					write_atomic(self._counter_path, str(counter))
				except InvalidLocation:
					# The cache directory may not exist yet; create
					# it and retry once.
					self.settings._init_dirs()
					write_atomic(self._counter_path, str(counter))
			self._cached_counter = counter

			# Since we hold a lock, this is a good opportunity
			# to flush the cache. Note that this will only
			# flush the cache periodically in the main process
			# when _aux_cache_threshold is exceeded.
			self.flush_cache()
		finally:
			self.unlock()

		return counter
957
958         def _dblink(self, cpv):
959                 category, pf = catsplit(cpv)
960                 return dblink(category, pf, settings=self.settings,
961                         vartree=self.vartree, treetype="vartree")
962
963         def removeFromContents(self, pkg, paths, relative_paths=True):
964                 """
965                 @param pkg: cpv for an installed package
966                 @type pkg: string
967                 @param paths: paths of files to remove from contents
968                 @type paths: iterable
969                 """
970                 if not hasattr(pkg, "getcontents"):
971                         pkg = self._dblink(pkg)
972                 root = self.settings['ROOT']
973                 root_len = len(root) - 1
974                 new_contents = pkg.getcontents().copy()
975                 removed = 0
976
977                 for filename in paths:
978                         filename = _unicode_decode(filename,
979                                 encoding=_encodings['content'], errors='strict')
980                         filename = normalize_path(filename)
981                         if relative_paths:
982                                 relative_filename = filename
983                         else:
984                                 relative_filename = filename[root_len:]
985                         contents_key = pkg._match_contents(relative_filename)
986                         if contents_key:
987                                 del new_contents[contents_key]
988                                 removed += 1
989
990                 if removed:
991                         self._bump_mtime(pkg.mycpv)
992                         f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
993                         write_contents(new_contents, root, f)
994                         f.close()
995                         self._bump_mtime(pkg.mycpv)
996                         pkg._clear_contents_cache()
997
998         class _owners_cache(object):
999                 """
1000                 This class maintains an hash table that serves to index package
1001                 contents by mapping the basename of file to a list of possible
1002                 packages that own it. This is used to optimize owner lookups
1003                 by narrowing the search down to a smaller number of packages.
1004                 """
1005                 try:
1006                         from hashlib import md5 as _new_hash
1007                 except ImportError:
1008                         from md5 import new as _new_hash
1009
1010                 _hash_bits = 16
1011                 _hex_chars = int(_hash_bits / 4)
1012
1013                 def __init__(self, vardb):
1014                         self._vardb = vardb
1015
1016                 def add(self, cpv):
1017                         eroot_len = len(self._vardb._eroot)
1018                         contents = self._vardb._dblink(cpv).getcontents()
1019                         pkg_hash = self._hash_pkg(cpv)
1020                         if not contents:
1021                                 # Empty path is a code used to represent empty contents.
1022                                 self._add_path("", pkg_hash)
1023
1024                         for x in contents:
1025                                 self._add_path(x[eroot_len:], pkg_hash)
1026
1027                         self._vardb._aux_cache["modified"].add(cpv)
1028
1029                 def _add_path(self, path, pkg_hash):
1030                         """
1031                         Empty path is a code that represents empty contents.
1032                         """
1033                         if path:
1034                                 name = os.path.basename(path.rstrip(os.path.sep))
1035                                 if not name:
1036                                         return
1037                         else:
1038                                 name = path
1039                         name_hash = self._hash_str(name)
1040                         base_names = self._vardb._aux_cache["owners"]["base_names"]
1041                         pkgs = base_names.get(name_hash)
1042                         if pkgs is None:
1043                                 pkgs = {}
1044                                 base_names[name_hash] = pkgs
1045                         pkgs[pkg_hash] = None
1046
1047                 def _hash_str(self, s):
1048                         h = self._new_hash()
1049                         # Always use a constant utf_8 encoding here, since
1050                         # the "default" encoding can change.
1051                         h.update(_unicode_encode(s,
1052                                 encoding=_encodings['repo.content'],
1053                                 errors='backslashreplace'))
1054                         h = h.hexdigest()
1055                         h = h[-self._hex_chars:]
1056                         h = int(h, 16)
1057                         return h
1058
1059                 def _hash_pkg(self, cpv):
1060                         counter, mtime = self._vardb.aux_get(
1061                                 cpv, ["COUNTER", "_mtime_"])
1062                         try:
1063                                 counter = int(counter)
1064                         except ValueError:
1065                                 counter = 0
1066                         return (_unicode(cpv), counter, mtime)
1067
	class _owners_db(object):
		"""
		Answers "which installed package owns this file?" queries, using
		the basename index maintained by _owners_cache and falling back
		to a full CONTENTS scan when memory pressure requires it.
		"""

		def __init__(self, vardb):
			self._vardb = vardb

		def populate(self):
			# Public wrapper; builds/refreshes the owners cache.
			self._populate()

		def _populate(self):
			"""
			Synchronize the cached basename index with the set of
			currently installed packages and return the cache object.
			"""
			owners_cache = vardbapi._owners_cache(self._vardb)
			cached_hashes = set()
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			# Take inventory of all cached package hashes.
			for name, hash_values in list(base_names.items()):
				if not isinstance(hash_values, dict):
					# Discard malformed buckets.
					del base_names[name]
					continue
				cached_hashes.update(hash_values)

			# Create sets of valid package hashes and uncached packages.
			uncached_pkgs = set()
			hash_pkg = owners_cache._hash_pkg
			valid_pkg_hashes = set()
			for cpv in self._vardb.cpv_all():
				hash_value = hash_pkg(cpv)
				valid_pkg_hashes.add(hash_value)
				if hash_value not in cached_hashes:
					uncached_pkgs.add(cpv)

			# Cache any missing packages.
			for cpv in uncached_pkgs:
				owners_cache.add(cpv)

			# Delete any stale cache.
			stale_hashes = cached_hashes.difference(valid_pkg_hashes)
			if stale_hashes:
				for base_name_hash, bucket in list(base_names.items()):
					for hash_value in stale_hashes.intersection(bucket):
						del bucket[hash_value]
					if not bucket:
						del base_names[base_name_hash]

			return owners_cache

		def get_owners(self, path_iter):
			"""
			@return the owners as a dblink -> set(files) mapping.
			"""
			owners = {}
			for owner, f in self.iter_owners(path_iter):
				owned_files = owners.get(owner)
				if owned_files is None:
					owned_files = set()
					owners[owner] = owned_files
				owned_files.add(f)
			return owners

		def getFileOwnerMap(self, path_iter):
			# Invert get_owners(): map each file to the set of dblink
			# instances that own it.
			owners = self.get_owners(path_iter)
			file_owners = {}
			for pkg_dblink, files in owners.items():
				for f in files:
					owner_set = file_owners.get(f)
					if owner_set is None:
						owner_set = set()
						file_owners[f] = owner_set
					owner_set.add(pkg_dblink)
			return file_owners

		def iter_owners(self, path_iter):
			"""
			Iterate over tuples of (dblink, path). In order to avoid
			consuming too many resources for too much time, resources
			are only allocated for the duration of a given iter_owners()
			call. Therefore, to maximize reuse of resources when searching
			for multiple files, it's best to search for them all in a single
			call.
			"""

			if not isinstance(path_iter, list):
				path_iter = list(path_iter)
			owners_cache = self._populate()
			vardb = self._vardb
			root = vardb._eroot
			hash_pkg = owners_cache._hash_pkg
			hash_str = owners_cache._hash_str
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			# Memoize dblink instances, bounded to limit memory use.
			dblink_cache = {}

			def dblink(cpv):
				x = dblink_cache.get(cpv)
				if x is None:
					if len(dblink_cache) > 20:
						# Ensure that we don't run out of memory.
						# NOTE(review): this StopIteration is caught by
						# the except clause below to trigger the
						# low-memory fallback; under PEP 479
						# (Python 3.7+) a StopIteration escaping into a
						# generator frame becomes RuntimeError — verify
						# on modern interpreters.
						raise StopIteration()
					x = self._vardb._dblink(cpv)
					dblink_cache[cpv] = x
				return x

			while path_iter:

				path = path_iter.pop()
				# A query that doesn't start with os.sep is treated as
				# a bare basename to match anywhere.
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))

				if not name:
					continue

				name_hash = hash_str(name)
				pkgs = base_names.get(name_hash)
				owners = []
				if pkgs is not None:
					try:
						for hash_value in pkgs:
							# Skip malformed or stale cache entries.
							if not isinstance(hash_value, tuple) or \
								len(hash_value) != 3:
								continue
							cpv, counter, mtime = hash_value
							if not isinstance(cpv, basestring):
								continue
							try:
								current_hash = hash_pkg(cpv)
							except KeyError:
								continue

							if current_hash != hash_value:
								continue

							if is_basename:
								for p in dblink(cpv).getcontents():
									if os.path.basename(p) == name:
										owners.append((cpv, p[len(root):]))
							else:
								if dblink(cpv).isowner(path):
									owners.append((cpv, path))

					except StopIteration:
						# dblink_cache overflowed: requeue this path and
						# restart with the low-memory implementation.
						path_iter.append(path)
						del owners[:]
						dblink_cache.clear()
						gc.collect()
						for x in self._iter_owners_low_mem(path_iter):
							yield x
						return
					else:
						for cpv, p in owners:
							yield (dblink(cpv), p)

		def _iter_owners_low_mem(self, path_list):
			"""
			This implementation will make a short-lived dblink instance (and
			parse CONTENTS) for every single installed package. This is
			slower but uses less memory than the method which uses the
			basename cache.
			"""

			if not path_list:
				return

			# Pre-compute (path, name, is_basename) once per query.
			path_info_list = []
			for path in path_list:
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))
				path_info_list.append((path, name, is_basename))

			root = self._vardb._eroot
			for cpv in self._vardb.cpv_all():
				dblnk =  self._vardb._dblink(cpv)

				for path, name, is_basename in path_info_list:
					if is_basename:
						for p in dblnk.getcontents():
							if os.path.basename(p) == name:
								yield dblnk, p[len(root):]
					else:
						if dblnk.isowner(path):
							yield dblnk, path
1253
class vartree(object):
	"this tree will scan a var/db/pkg database located at root (passed to init)"

	def __init__(self, root=None, virtual=DeprecationWarning, categories=None,
		settings=None):
		"""
		@param root: deprecated and unused; settings['ROOT'] is used instead
		@param virtual: deprecated and unused
		@param categories: unused
		@param settings: typically portage.settings (the default when None)
		"""

		if settings is None:
			settings = portage.settings

		if root is not None and root != settings['ROOT']:
			warnings.warn("The 'root' parameter of the "
				"portage.dbapi.vartree.vartree"
				" constructor is now unused. Use "
				"settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)

		if virtual is not DeprecationWarning:
			warnings.warn("The 'virtual' parameter of the "
				"portage.dbapi.vartree.vartree"
				" constructor is unused",
				DeprecationWarning, stacklevel=2)

		self.settings = settings
		self.dbapi = vardbapi(settings=settings, vartree=self)
		self.populated = 1

	@property
	def root(self):
		"""Deprecated accessor for settings['ROOT']."""
		warnings.warn("The root attribute of "
			"portage.dbapi.vartree.vartree"
			" is deprecated. Use "
			"settings['ROOT'] instead.",
			DeprecationWarning, stacklevel=3)
		return self.settings['ROOT']

	def getpath(self, mykey, filename=None):
		"""Return the db path for mykey, optionally joined with filename."""
		return self.dbapi.getpath(mykey, filename=filename)

	def zap(self, mycpv):
		# No-op retained for API compatibility.
		return

	def inject(self, mycpv):
		# No-op retained for API compatibility.
		return

	def get_provide(self, mycpv):
		"""
		Return a list of cat/pkg atoms provided by the installed package
		mycpv, with USE conditionals in PROVIDE evaluated against the
		package's recorded USE flags.  On metadata parse errors a message
		is logged and [] is returned.
		"""
		myprovides = []
		mylines = None
		try:
			mylines, myuse = self.dbapi.aux_get(mycpv, ["PROVIDE", "USE"])
			if mylines:
				myuse = myuse.split()
				mylines = use_reduce(mylines, uselist=myuse, flat=True)
				for myprovide in mylines:
					mys = catpkgsplit(myprovide)
					if not mys:
						# Unversioned atom; split cat/pkg directly.
						mys = myprovide.split("/")
					myprovides += [mys[0] + "/" + mys[1]]
			return myprovides
		except SystemExit:
			# Never swallow SystemExit.  (No need to bind the
			# exception to a name here.)
			raise
		except Exception as e:
			mydir = self.dbapi.getpath(mycpv)
			writemsg(_("\nParse Error reading PROVIDE and USE in '%s'\n") % mydir,
				noiselevel=-1)
			if mylines:
				writemsg(_("Possibly Invalid: '%s'\n") % str(mylines),
					noiselevel=-1)
			writemsg(_("Exception: %s\n\n") % str(e), noiselevel=-1)
			return []

	def get_all_provides(self):
		"""Map every provided atom to the list of installed CPVs providing it."""
		myprovides = {}
		for node in self.getallcpv():
			for mykey in self.get_provide(node):
				myprovides.setdefault(mykey, []).append(node)
		return myprovides

	def dep_bestmatch(self, mydep, use_cache=1):
		"compatibility method -- all matches, not just visible ones"
		mymatch = best(self.dbapi.match(
			dep_expand(mydep, mydb=self.dbapi, settings=self.settings),
			use_cache=use_cache))
		if mymatch is None:
			return ""
		return mymatch

	def dep_match(self, mydep, use_cache=1):
		"compatibility method -- we want to see all matches, not just visible ones"
		mymatch = self.dbapi.match(mydep, use_cache=use_cache)
		if mymatch is None:
			return []
		return mymatch

	def exists_specific(self, cpv):
		"""Return True if the exact cpv is installed."""
		return self.dbapi.cpv_exists(cpv)

	def getallcpv(self):
		"""temporary function, probably to be renamed --- Gets a list of all
		category/package-versions installed on the system."""
		return self.dbapi.cpv_all()

	def getallnodes(self):
		"""new behavior: these are all *unmasked* nodes.  There may or may not be available
		masked package for nodes in this nodes list."""
		return self.dbapi.cp_all()

	def getebuildpath(self, fullpackage):
		"""Return the path of the ebuild saved in the db entry for fullpackage."""
		cat, package = catsplit(fullpackage)
		return self.getpath(fullpackage, filename=package + ".ebuild")

	def getslot(self, mycatpkg):
		"Get a slot for a catpkg; assume it exists."
		try:
			return self.dbapi.aux_get(mycatpkg, ["SLOT"])[0]
		except KeyError:
			return ""

	def populate(self):
		# Kept for API compatibility; this db is always populated.
		self.populated = 1
1379
class dblink(object):
	"""
	This class provides an interface to the installed package database
	At present this is implemented as a text backend in /var/db/pkg.
	"""

	# Class-level import so the compiled patterns below can be built
	# at class-definition time.
	import re
	# Paths that need normalize_path(): doubled slashes, a path not
	# starting with '/', a trailing slash, or '.'/'..' components.
	_normalize_needed = re.compile(r'//|^[^/]|./$|(^|/)\.\.?(/|$)')

	# Parses one CONTENTS line into one of three entry types:
	#   dir/dev/fif: "<type> <path>"
	#   obj:         "obj <path> <md5sum> <mtime>"
	#   sym:         "sym <path> -> <dest> <mtime>" (the 'oldsym' group
	#                matches a legacy tuple-formatted stat field instead
	#                of the plain mtime)
	_contents_re = re.compile(r'^(' + \
		r'(?P<dir>(dev|dir|fif) (.+))|' + \
		r'(?P<obj>(obj) (.+) (\S+) (\d+))|' + \
		r'(?P<sym>(sym) (.+) -> (.+) ((\d+)|(?P<oldsym>(' + \
		r'\(\d+, \d+L, \d+L, \d+, \d+, \d+, \d+L, \d+, (\d+), \d+\)))))' + \
		r')$'
	)

	# These files are generated by emerge, so we need to remove
	# them when they are the only thing left in a directory.
	_infodir_cleanup = frozenset(["dir", "dir.old"])

	# errno values that callers treat as ignorable when unlink() fails.
	_ignored_unlink_errnos = (
		errno.EBUSY, errno.ENOENT,
		errno.ENOTDIR, errno.EISDIR)

	# errno values that callers treat as ignorable when rmdir() fails.
	_ignored_rmdir_errnos = (
		errno.EEXIST, errno.ENOTEMPTY,
		errno.EBUSY, errno.ENOENT,
		errno.ENOTDIR, errno.EISDIR,
		errno.EPERM)
1410
1411         def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None,
1412                 vartree=None, blockers=None, scheduler=None, pipe=None):
1413                 """
1414                 Creates a DBlink object for a given CPV.
1415                 The given CPV may not be present in the database already.
1416                 
1417                 @param cat: Category
1418                 @type cat: String
1419                 @param pkg: Package (PV)
1420                 @type pkg: String
1421                 @param myroot: ignored, settings['ROOT'] is used instead
1422                 @type myroot: String (Path)
1423                 @param settings: Typically portage.settings
1424                 @type settings: portage.config
1425                 @param treetype: one of ['porttree','bintree','vartree']
1426                 @type treetype: String
1427                 @param vartree: an instance of vartree corresponding to myroot.
1428                 @type vartree: vartree
1429                 """
1430
1431                 if settings is None:
1432                         raise TypeError("settings argument is required")
1433
1434                 mysettings = settings
1435                 self._eroot = mysettings['EROOT']
1436                 self.cat = cat
1437                 self.pkg = pkg
1438                 self.mycpv = self.cat + "/" + self.pkg
1439                 self.mysplit = list(catpkgsplit(self.mycpv)[1:])
1440                 self.mysplit[0] = "%s/%s" % (self.cat, self.mysplit[0])
1441                 self.treetype = treetype
1442                 if vartree is None:
1443                         vartree = portage.db[self._eroot]["vartree"]
1444                 self.vartree = vartree
1445                 self._blockers = blockers
1446                 self._scheduler = scheduler
1447                 self.dbroot = normalize_path(os.path.join(self._eroot, VDB_PATH))
1448                 self.dbcatdir = self.dbroot+"/"+cat
1449                 self.dbpkgdir = self.dbcatdir+"/"+pkg
1450                 self.dbtmpdir = self.dbcatdir+"/-MERGING-"+pkg
1451                 self.dbdir = self.dbpkgdir
1452                 self.settings = mysettings
1453                 self._verbose = self.settings.get("PORTAGE_VERBOSE") == "1"
1454
1455                 self.myroot = self.settings['ROOT']
1456                 self._installed_instance = None
1457                 self.contentscache = None
1458                 self._contents_inodes = None
1459                 self._contents_basenames = None
1460                 self._linkmap_broken = False
1461                 self._hardlink_merge_map = {}
1462                 self._hash_key = (self._eroot, self.mycpv)
1463                 self._protect_obj = None
1464                 self._pipe = pipe
1465
1466         def __hash__(self):
1467                 return hash(self._hash_key)
1468
1469         def __eq__(self, other):
1470                 return isinstance(other, dblink) and \
1471                         self._hash_key == other._hash_key
1472
1473         def _get_protect_obj(self):
1474
1475                 if self._protect_obj is None:
1476                         self._protect_obj = ConfigProtect(self._eroot,
1477                         portage.util.shlex_split(
1478                                 self.settings.get("CONFIG_PROTECT", "")),
1479                         portage.util.shlex_split(
1480                                 self.settings.get("CONFIG_PROTECT_MASK", "")))
1481
1482                 return self._protect_obj
1483
1484         def isprotected(self, obj):
1485                 return self._get_protect_obj().isprotected(obj)
1486
1487         def updateprotect(self):
1488                 self._get_protect_obj().updateprotect()
1489
1490         def lockdb(self):
1491                 self.vartree.dbapi.lock()
1492
1493         def unlockdb(self):
1494                 self.vartree.dbapi.unlock()
1495
1496         def getpath(self):
1497                 "return path to location of db information (for >>> informational display)"
1498                 return self.dbdir
1499
1500         def exists(self):
1501                 "does the db entry exist?  boolean."
1502                 return os.path.exists(self.dbdir)
1503
	def delete(self):
		"""
		Remove this entry from the database
		"""
		if not os.path.exists(self.dbdir):
			return

		# Check validity of self.dbdir before attempting to remove it.
		# NOTE(review): this is a plain prefix test, so any path merely
		# starting with dbroot would pass; acceptable here because dbdir
		# is always built from dbroot in __init__ -- confirm if dbdir
		# can ever be set externally.
		if not self.dbdir.startswith(self.dbroot):
			writemsg(_("portage.dblink.delete(): invalid dbdir: %s\n") % \
				self.dbdir, noiselevel=-1)
			return

		shutil.rmtree(self.dbdir)
		# If empty, remove parent category directory.
		try:
			os.rmdir(os.path.dirname(self.dbdir))
		except OSError:
			# Category dir not empty (or already gone) -- leave it alone.
			pass
		# Drop this entry from the vardb's in-memory state as well.
		self.vartree.dbapi._remove(self)
1524
1525         def clearcontents(self):
1526                 """
1527                 For a given db entry (self), erase the CONTENTS values.
1528                 """
1529                 self.lockdb()
1530                 try:
1531                         if os.path.exists(self.dbdir+"/CONTENTS"):
1532                                 os.unlink(self.dbdir+"/CONTENTS")
1533                 finally:
1534                         self.unlockdb()
1535
1536         def _clear_contents_cache(self):
1537                 self.contentscache = None
1538                 self._contents_inodes = None
1539                 self._contents_basenames = None
1540
	def getcontents(self):
		"""
		Get the installed files of a given package (aka what that package installed)

		Returns a dict mapping absolute path -> data tuple, where the
		tuple is ("dir",), ("obj", mtime, md5sum) or ("sym", mtime, dest).
		Parent directories are added implicitly.  The parsed result is
		cached in self.contentscache and returned directly on later calls.
		"""
		contents_file = os.path.join(self.dbdir, "CONTENTS")
		if self.contentscache is not None:
			return self.contentscache
		pkgfiles = {}
		try:
			myc = io.open(_unicode_encode(contents_file,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# Missing CONTENTS is treated as an empty (cached) result.
			self.contentscache = pkgfiles
			return pkgfiles
		mylines = myc.readlines()
		myc.close()
		null_byte = "\0"
		normalize_needed = self._normalize_needed
		contents_re = self._contents_re
		# Base group index of each alternative in _contents_re; subgroups
		# are addressed relative to these as base+n below.
		obj_index = contents_re.groupindex['obj']
		dir_index = contents_re.groupindex['dir']
		sym_index = contents_re.groupindex['sym']
		# The old symlink format may exist on systems that have packages
		# which were installed many years ago (see bug #351814).
		oldsym_index = contents_re.groupindex['oldsym']
		# CONTENTS files already contain EPREFIX
		myroot = self.settings['ROOT']
		if myroot == os.path.sep:
			myroot = None
		# used to generate parent dir entries
		dir_entry = (_unicode_decode("dir"),)
		# Component depth of EROOT: parent directory entries are never
		# generated above this depth.
		eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
		pos = 0
		errors = []
		for pos, line in enumerate(mylines):
			if null_byte in line:
				# Null bytes are a common indication of corruption.
				errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
				continue
			line = line.rstrip("\n")
			m = contents_re.match(line)
			if m is None:
				errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
				continue

			# Exactly one of the named alternatives matched; pick the
			# matching one and extract its fields by relative index.
			if m.group(obj_index) is not None:
				base = obj_index
				#format: type, mtime, md5sum
				data = (m.group(base+1), m.group(base+4), m.group(base+3))
			elif m.group(dir_index) is not None:
				base = dir_index
				#format: type
				data = (m.group(base+1),)
			elif m.group(sym_index) is not None:
				base = sym_index
				if m.group(oldsym_index) is None:
					mtime = m.group(base+5)
				else:
					# Legacy tuple-formatted entry: mtime is nested deeper.
					mtime = m.group(base+8)
				#format: type, mtime, dest
				data = (m.group(base+1), mtime, m.group(base+3))
			else:
				# This won't happen as long the regular expression
				# is written to only match valid entries.
				raise AssertionError(_("required group not found " + \
					"in CONTENTS entry: '%s'") % line)

			path = m.group(base+2)
			if normalize_needed.search(path) is not None:
				path = normalize_path(path)
				if not path.startswith(os.path.sep):
					path = os.path.sep + path

			if myroot is not None:
				path = os.path.join(myroot, path.lstrip(os.path.sep))

			# Implicitly add parent directories, since we can't necessarily
			# assume that they are explicitly listed in CONTENTS, and it's
			# useful for callers if they can rely on parent directory entries
			# being generated here (crucial for things like dblink.isowner()).
			path_split = path.split(os.sep)
			path_split.pop()
			while len(path_split) > eroot_split_len:
				parent = os.sep.join(path_split)
				if parent in pkgfiles:
					break
				pkgfiles[parent] = dir_entry
				path_split.pop()

			pkgfiles[path] = data

		if errors:
			writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
			for pos, e in errors:
				writemsg(_("!!!   line %d: %s\n") % (pos, e), noiselevel=-1)
		self.contentscache = pkgfiles
		return pkgfiles
1643
	def _prune_plib_registry(self, unmerge=False,
		needed=None, preserve_paths=None):
		"""
		Rebuild the linkage map and drop preserved libraries that no
		longer have any consumers, updating the PreservedLibsRegistry.

		@param unmerge: True when called while unmerging self.mycpv
		@param needed: filename containing NEEDED data still valid
			after the unmerge (passed through to _linkmap_rebuild)
		@param preserve_paths: libs preserved by a package instance that
			is currently being merged and is not yet registered in the
			PreservedLibsRegistry
		"""
		# remove preserved libraries that don't have any consumers left
		if not (self._linkmap_broken or
			self.vartree.dbapi._linkmap is None or
			self.vartree.dbapi._plib_registry is None):
			# Take the vardb filesystem lock before the registry lock;
			# both are released in the finally block below, in reverse.
			self.vartree.dbapi._fs_lock()
			plib_registry = self.vartree.dbapi._plib_registry
			plib_registry.lock()
			try:
				plib_registry.load()

				unmerge_with_replacement = \
					unmerge and preserve_paths is not None
				if unmerge_with_replacement:
					# If self.mycpv is about to be unmerged and we
					# have a replacement package, we want to exclude
					# the irrelevant NEEDED data that belongs to
					# files which are being unmerged now.
					exclude_pkgs = (self.mycpv,)
				else:
					exclude_pkgs = None

				self._linkmap_rebuild(exclude_pkgs=exclude_pkgs,
					include_file=needed, preserve_paths=preserve_paths)

				if unmerge:
					unmerge_preserve = None
					if not unmerge_with_replacement:
						unmerge_preserve = \
							self._find_libs_to_preserve(unmerge=True)
					counter = self.vartree.dbapi.cpv_counter(self.mycpv)
					plib_registry.unregister(self.mycpv,
						self.settings["SLOT"], counter)
					if unmerge_preserve:
						# Report each preserved object and re-register it
						# under this package's slot/counter.
						for path in sorted(unmerge_preserve):
							contents_key = self._match_contents(path)
							if not contents_key:
								continue
							obj_type = self.getcontents()[contents_key][0]
							self._display_merge(_(">>> needed   %s %s\n") % \
								(obj_type, contents_key), noiselevel=-1)
						plib_registry.register(self.mycpv,
							self.settings["SLOT"], counter, unmerge_preserve)
						# Remove the preserved files from our contents
						# so that they won't be unmerged.
						self.vartree.dbapi.removeFromContents(self,
							unmerge_preserve)

				unmerge_no_replacement = \
					unmerge and not unmerge_with_replacement
				cpv_lib_map = self._find_unused_preserved_libs(
					unmerge_no_replacement)
				if cpv_lib_map:
					# Physically remove unused preserved libs, then purge
					# them from the owning packages' CONTENTS under the
					# db lock.
					self._remove_preserved_libs(cpv_lib_map)
					self.vartree.dbapi.lock()
					try:
						for cpv, removed in cpv_lib_map.items():
							if not self.vartree.dbapi.cpv_exists(cpv):
								continue
							self.vartree.dbapi.removeFromContents(cpv, removed)
					finally:
						self.vartree.dbapi.unlock()

				plib_registry.store()
			finally:
				plib_registry.unlock()
				self.vartree.dbapi._fs_unlock()
1712
1713         def unmerge(self, pkgfiles=None, trimworld=None, cleanup=True,
1714                 ldpath_mtimes=None, others_in_slot=None, needed=None,
1715                 preserve_paths=None):
1716                 """
1717                 Calls prerm
1718                 Unmerges a given package (CPV)
1719                 calls postrm
1720                 calls cleanrm
1721                 calls env_update
1722                 
1723                 @param pkgfiles: files to unmerge (generally self.getcontents() )
1724                 @type pkgfiles: Dictionary
1725                 @param trimworld: Unused
1726                 @type trimworld: Boolean
1727                 @param cleanup: cleanup to pass to doebuild (see doebuild)
1728                 @type cleanup: Boolean
1729                 @param ldpath_mtimes: mtimes to pass to env_update (see env_update)
1730                 @type ldpath_mtimes: Dictionary
1731                 @param others_in_slot: all dblink instances in this slot, excluding self
1732                 @type others_in_slot: list
1733                 @param needed: Filename containing libraries needed after unmerge.
1734                 @type needed: String
1735                 @param preserve_paths: Libraries preserved by a package instance that
1736                         is currently being merged. They need to be explicitly passed to the
1737                         LinkageMap, since they are not registered in the
1738                         PreservedLibsRegistry yet.
1739                 @type preserve_paths: set
1740                 @rtype: Integer
1741                 @return:
1742                 1. os.EX_OK if everything went well.
1743                 2. return code of the failed phase (for prerm, postrm, cleanrm)
1744                 """
1745
1746                 if trimworld is not None:
1747                         warnings.warn("The trimworld parameter of the " + \
1748                                 "portage.dbapi.vartree.dblink.unmerge()" + \
1749                                 " method is now unused.",
1750                                 DeprecationWarning, stacklevel=2)
1751
1752                 background = False
1753                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1754                 if self._scheduler is None:
1755                         # We create a scheduler instance and use it to
1756                         # log unmerge output separately from merge output.
1757                         self._scheduler = PollScheduler().sched_iface
1758                 if self.settings.get("PORTAGE_BACKGROUND") == "subprocess":
1759                         if self.settings.get("PORTAGE_BACKGROUND_UNMERGE") == "1":
1760                                 self.settings["PORTAGE_BACKGROUND"] = "1"
1761                                 self.settings.backup_changes("PORTAGE_BACKGROUND")
1762                                 background = True
1763                         elif self.settings.get("PORTAGE_BACKGROUND_UNMERGE") == "0":
1764                                 self.settings["PORTAGE_BACKGROUND"] = "0"
1765                                 self.settings.backup_changes("PORTAGE_BACKGROUND")
1766                 elif self.settings.get("PORTAGE_BACKGROUND") == "1":
1767                         background = True
1768
1769                 self.vartree.dbapi._bump_mtime(self.mycpv)
1770                 showMessage = self._display_merge
1771                 if self.vartree.dbapi._categories is not None:
1772                         self.vartree.dbapi._categories = None
1773                 # When others_in_slot is supplied, the security check has already been
1774                 # done for this slot, so it shouldn't be repeated until the next
1775                 # replacement or unmerge operation.
1776                 if others_in_slot is None:
1777                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
1778                         slot_matches = self.vartree.dbapi.match(
1779                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
1780                         others_in_slot = []
1781                         for cur_cpv in slot_matches:
1782                                 if cur_cpv == self.mycpv:
1783                                         continue
1784                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
1785                                         settings=self.settings, vartree=self.vartree,
1786                                         treetype="vartree", pipe=self._pipe))
1787
1788                         retval = self._security_check([self] + others_in_slot)
1789                         if retval:
1790                                 return retval
1791
1792                 contents = self.getcontents()
1793                 # Now, don't assume that the name of the ebuild is the same as the
1794                 # name of the dir; the package may have been moved.
1795                 myebuildpath = os.path.join(self.dbdir, self.pkg + ".ebuild")
1796                 failures = 0
1797                 ebuild_phase = "prerm"
1798                 mystuff = os.listdir(self.dbdir)
1799                 for x in mystuff:
1800                         if x.endswith(".ebuild"):
1801                                 if x[:-7] != self.pkg:
1802                                         # Clean up after vardbapi.move_ent() breakage in
1803                                         # portage versions before 2.1.2
1804                                         os.rename(os.path.join(self.dbdir, x), myebuildpath)
1805                                         write_atomic(os.path.join(self.dbdir, "PF"), self.pkg+"\n")
1806                                 break
1807
1808                 if self.mycpv != self.settings.mycpv or \
1809                         "EAPI" not in self.settings.configdict["pkg"]:
1810                         # We avoid a redundant setcpv call here when
1811                         # the caller has already taken care of it.
1812                         self.settings.setcpv(self.mycpv, mydb=self.vartree.dbapi)
1813
1814                 eapi_unsupported = False
1815                 try:
1816                         doebuild_environment(myebuildpath, "prerm",
1817                                 settings=self.settings, db=self.vartree.dbapi)
1818                 except UnsupportedAPIException as e:
1819                         eapi_unsupported = e
1820
1821                 self._prune_plib_registry(unmerge=True, needed=needed,
1822                         preserve_paths=preserve_paths)
1823
1824                 builddir_lock = None
1825                 scheduler = self._scheduler
1826                 retval = os.EX_OK
1827                 try:
1828                         # Only create builddir_lock if the caller
1829                         # has not already acquired the lock.
1830                         if "PORTAGE_BUILDIR_LOCKED" not in self.settings:
1831                                 builddir_lock = EbuildBuildDir(
1832                                         scheduler=scheduler,
1833                                         settings=self.settings)
1834                                 builddir_lock.lock()
1835                                 prepare_build_dirs(settings=self.settings, cleanup=True)
1836                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1837
1838                         # Log the error after PORTAGE_LOG_FILE is initialized
1839                         # by prepare_build_dirs above.
1840                         if eapi_unsupported:
1841                                 # Sometimes this happens due to corruption of the EAPI file.
1842                                 failures += 1
1843                                 showMessage(_("!!! FAILED prerm: %s\n") % \
1844                                         os.path.join(self.dbdir, "EAPI"),
1845                                         level=logging.ERROR, noiselevel=-1)
1846                                 showMessage(_unicode_decode("%s\n") % (eapi_unsupported,),
1847                                         level=logging.ERROR, noiselevel=-1)
1848                         elif os.path.isfile(myebuildpath):
1849                                 phase = EbuildPhase(background=background,
1850                                         phase=ebuild_phase, scheduler=scheduler,
1851                                         settings=self.settings)
1852                                 phase.start()
1853                                 retval = phase.wait()
1854
1855                                 # XXX: Decide how to handle failures here.
1856                                 if retval != os.EX_OK:
1857                                         failures += 1
1858                                         showMessage(_("!!! FAILED prerm: %s\n") % retval,
1859                                                 level=logging.ERROR, noiselevel=-1)
1860
1861                         self.vartree.dbapi._fs_lock()
1862                         try:
1863                                 self._unmerge_pkgfiles(pkgfiles, others_in_slot)
1864                         finally:
1865                                 self.vartree.dbapi._fs_unlock()
1866                         self._clear_contents_cache()
1867
1868                         if not eapi_unsupported and os.path.isfile(myebuildpath):
1869                                 ebuild_phase = "postrm"
1870                                 phase = EbuildPhase(background=background,
1871                                         phase=ebuild_phase, scheduler=scheduler,
1872                                         settings=self.settings)
1873                                 phase.start()
1874                                 retval = phase.wait()
1875
1876                                 # XXX: Decide how to handle failures here.
1877                                 if retval != os.EX_OK:
1878                                         failures += 1
1879                                         showMessage(_("!!! FAILED postrm: %s\n") % retval,
1880                                                 level=logging.ERROR, noiselevel=-1)
1881
1882                 finally:
1883                         self.vartree.dbapi._bump_mtime(self.mycpv)
1884                         try:
1885                                         if not eapi_unsupported and os.path.isfile(myebuildpath):
1886                                                 if retval != os.EX_OK:
1887                                                         msg_lines = []
1888                                                         msg = _("The '%(ebuild_phase)s' "
1889                                                         "phase of the '%(cpv)s' package "
1890                                                         "has failed with exit value %(retval)s.") % \
1891                                                         {"ebuild_phase":ebuild_phase, "cpv":self.mycpv,
1892                                                         "retval":retval}
1893                                                         from textwrap import wrap
1894                                                         msg_lines.extend(wrap(msg, 72))
1895                                                         msg_lines.append("")
1896
1897                                                         ebuild_name = os.path.basename(myebuildpath)
1898                                                         ebuild_dir = os.path.dirname(myebuildpath)
1899                                                         msg = _("The problem occurred while executing "
1900                                                         "the ebuild file named '%(ebuild_name)s' "
1901                                                         "located in the '%(ebuild_dir)s' directory. "
1902                                                         "If necessary, manually remove "
1903                                                         "the environment.bz2 file and/or the "
1904                                                         "ebuild file located in that directory.") % \
1905                                                         {"ebuild_name":ebuild_name, "ebuild_dir":ebuild_dir}
1906                                                         msg_lines.extend(wrap(msg, 72))
1907                                                         msg_lines.append("")
1908
1909                                                         msg = _("Removal "
1910                                                         "of the environment.bz2 file is "
1911                                                         "preferred since it may allow the "
1912                                                         "removal phases to execute successfully. "
1913                                                         "The ebuild will be "
1914                                                         "sourced and the eclasses "
1915                                                         "from the current portage tree will be used "
1916                                                         "when necessary. Removal of "
1917                                                         "the ebuild file will cause the "
1918                                                         "pkg_prerm() and pkg_postrm() removal "
1919                                                         "phases to be skipped entirely.")
1920                                                         msg_lines.extend(wrap(msg, 72))
1921
1922                                                         self._eerror(ebuild_phase, msg_lines)
1923
1924                                         self._elog_process(phasefilter=("prerm", "postrm"))
1925
1926                                         if retval == os.EX_OK:
1927                                                 try:
1928                                                         doebuild_environment(myebuildpath, "cleanrm",
1929                                                                 settings=self.settings, db=self.vartree.dbapi)
1930                                                 except UnsupportedAPIException:
1931                                                         pass
1932                                                 phase = EbuildPhase(background=background,
1933                                                         phase="cleanrm", scheduler=scheduler,
1934                                                         settings=self.settings)
1935                                                 phase.start()
1936                                                 retval = phase.wait()
1937                         finally:
1938                                         if builddir_lock is not None:
1939                                                 builddir_lock.unlock()
1940
1941                 if log_path is not None:
1942
1943                         if not failures and 'unmerge-logs' not in self.settings.features:
1944                                 try:
1945                                         os.unlink(log_path)
1946                                 except OSError:
1947                                         pass
1948
1949                         try:
1950                                 st = os.stat(log_path)
1951                         except OSError:
1952                                 pass
1953                         else:
1954                                 if st.st_size == 0:
1955                                         try:
1956                                                 os.unlink(log_path)
1957                                         except OSError:
1958                                                 pass
1959
1960                 if log_path is not None and os.path.exists(log_path):
1961                         # Restore this since it gets lost somewhere above and it
1962                         # needs to be set for _display_merge() to be able to log.
1963                         # Note that the log isn't necessarily supposed to exist
1964                         # since if PORT_LOGDIR is unset then it's a temp file
1965                         # so it gets cleaned above.
1966                         self.settings["PORTAGE_LOG_FILE"] = log_path
1967                 else:
1968                         self.settings.pop("PORTAGE_LOG_FILE", None)
1969
1970                 env_update(target_root=self.settings['ROOT'],
1971                         prev_mtimes=ldpath_mtimes,
1972                         contents=contents, env=self.settings,
1973                         writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi)
1974
1975                 unmerge_with_replacement = preserve_paths is not None
1976                 if not unmerge_with_replacement:
1977                         # When there's a replacement package which calls us via treewalk,
1978                         # treewalk will automatically call _prune_plib_registry for us.
1979                         # Otherwise, we need to call _prune_plib_registry ourselves.
1980                         # Don't pass in the "unmerge=True" flag here, since that flag
1981                         # is intended to be used _prior_ to unmerge, not after.
1982                         self._prune_plib_registry()
1983
1984                 return os.EX_OK
1985
1986         def _display_merge(self, msg, level=0, noiselevel=0):
1987                 if not self._verbose and noiselevel >= 0 and level < logging.WARN:
1988                         return
1989                 if self._scheduler is None:
1990                         writemsg_level(msg, level=level, noiselevel=noiselevel)
1991                 else:
1992                         log_path = None
1993                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
1994                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1995                         background = self.settings.get("PORTAGE_BACKGROUND") == "1"
1996
1997                         if background and log_path is None:
1998                                 if level >= logging.WARN:
1999                                         writemsg_level(msg, level=level, noiselevel=noiselevel)
2000                         else:
2001                                 self._scheduler.output(msg,
2002                                         log_path=log_path, background=background,
2003                                         level=level, noiselevel=noiselevel)
2004
2005         def _show_unmerge(self, zing, desc, file_type, file_name):
2006                 self._display_merge("%s %s %s %s\n" % \
2007                         (zing, desc.ljust(8), file_type, file_name))
2008
2009         def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
2010                 """
2011                 
2012                 Unmerges the contents of a package from the liveFS
2013                 Removes the VDB entry for self
2014                 
2015                 @param pkgfiles: typically self.getcontents()
2016                 @type pkgfiles: Dictionary { filename: [ 'type', '?', 'md5sum' ] }
2017                 @param others_in_slot: all dblink instances in this slot, excluding self
2018                 @type others_in_slot: list
2019                 @rtype: None
2020                 """
2021
2022                 os = _os_merge
2023                 perf_md5 = perform_md5
2024                 showMessage = self._display_merge
2025                 show_unmerge = self._show_unmerge
2026                 ignored_unlink_errnos = self._ignored_unlink_errnos
2027                 ignored_rmdir_errnos = self._ignored_rmdir_errnos
2028
2029                 if not pkgfiles:
2030                         showMessage(_("No package files given... Grabbing a set.\n"))
2031                         pkgfiles = self.getcontents()
2032
2033                 if others_in_slot is None:
2034                         others_in_slot = []
2035                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
2036                         slot_matches = self.vartree.dbapi.match(
2037                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
2038                         for cur_cpv in slot_matches:
2039                                 if cur_cpv == self.mycpv:
2040                                         continue
2041                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
2042                                         settings=self.settings,
2043                                         vartree=self.vartree, treetype="vartree", pipe=self._pipe))
2044
2045                 cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file)
2046                 stale_confmem = []
2047                 protected_symlinks = {}
2048
2049                 unmerge_orphans = "unmerge-orphans" in self.settings.features
2050                 calc_prelink = "prelink-checksums" in self.settings.features
2051
2052                 if pkgfiles:
2053                         self.updateprotect()
2054                         mykeys = list(pkgfiles)
2055                         mykeys.sort()
2056                         mykeys.reverse()
2057
2058                         #process symlinks second-to-last, directories last.
2059                         mydirs = set()
2060                         modprotect = os.path.join(self._eroot, "lib/modules/")
2061
2062                         def unlink(file_name, lstatobj):
2063                                 if bsd_chflags:
2064                                         if lstatobj.st_flags != 0:
2065                                                 bsd_chflags.lchflags(file_name, 0)
2066                                         parent_name = os.path.dirname(file_name)
2067                                         # Use normal stat/chflags for the parent since we want to
2068                                         # follow any symlinks to the real parent directory.
2069                                         pflags = os.stat(parent_name).st_flags
2070                                         if pflags != 0:
2071                                                 bsd_chflags.chflags(parent_name, 0)
2072                                 try:
2073                                         if not stat.S_ISLNK(lstatobj.st_mode):
2074                                                 # Remove permissions to ensure that any hardlinks to
2075                                                 # suid/sgid files are rendered harmless.
2076                                                 os.chmod(file_name, 0)
2077                                         os.unlink(file_name)
2078                                 except OSError as ose:
2079                                         # If the chmod or unlink fails, you are in trouble.
2080                                         # With Prefix this can be because the file is owned
2081                                         # by someone else (a screwup by root?), on a normal
2082                                         # system maybe filesystem corruption.  In any case,
2083                                         # if we backtrace and die here, we leave the system
2084                                         # in a totally undefined state, hence we just bleed
2085                                         # like hell and continue to hopefully finish all our
2086                                         # administrative and pkg_postinst stuff.
2087                                         self._eerror("postrm", 
2088                                                 ["Could not chmod or unlink '%s': %s" % \
2089                                                 (file_name, ose)])
2090                                 finally:
2091                                         if bsd_chflags and pflags != 0:
2092                                                 # Restore the parent flags we saved before unlinking
2093                                                 bsd_chflags.chflags(parent_name, pflags)
2094
2095                         unmerge_desc = {}
2096                         unmerge_desc["cfgpro"] = _("cfgpro")
2097                         unmerge_desc["replaced"] = _("replaced")
2098                         unmerge_desc["!dir"] = _("!dir")
2099                         unmerge_desc["!empty"] = _("!empty")
2100                         unmerge_desc["!fif"] = _("!fif")
2101                         unmerge_desc["!found"] = _("!found")
2102                         unmerge_desc["!md5"] = _("!md5")
2103                         unmerge_desc["!mtime"] = _("!mtime")
2104                         unmerge_desc["!obj"] = _("!obj")
2105                         unmerge_desc["!sym"] = _("!sym")
2106                         unmerge_desc["!prefix"] = _("!prefix")
2107
2108                         real_root = self.settings['ROOT']
2109                         real_root_len = len(real_root) - 1
2110                         eroot = self.settings["EROOT"]
2111
2112                         infodirs = frozenset(infodir for infodir in chain(
2113                                 self.settings.get("INFOPATH", "").split(":"),
2114                                 self.settings.get("INFODIR", "").split(":")) if infodir)
2115                         infodirs_inodes = set()
2116                         for infodir in infodirs:
2117                                 infodir = os.path.join(real_root, infodir.lstrip(os.sep))
2118                                 try:
2119                                         statobj = os.stat(infodir)
2120                                 except OSError:
2121                                         pass
2122                                 else:
2123                                         infodirs_inodes.add((statobj.st_dev, statobj.st_ino))
2124
2125                         for i, objkey in enumerate(mykeys):
2126
2127                                 obj = normalize_path(objkey)
2128                                 if os is _os_merge:
2129                                         try:
2130                                                 _unicode_encode(obj,
2131                                                         encoding=_encodings['merge'], errors='strict')
2132                                         except UnicodeEncodeError:
2133                                                 # The package appears to have been merged with a 
2134                                                 # different value of sys.getfilesystemencoding(),
2135                                                 # so fall back to utf_8 if appropriate.
2136                                                 try:
2137                                                         _unicode_encode(obj,
2138                                                                 encoding=_encodings['fs'], errors='strict')
2139                                                 except UnicodeEncodeError:
2140                                                         pass
2141                                                 else:
2142                                                         os = portage.os
2143                                                         perf_md5 = portage.checksum.perform_md5
2144
2145                                 file_data = pkgfiles[objkey]
2146                                 file_type = file_data[0]
2147
2148                                 # don't try to unmerge the prefix offset itself
2149                                 if len(obj) <= len(eroot) or not obj.startswith(eroot):
2150                                         show_unmerge("---", unmerge_desc["!prefix"], file_type, obj)
2151                                         continue
2152
2153                                 statobj = None
2154                                 try:
2155                                         statobj = os.stat(obj)
2156                                 except OSError:
2157                                         pass
2158                                 lstatobj = None
2159                                 try:
2160                                         lstatobj = os.lstat(obj)
2161                                 except (OSError, AttributeError):
2162                                         pass
2163                                 islink = lstatobj is not None and stat.S_ISLNK(lstatobj.st_mode)
2164                                 if lstatobj is None:
2165                                                 show_unmerge("---", unmerge_desc["!found"], file_type, obj)
2166                                                 continue
2167                                 # don't use EROOT, CONTENTS entries already contain EPREFIX
2168                                 if obj.startswith(real_root):
2169                                         relative_path = obj[real_root_len:]
2170                                         is_owned = False
2171                                         for dblnk in others_in_slot:
2172                                                 if dblnk.isowner(relative_path):
2173                                                         is_owned = True
2174                                                         break
2175
2176                                         if file_type == "sym" and is_owned and \
2177                                                 (islink and statobj and stat.S_ISDIR(statobj.st_mode)):
2178                                                 # A new instance of this package claims the file, so
2179                                                 # don't unmerge it. If the file is symlink to a
2180                                                 # directory and the unmerging package installed it as
2181                                                 # a symlink, but the new owner has it listed as a
2182                                                 # directory, then we'll produce a warning since the
2183                                                 # symlink is a sort of orphan in this case (see
2184                                                 # bug #326685).
2185                                                 symlink_orphan = False
2186                                                 for dblnk in others_in_slot:
2187                                                         parent_contents_key = \
2188                                                                 dblnk._match_contents(relative_path)
2189                                                         if not parent_contents_key:
2190                                                                 continue
2191                                                         if not parent_contents_key.startswith(
2192                                                                 real_root):
2193                                                                 continue
2194                                                         if dblnk.getcontents()[
2195                                                                 parent_contents_key][0] == "dir":
2196                                                                 symlink_orphan = True
2197                                                                 break
2198
2199                                                 if symlink_orphan:
2200                                                         protected_symlinks.setdefault(
2201                                                                 (statobj.st_dev, statobj.st_ino),
2202                                                                 []).append(relative_path)
2203
2204                                         if is_owned:
2205                                                 show_unmerge("---", unmerge_desc["replaced"], file_type, obj)
2206                                                 continue
2207                                         elif relative_path in cfgfiledict:
2208                                                 stale_confmem.append(relative_path)
2209                                 # next line includes a tweak to protect modules from being unmerged,
2210                                 # but we don't protect modules from being overwritten if they are
2211                                 # upgraded. We effectively only want one half of the config protection
2212                                 # functionality for /lib/modules. For portage-ng both capabilities
2213                                 # should be able to be independently specified.
2214                                 # TODO: For rebuilds, re-parent previous modules to the new
2215                                 # installed instance (so they are not orphans). For normal
2216                                 # uninstall (not rebuild/reinstall), remove the modules along
2217                                 # with all other files (leave no orphans).
2218                                 if obj.startswith(modprotect):
2219                                         show_unmerge("---", unmerge_desc["cfgpro"], file_type, obj)
2220                                         continue
2221
2222                                 # Don't unlink symlinks to directories here since that can
2223                                 # remove /lib and /usr/lib symlinks.
2224                                 if unmerge_orphans and \
2225                                         lstatobj and not stat.S_ISDIR(lstatobj.st_mode) and \
2226                                         not (islink and statobj and stat.S_ISDIR(statobj.st_mode)) and \
2227                                         not self.isprotected(obj):
2228                                         try:
2229                                                 unlink(obj, lstatobj)
2230                                         except EnvironmentError as e:
2231                                                 if e.errno not in ignored_unlink_errnos:
2232                                                         raise
2233                                                 del e
2234                                         show_unmerge("<<<", "", file_type, obj)
2235                                         continue
2236
2237                                 lmtime = str(lstatobj[stat.ST_MTIME])
2238                                 if (pkgfiles[objkey][0] not in ("dir", "fif", "dev")) and (lmtime != pkgfiles[objkey][1]):
2239                                         show_unmerge("---", unmerge_desc["!mtime"], file_type, obj)
2240                                         continue
2241
2242                                 if pkgfiles[objkey][0] == "dir":
2243                                         if lstatobj is None or not stat.S_ISDIR(lstatobj.st_mode):
2244                                                 show_unmerge("---", unmerge_desc["!dir"], file_type, obj)
2245                                                 continue
2246                                         mydirs.add((obj, (lstatobj.st_dev, lstatobj.st_ino)))
2247                                 elif pkgfiles[objkey][0] == "sym":
2248                                         if not islink:
2249                                                 show_unmerge("---", unmerge_desc["!sym"], file_type, obj)
2250                                                 continue
2251
2252                                         # If this symlink points to a directory then we don't want
2253                                         # to unmerge it if there are any other packages that
2254                                         # installed files into the directory via this symlink
2255                                         # (see bug #326685).
2256                                         # TODO: Resolving a symlink to a directory will require
2257                                         # simulation if $ROOT != / and the link is not relative.
2258                                         if islink and statobj and stat.S_ISDIR(statobj.st_mode) \
2259                                                 and obj.startswith(real_root):
2260
2261                                                 relative_path = obj[real_root_len:]
2262                                                 try:
2263                                                         target_dir_contents = os.listdir(obj)
2264                                                 except OSError:
2265                                                         pass
2266                                                 else:
2267                                                         if target_dir_contents:
2268                                                                 # If all the children are regular files owned
2269                                                                 # by this package, then the symlink should be
2270                                                                 # safe to unmerge.
2271                                                                 all_owned = True
2272                                                                 for child in target_dir_contents:
2273                                                                         child = os.path.join(relative_path, child)
2274                                                                         if not self.isowner(child):
2275                                                                                 all_owned = False
2276                                                                                 break
2277                                                                         try:
2278                                                                                 child_lstat = os.lstat(os.path.join(
2279                                                                                         real_root, child.lstrip(os.sep)))
2280                                                                         except OSError:
2281                                                                                 continue
2282
2283                                                                         if not stat.S_ISREG(child_lstat.st_mode):
2284                                                                                 # Nested symlinks or directories make
2285                                                                                 # the issue very complex, so just
2286                                                                                 # preserve the symlink in order to be
2287                                                                                 # on the safe side.
2288                                                                                 all_owned = False
2289                                                                                 break
2290
2291                                                                 if not all_owned:
2292                                                                         protected_symlinks.setdefault(
2293                                                                                 (statobj.st_dev, statobj.st_ino),
2294                                                                                 []).append(relative_path)
2295                                                                         show_unmerge("---", unmerge_desc["!empty"],
2296                                                                                 file_type, obj)
2297                                                                         continue
2298
2299                                         # Go ahead and unlink symlinks to directories here when
2300                                         # they're actually recorded as symlinks in the contents.
2301                                         # Normally, symlinks such as /lib -> lib64 are not recorded
2302                                         # as symlinks in the contents of a package.  If a package
2303                                         # installs something into ${D}/lib/, it is recorded in the
2304                                         # contents as a directory even if it happens to correspond
2305                                         # to a symlink when it's merged to the live filesystem.
2306                                         try:
2307                                                 unlink(obj, lstatobj)
2308                                                 show_unmerge("<<<", "", file_type, obj)
2309                                         except (OSError, IOError) as e:
2310                                                 if e.errno not in ignored_unlink_errnos:
2311                                                         raise
2312                                                 del e
2313                                                 show_unmerge("!!!", "", file_type, obj)
2314                                 elif pkgfiles[objkey][0] == "obj":
2315                                         if statobj is None or not stat.S_ISREG(statobj.st_mode):
2316                                                 show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
2317                                                 continue
2318                                         mymd5 = None
2319                                         try:
2320                                                 mymd5 = perf_md5(obj, calc_prelink=calc_prelink)
2321                                         except FileNotFound as e:
2322                                                 # the file has disappeared between now and our stat call
2323                                                 show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
2324                                                 continue
2325
2326                                         # string.lower is needed because db entries used to be in upper-case.  The
2327                                         # string.lower allows for backwards compatibility.
2328                                         if mymd5 != pkgfiles[objkey][2].lower():
2329                                                 show_unmerge("---", unmerge_desc["!md5"], file_type, obj)
2330                                                 continue
2331                                         try:
2332                                                 unlink(obj, lstatobj)
2333                                         except (OSError, IOError) as e:
2334                                                 if e.errno not in ignored_unlink_errnos:
2335                                                         raise
2336                                                 del e
2337                                         show_unmerge("<<<", "", file_type, obj)
2338                                 elif pkgfiles[objkey][0] == "fif":
2339                                         if not stat.S_ISFIFO(lstatobj[stat.ST_MODE]):
2340                                                 show_unmerge("---", unmerge_desc["!fif"], file_type, obj)
2341                                                 continue
2342                                         show_unmerge("---", "", file_type, obj)
2343                                 elif pkgfiles[objkey][0] == "dev":
2344                                         show_unmerge("---", "", file_type, obj)
2345
2346                         self._unmerge_dirs(mydirs, infodirs_inodes,
2347                                 protected_symlinks, unmerge_desc, unlink, os)
2348                         mydirs.clear()
2349
2350                 if protected_symlinks:
2351                         self._unmerge_protected_symlinks(others_in_slot, infodirs_inodes,
2352                                 protected_symlinks, unmerge_desc, unlink, os)
2353
2354                 if protected_symlinks:
2355                         msg = "One or more symlinks to directories have been " + \
2356                                 "preserved in order to ensure that files installed " + \
2357                                 "via these symlinks remain accessible:"
2358                         lines = textwrap.wrap(msg, 72)
2359                         lines.append("")
2360                         flat_list = set()
2361                         flat_list.update(*protected_symlinks.values())
2362                         flat_list = sorted(flat_list)
2363                         for f in flat_list:
2364                                 lines.append("\t%s" % (os.path.join(real_root,
2365                                         f.lstrip(os.sep))))
2366                         lines.append("")
2367                         self._elog("eerror", "postrm", lines)
2368
2369                 # Remove stale entries from config memory.
2370                 if stale_confmem:
2371                         for filename in stale_confmem:
2372                                 del cfgfiledict[filename]
2373                         writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
2374
2375                 #remove self from vartree database so that our own virtual gets zapped if we're the last node
2376                 self.vartree.zap(self.mycpv)
2377
	def _unmerge_protected_symlinks(self, others_in_slot, infodirs_inodes,
		protected_symlinks, unmerge_desc, unlink, os):
		"""
		Handle symlinks to directories that were preserved during unmerge
		because files may have been installed through them. If no installed
		package (other than the one being unmerged) owns any file reachable
		via these symlinks, the symlinks are unlinked and any directories
		left empty are pruned via _unmerge_dirs; otherwise the method
		returns early, leaving protected_symlinks non-empty so the caller
		can warn about the preserved symlinks.

		@param others_in_slot: dblink instances for other packages
			installed in the same slot
		@param infodirs_inodes: (st_dev, st_ino) keys of info directories,
			passed through to _unmerge_dirs
		@param protected_symlinks: maps the (st_dev, st_ino) of a symlink
			target to a list of relative symlink paths; cleared here when
			removal turns out to be safe
		@param unmerge_desc: message strings used by _show_unmerge
		@param unlink: callable used to remove a file or symlink
		@param os: os module (possibly a unicode-wrapped variant)
		"""

		real_root = self.settings['ROOT']
		show_unmerge = self._show_unmerge
		ignored_unlink_errnos = self._ignored_unlink_errnos

		# Flatten the per-inode lists of relative symlink paths into one
		# sorted list.
		flat_list = set()
		flat_list.update(*protected_symlinks.values())
		flat_list = sorted(flat_list)

		for f in flat_list:
			for dblnk in others_in_slot:
				if dblnk.isowner(f):
					# If another package in the same slot installed
					# a file via a protected symlink, return early
					# and don't bother searching for any other owners.
					return

		msg = []
		msg.append("")
		msg.append(_("Directory symlink(s) may need protection:"))
		msg.append("")

		for f in flat_list:
			msg.append("\t%s" % \
				os.path.join(real_root, f.lstrip(os.path.sep)))

		msg.append("")
		msg.append(_("Searching all installed"
			" packages for files installed via above symlink(s)..."))
		msg.append("")
		self._elog("elog", "postrm", msg)

		# Query the vardb for owners of the symlink paths while holding
		# the db lock, then flush any cache populated by the query.
		self.lockdb()
		try:
			owners = self.vartree.dbapi._owners.get_owners(flat_list)
			self.vartree.dbapi.flush_cache()
		finally:
			self.unlockdb()

		# Ignore ownership by the package currently being unmerged.
		for owner in list(owners):
			if owner.mycpv == self.mycpv:
				owners.pop(owner, None)

		if not owners:
			msg = []
			msg.append(_("The above directory symlink(s) are all "
				"safe to remove. Removing them now..."))
			msg.append("")
			self._elog("elog", "postrm", msg)
			dirs = set()
			for unmerge_syms in protected_symlinks.values():
				for relative_path in unmerge_syms:
					obj = os.path.join(real_root,
						relative_path.lstrip(os.sep))
					# Record every parent directory (up to the EROOT
					# boundary) together with its inode key, so empty
					# parents can be pruned by _unmerge_dirs below.
					parent = os.path.dirname(obj)
					while len(parent) > len(self._eroot):
						try:
							lstatobj = os.lstat(parent)
						except OSError:
							break
						else:
							dirs.add((parent,
								(lstatobj.st_dev, lstatobj.st_ino)))
							parent = os.path.dirname(parent)
					try:
						unlink(obj, os.lstat(obj))
						show_unmerge("<<<", "", "sym", obj)
					except (OSError, IOError) as e:
						if e.errno not in ignored_unlink_errnos:
							raise
						del e
						show_unmerge("!!!", "", "sym", obj)

			protected_symlinks.clear()
			self._unmerge_dirs(dirs, infodirs_inodes,
				protected_symlinks, unmerge_desc, unlink, os)
			dirs.clear()
2457
	def _unmerge_dirs(self, dirs, infodirs_inodes,
		protected_symlinks, unmerge_desc, unlink, os):
		"""
		Remove unmerged directories, deepest first. Special cases:

		  * Directories that look like GNU info directories first have
		    known auto-generated files (self._infodir_cleanup) removed,
		    so that an otherwise-empty info dir can be rmdir'd.
		  * When bsd_chflags is available, file flags that would prevent
		    removal are cleared first, and the parent directory's flags
		    are restored afterwards.
		  * When a directory is successfully removed, symlinks that
		    pointed to it (tracked by inode key in protected_symlinks)
		    no longer need protection and are unlinked as well.

		@param dirs: iterable of (path, (st_dev, st_ino)) pairs
		@param infodirs_inodes: inode keys of known info directories
		@param protected_symlinks: maps target inode key to a list of
			relative symlink paths; entries are popped when their
			target directory is removed
		@param unmerge_desc: message strings used by _show_unmerge
		@param unlink: callable used to remove a file or symlink
		@param os: os module (possibly a unicode-wrapped variant)
		"""

		show_unmerge = self._show_unmerge
		infodir_cleanup = self._infodir_cleanup
		ignored_unlink_errnos = self._ignored_unlink_errnos
		ignored_rmdir_errnos = self._ignored_rmdir_errnos
		real_root = self.settings['ROOT']

		# Sort and reverse so that child directories come before their
		# parents, allowing parents to become empty before rmdir.
		dirs = sorted(dirs)
		dirs.reverse()

		for obj, inode_key in dirs:
			# Treat any directory named "info" as a candidate here,
			# since it might have been in INFOPATH previously even
			# though it may not be there now.
			if inode_key in infodirs_inodes or \
				os.path.basename(obj) == "info":
				try:
					remaining = os.listdir(obj)
				except OSError:
					pass
				else:
					cleanup_info_dir = ()
					# Only clean up when everything left in the dir is
					# one of the known auto-generated info files.
					if remaining and \
						len(remaining) <= len(infodir_cleanup):
						if not set(remaining).difference(infodir_cleanup):
							cleanup_info_dir = remaining

					for child in cleanup_info_dir:
						child = os.path.join(obj, child)
						try:
							lstatobj = os.lstat(child)
							if stat.S_ISREG(lstatobj.st_mode):
								unlink(child, lstatobj)
								show_unmerge("<<<", "", "obj", child)
						except EnvironmentError as e:
							if e.errno not in ignored_unlink_errnos:
								raise
							del e
							show_unmerge("!!!", "", "obj", child)
			try:
				if bsd_chflags:
					lstatobj = os.lstat(obj)
					if lstatobj.st_flags != 0:
						bsd_chflags.lchflags(obj, 0)
					parent_name = os.path.dirname(obj)
					# Use normal stat/chflags for the parent since we want to
					# follow any symlinks to the real parent directory.
					pflags = os.stat(parent_name).st_flags
					if pflags != 0:
						bsd_chflags.chflags(parent_name, 0)
				try:
					os.rmdir(obj)
				finally:
					if bsd_chflags and pflags != 0:
						# Restore the parent flags we saved before unlinking
						bsd_chflags.chflags(parent_name, pflags)
				show_unmerge("<<<", "", "dir", obj)
			except EnvironmentError as e:
				if e.errno not in ignored_rmdir_errnos:
					raise
				if e.errno != errno.ENOENT:
					show_unmerge("---", unmerge_desc["!empty"], "dir", obj)
				del e
			else:
				# When a directory is successfully removed, there's
				# no need to protect symlinks that point to it.
				unmerge_syms = protected_symlinks.pop(inode_key, None)
				if unmerge_syms is not None:
					for relative_path in unmerge_syms:
						obj = os.path.join(real_root,
							relative_path.lstrip(os.sep))
						try:
							unlink(obj, os.lstat(obj))
							show_unmerge("<<<", "", "sym", obj)
						except (OSError, IOError) as e:
							if e.errno not in ignored_unlink_errnos:
								raise
							del e
							show_unmerge("!!!", "", "sym", obj)
2539
2540         def isowner(self, filename, destroot=None):
2541                 """ 
2542                 Check if a file belongs to this package. This may
2543                 result in a stat call for the parent directory of
2544                 every installed file, since the inode numbers are
2545                 used to work around the problem of ambiguous paths
2546                 caused by symlinked directories. The results of
2547                 stat calls are cached to optimize multiple calls
2548                 to this method.
2549
2550                 @param filename:
2551                 @type filename:
2552                 @param destroot:
2553                 @type destroot:
2554                 @rtype: Boolean
2555                 @return:
2556                 1. True if this package owns the file.
2557                 2. False if this package does not own the file.
2558                 """
2559
2560                 if destroot is not None and destroot != self._eroot:
2561                         warnings.warn("The second parameter of the " + \
2562                                 "portage.dbapi.vartree.dblink.isowner()" + \
2563                                 " is now unused. Instead " + \
2564                                 "self.settings['EROOT'] will be used.",
2565                                 DeprecationWarning, stacklevel=2)
2566
2567                 return bool(self._match_contents(filename))
2568
	def _match_contents(self, filename, destroot=None):
		"""
		The matching contents entry is returned, which is useful
		since the path may differ from the one given by the caller,
		due to symlinks.

		Lookup strategy: first try a direct CONTENTS match on the
		normalized path; then use a basename shortcut to rule the
		package out cheaply; finally compare the inode of the path's
		parent directory against the inodes of all CONTENTS parent
		directories (cached in self._contents_inodes) to handle
		symlinked directories.

		@param filename: path to look up
		@param destroot: deprecated and unused; self.settings['ROOT']
			is used instead
		@rtype: String
		@return: the contents entry corresponding to the given path, or False
			if the file is not owned by this package.
		"""

		filename = _unicode_decode(filename,
			encoding=_encodings['content'], errors='strict')

		if destroot is not None and destroot != self._eroot:
			warnings.warn("The second parameter of the " + \
				"portage.dbapi.vartree.dblink._match_contents()" + \
				" is now unused. Instead " + \
				"self.settings['ROOT'] will be used.",
				DeprecationWarning, stacklevel=2)

		# don't use EROOT here, image already contains EPREFIX
		destroot = self.settings['ROOT']

		# The given filename argument might have a different encoding than the
		# the filenames contained in the contents, so use separate wrapped os
		# modules for each. The basename is more likely to contain non-ascii
		# characters than the directory path, so use os_filename_arg for all
		# operations involving the basename of the filename arg.
		os_filename_arg = _os_merge
		os = _os_merge

		try:
			_unicode_encode(filename,
				encoding=_encodings['merge'], errors='strict')
		except UnicodeEncodeError:
			# The package appears to have been merged with a
			# different value of sys.getfilesystemencoding(),
			# so fall back to utf_8 if appropriate.
			try:
				_unicode_encode(filename,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeEncodeError:
				pass
			else:
				os_filename_arg = portage.os

		destfile = normalize_path(
			os_filename_arg.path.join(destroot,
			filename.lstrip(os_filename_arg.path.sep)))

		pkgfiles = self.getcontents()
		# Fast path: exact match in CONTENTS.
		if pkgfiles and destfile in pkgfiles:
			return destfile
		if pkgfiles:
			basename = os_filename_arg.path.basename(destfile)
			# Lazily build a set of all basenames in CONTENTS.
			if self._contents_basenames is None:

				try:
					for x in pkgfiles:
						_unicode_encode(x,
							encoding=_encodings['merge'],
							errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['fs'],
								errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

				self._contents_basenames = set(
					os.path.basename(x) for x in pkgfiles)
			if basename not in self._contents_basenames:
				# This is a shortcut that, in most cases, allows us to
				# eliminate this package as an owner without the need
				# to examine inode numbers of parent directories.
				return False

			# Use stat rather than lstat since we want to follow
			# any symlinks to the real parent directory.
			parent_path = os_filename_arg.path.dirname(destfile)
			try:
				parent_stat = os_filename_arg.stat(parent_path)
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
				return False
			# Lazily build a map from parent-directory inode keys to the
			# CONTENTS parent paths that reference them.
			if self._contents_inodes is None:

				if os is _os_merge:
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['merge'],
								errors='strict')
					except UnicodeEncodeError:
						# The package appears to have been merged with a 
						# different value of sys.getfilesystemencoding(),
						# so fall back to utf_8 if appropriate.
						try:
							for x in pkgfiles:
								_unicode_encode(x,
									encoding=_encodings['fs'],
									errors='strict')
						except UnicodeEncodeError:
							pass
						else:
							os = portage.os

				self._contents_inodes = {}
				parent_paths = set()
				for x in pkgfiles:
					p_path = os.path.dirname(x)
					if p_path in parent_paths:
						continue
					parent_paths.add(p_path)
					try:
						s = os.stat(p_path)
					except OSError:
						pass
					else:
						inode_key = (s.st_dev, s.st_ino)
						# Use lists of paths in case multiple
						# paths reference the same inode.
						p_path_list = self._contents_inodes.get(inode_key)
						if p_path_list is None:
							p_path_list = []
							self._contents_inodes[inode_key] = p_path_list
						if p_path not in p_path_list:
							p_path_list.append(p_path)

			# Any CONTENTS parent directory with the same inode as the
			# argument's parent might own the file under an alias path.
			p_path_list = self._contents_inodes.get(
				(parent_stat.st_dev, parent_stat.st_ino))
			if p_path_list:
				for p_path in p_path_list:
					x = os_filename_arg.path.join(p_path, basename)
					if x in pkgfiles:
						return x

		return False
2717
2718         def _linkmap_rebuild(self, **kwargs):
2719                 """
2720                 Rebuild the self._linkmap if it's not broken due to missing
2721                 scanelf binary. Also, return early if preserve-libs is disabled
2722                 and the preserve-libs registry is empty.
2723                 """
2724                 if self._linkmap_broken or \
2725                         self.vartree.dbapi._linkmap is None or \
2726                         self.vartree.dbapi._plib_registry is None or \
2727                         ("preserve-libs" not in self.settings.features and \
2728                         not self.vartree.dbapi._plib_registry.hasEntries()):
2729                         return
2730                 try:
2731                         self.vartree.dbapi._linkmap.rebuild(**kwargs)
2732                 except CommandNotFound as e:
2733                         self._linkmap_broken = True
2734                         self._display_merge(_("!!! Disabling preserve-libs " \
2735                                 "due to error: Command Not Found: %s\n") % (e,),
2736                                 level=logging.ERROR, noiselevel=-1)
2737
2738         def _find_libs_to_preserve(self, unmerge=False):
2739                 """
2740                 Get set of relative paths for libraries to be preserved. When
2741                 unmerge is False, file paths to preserve are selected from
2742                 self._installed_instance. Otherwise, paths are selected from
2743                 self.
2744                 """
2745                 if self._linkmap_broken or \
2746                         self.vartree.dbapi._linkmap is None or \
2747                         self.vartree.dbapi._plib_registry is None or \
2748                         (not unmerge and self._installed_instance is None) or \
2749                         "preserve-libs" not in self.settings.features:
2750                         return set()
2751
2752                 os = _os_merge
2753                 linkmap = self.vartree.dbapi._linkmap
2754                 if unmerge:
2755                         installed_instance = self
2756                 else:
2757                         installed_instance = self._installed_instance
2758                 old_contents = installed_instance.getcontents()
2759                 root = self.settings['ROOT']
2760                 root_len = len(root) - 1
2761                 lib_graph = digraph()
2762                 path_node_map = {}
2763
2764                 def path_to_node(path):
2765                         node = path_node_map.get(path)
2766                         if node is None:
2767                                 node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
2768                                 alt_path_node = lib_graph.get(node)
2769                                 if alt_path_node is not None:
2770                                         node = alt_path_node
2771                                 node.alt_paths.add(path)
2772                                 path_node_map[path] = node
2773                         return node
2774
2775                 consumer_map = {}
2776                 provider_nodes = set()
2777                 # Create provider nodes and add them to the graph.
2778                 for f_abs in old_contents:
2779
2780                         if os is _os_merge:
2781                                 try:
2782                                         _unicode_encode(f_abs,
2783                                                 encoding=_encodings['merge'], errors='strict')
2784                                 except UnicodeEncodeError:
2785                                         # The package appears to have been merged with a 
2786                                         # different value of sys.getfilesystemencoding(),
2787                                         # so fall back to utf_8 if appropriate.
2788                                         try:
2789                                                 _unicode_encode(f_abs,
2790                                                         encoding=_encodings['fs'], errors='strict')
2791                                         except UnicodeEncodeError:
2792                                                 pass
2793                                         else:
2794                                                 os = portage.os
2795
2796                         f = f_abs[root_len:]
2797                         if not unmerge and self.isowner(f):
2798                                 # We have an indentically named replacement file,
2799                                 # so we don't try to preserve the old copy.
2800                                 continue
2801                         try:
2802                                 consumers = linkmap.findConsumers(f,
2803                                         exclude_providers=(installed_instance.isowner,))
2804                         except KeyError:
2805                                 continue
2806                         if not consumers:
2807                                 continue
2808                         provider_node = path_to_node(f)
2809                         lib_graph.add(provider_node, None)
2810                         provider_nodes.add(provider_node)
2811                         consumer_map[provider_node] = consumers
2812
2813                 # Create consumer nodes and add them to the graph.
2814                 # Note that consumers can also be providers.
2815                 for provider_node, consumers in consumer_map.items():
2816                         for c in consumers:
2817                                 consumer_node = path_to_node(c)
2818                                 if installed_instance.isowner(c) and \
2819                                         consumer_node not in provider_nodes:
2820                                         # This is not a provider, so it will be uninstalled.
2821                                         continue
2822                                 lib_graph.add(provider_node, consumer_node)
2823
2824                 # Locate nodes which should be preserved. They consist of all
2825                 # providers that are reachable from consumers that are not
2826                 # providers themselves.
2827                 preserve_nodes = set()
2828                 for consumer_node in lib_graph.root_nodes():
2829                         if consumer_node in provider_nodes:
2830                                 continue
2831                         # Preserve all providers that are reachable from this consumer.
2832                         node_stack = lib_graph.child_nodes(consumer_node)
2833                         while node_stack:
2834                                 provider_node = node_stack.pop()
2835                                 if provider_node in preserve_nodes:
2836                                         continue
2837                                 preserve_nodes.add(provider_node)
2838                                 node_stack.extend(lib_graph.child_nodes(provider_node))
2839
2840                 preserve_paths = set()
2841                 for preserve_node in preserve_nodes:
2842                         # Preserve the library itself, and also preserve the
2843                         # soname symlink which is the only symlink that is
2844                         # strictly required.
2845                         hardlinks = set()
2846                         soname_symlinks = set()
2847                         soname = linkmap.getSoname(next(iter(preserve_node.alt_paths)))
2848                         for f in preserve_node.alt_paths:
2849                                 f_abs = os.path.join(root, f.lstrip(os.sep))
2850                                 try:
2851                                         if stat.S_ISREG(os.lstat(f_abs).st_mode):
2852                                                 hardlinks.add(f)
2853                                         elif os.path.basename(f) == soname:
2854                                                 soname_symlinks.add(f)
2855                                 except OSError:
2856                                         pass
2857
2858                         if hardlinks:
2859                                 preserve_paths.update(hardlinks)
2860                                 preserve_paths.update(soname_symlinks)
2861
2862                 return preserve_paths
2863
	def _add_preserve_libs_to_contents(self, preserve_paths):
		"""
		Preserve libs returned from _find_libs_to_preserve().

		Copy the CONTENTS entries for each path in preserve_paths (plus
		any parent directories not already listed) from the installed
		instance into this package's pending CONTENTS file in dbtmpdir,
		so the preserved files become owned by the new package. Paths
		that have no entry in the old CONTENTS are removed from
		preserve_paths (the set is mutated in place) and an error is
		logged instead.
		"""

		if not preserve_paths:
			return

		os = _os_merge
		showMessage = self._display_merge
		root = self.settings['ROOT']

		# Copy contents entries from the old package to the new one.
		new_contents = self.getcontents().copy()
		old_contents = self._installed_instance.getcontents()
		# Iterate a sorted copy so that preserve_paths itself may be
		# mutated (via remove()) during the loop.
		for f in sorted(preserve_paths):
			# NOTE: f is re-bound to its decoded form *before* the
			# remove() below; the decoded string must therefore compare
			# equal to the stored one for the removal to succeed.
			f = _unicode_decode(f,
				encoding=_encodings['content'], errors='strict')
			f_abs = os.path.join(root, f.lstrip(os.sep))
			contents_entry = old_contents.get(f_abs)
			if contents_entry is None:
				# This will probably never happen, but it might if one of the
				# paths returned from findConsumers() refers to one of the libs
				# that should be preserved yet the path is not listed in the
				# contents. Such a path might belong to some other package, so
				# it shouldn't be preserved here.
				showMessage(_("!!! File '%s' will not be preserved "
					"due to missing contents entry\n") % (f_abs,),
					level=logging.ERROR, noiselevel=-1)
				preserve_paths.remove(f)
				continue
			new_contents[f_abs] = contents_entry
			obj_type = contents_entry[0]
			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
				noiselevel=-1)
			# Add parent directories to contents if necessary.
			parent_dir = os.path.dirname(f_abs)
			while len(parent_dir) > len(root):
				new_contents[parent_dir] = ["dir"]
				prev = parent_dir
				parent_dir = os.path.dirname(parent_dir)
				if prev == parent_dir:
					break
		# Atomically replace the pending CONTENTS file and invalidate the
		# cached contents so the next getcontents() call re-reads it.
		outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
		write_contents(new_contents, root, outfile)
		outfile.close()
		self._clear_contents_cache()
2911
2912         def _find_unused_preserved_libs(self, unmerge_no_replacement):
2913                 """
2914                 Find preserved libraries that don't have any consumers left.
2915                 """
2916
2917                 if self._linkmap_broken or \
2918                         self.vartree.dbapi._linkmap is None or \
2919                         self.vartree.dbapi._plib_registry is None or \
2920                         not self.vartree.dbapi._plib_registry.hasEntries():
2921                         return {}
2922
2923                 # Since preserved libraries can be consumers of other preserved
2924                 # libraries, use a graph to track consumer relationships.
2925                 plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
2926                 linkmap = self.vartree.dbapi._linkmap
2927                 lib_graph = digraph()
2928                 preserved_nodes = set()
2929                 preserved_paths = set()
2930                 path_cpv_map = {}
2931                 path_node_map = {}
2932                 root = self.settings['ROOT']
2933
2934                 def path_to_node(path):
2935                         node = path_node_map.get(path)
2936                         if node is None:
2937                                 node = LinkageMap._LibGraphNode(linkmap._obj_key(path))
2938                                 alt_path_node = lib_graph.get(node)
2939                                 if alt_path_node is not None:
2940                                         node = alt_path_node
2941                                 node.alt_paths.add(path)
2942                                 path_node_map[path] = node
2943                         return node
2944
2945                 for cpv, plibs in plib_dict.items():
2946                         for f in plibs:
2947                                 path_cpv_map[f] = cpv
2948                                 preserved_node = path_to_node(f)
2949                                 if not preserved_node.file_exists():
2950                                         continue
2951                                 lib_graph.add(preserved_node, None)
2952                                 preserved_paths.add(f)
2953                                 preserved_nodes.add(preserved_node)
2954                                 for c in self.vartree.dbapi._linkmap.findConsumers(f):
2955                                         consumer_node = path_to_node(c)
2956                                         if not consumer_node.file_exists():
2957                                                 continue
2958                                         # Note that consumers may also be providers.
2959                                         lib_graph.add(preserved_node, consumer_node)
2960
2961                 # Eliminate consumers having providers with the same soname as an
2962                 # installed library that is not preserved. This eliminates
2963                 # libraries that are erroneously preserved due to a move from one
2964                 # directory to another.
2965                 # Also eliminate consumers that are going to be unmerged if
2966                 # unmerge_no_replacement is True.
2967                 provider_cache = {}
2968                 for preserved_node in preserved_nodes:
2969                         soname = linkmap.getSoname(preserved_node)
2970                         for consumer_node in lib_graph.parent_nodes(preserved_node):
2971                                 if consumer_node in preserved_nodes:
2972                                         continue
2973                                 if unmerge_no_replacement:
2974                                         will_be_unmerged = True
2975                                         for path in consumer_node.alt_paths:
2976                                                 if not self.isowner(path):
2977                                                         will_be_unmerged = False
2978                                                         break
2979                                         if will_be_unmerged:
2980                                                 # This consumer is not preserved and it is
2981                                                 # being unmerged, so drop this edge.
2982                                                 lib_graph.remove_edge(preserved_node, consumer_node)
2983                                                 continue
2984
2985                                 providers = provider_cache.get(consumer_node)
2986                                 if providers is None:
2987                                         providers = linkmap.findProviders(consumer_node)
2988                                         provider_cache[consumer_node] = providers
2989                                 providers = providers.get(soname)
2990                                 if providers is None:
2991                                         continue
2992                                 for provider in providers:
2993                                         if provider in preserved_paths:
2994                                                 continue
2995                                         provider_node = path_to_node(provider)
2996                                         if not provider_node.file_exists():
2997                                                 continue
2998                                         if provider_node in preserved_nodes:
2999                                                 continue
3000                                         # An alternative provider seems to be
3001                                         # installed, so drop this edge.
3002                                         lib_graph.remove_edge(preserved_node, consumer_node)
3003                                         break
3004
3005                 cpv_lib_map = {}
3006                 while lib_graph:
3007                         root_nodes = preserved_nodes.intersection(lib_graph.root_nodes())
3008                         if not root_nodes:
3009                                 break
3010                         lib_graph.difference_update(root_nodes)
3011                         unlink_list = set()
3012                         for node in root_nodes:
3013                                 unlink_list.update(node.alt_paths)
3014                         unlink_list = sorted(unlink_list)
3015                         for obj in unlink_list:
3016                                 cpv = path_cpv_map.get(obj)
3017                                 if cpv is None:
3018                                         # This means that a symlink is in the preserved libs
3019                                         # registry, but the actual lib it points to is not.
3020                                         self._display_merge(_("!!! symlink to lib is preserved, "
3021                                                 "but not the lib itself:\n!!! '%s'\n") % (obj,),
3022                                                 level=logging.ERROR, noiselevel=-1)
3023                                         continue
3024                                 removed = cpv_lib_map.get(cpv)
3025                                 if removed is None:
3026                                         removed = set()
3027                                         cpv_lib_map[cpv] = removed
3028                                 removed.add(obj)
3029
3030                 return cpv_lib_map
3031
3032         def _remove_preserved_libs(self, cpv_lib_map):
3033                 """
3034                 Remove files returned from _find_unused_preserved_libs().
3035                 """
3036
3037                 os = _os_merge
3038
3039                 files_to_remove = set()
3040                 for files in cpv_lib_map.values():
3041                         files_to_remove.update(files)
3042                 files_to_remove = sorted(files_to_remove)
3043                 showMessage = self._display_merge
3044                 root = self.settings['ROOT']
3045
3046                 parent_dirs = set()
3047                 for obj in files_to_remove:
3048                         obj = os.path.join(root, obj.lstrip(os.sep))
3049                         parent_dirs.add(os.path.dirname(obj))
3050                         if os.path.islink(obj):
3051                                 obj_type = _("sym")
3052                         else:
3053                                 obj_type = _("obj")
3054                         try:
3055                                 os.unlink(obj)
3056                         except OSError as e:
3057                                 if e.errno != errno.ENOENT:
3058                                         raise
3059                                 del e
3060                         else:
3061                                 showMessage(_("<<< !needed  %s %s\n") % (obj_type, obj),
3062                                         noiselevel=-1)
3063
3064                 # Remove empty parent directories if possible.
3065                 while parent_dirs:
3066                         x = parent_dirs.pop()
3067                         while True:
3068                                 try:
3069                                         os.rmdir(x)
3070                                 except OSError:
3071                                         break
3072                                 prev = x
3073                                 x = os.path.dirname(x)
3074                                 if x == prev:
3075                                         break
3076
3077                 self.vartree.dbapi._plib_registry.pruneNonExisting()
3078
	def _collision_protect(self, srcroot, destroot, mypkglist,
		file_list, symlink_list):

			"""
			Check the files this package is about to merge for collisions
			with files already present on the filesystem that are not owned
			by any package in mypkglist and are not config-protected.

			NOTE(review): the destroot parameter is shadowed below by
			self.settings['ROOT'], and srcroot appears unused in this body
			-- both look vestigial; confirm against callers.

			@param mypkglist: package instances whose isowner() results
				exempt a file from being counted as a collision
			@param file_list: regular-file paths (relative) to check
			@param symlink_list: symlink paths (relative) to check
			@return: (collisions, symlink_collisions, plib_collisions)
				where collisions is the list of colliding paths,
				symlink_collisions lists symlinks that collide with
				directories, and plib_collisions maps cpv -> set of
				preserved-lib paths this package will take over
			"""

			os = _os_merge

			# Expand COLLISION_IGNORE entries: a directory entry matches
			# everything beneath it via an appended "/*" glob.
			collision_ignore = []
			for x in portage.util.shlex_split(
				self.settings.get("COLLISION_IGNORE", "")):
				if os.path.isdir(os.path.join(self._eroot, x.lstrip(os.sep))):
					x = normalize_path(x)
					x += "/*"
				collision_ignore.append(x)

			# For collisions with preserved libraries, the current package
			# will assume ownership and the libraries will be unregistered.
			if self.vartree.dbapi._plib_registry is None:
				# preserve-libs is entirely disabled
				plib_cpv_map = None
				plib_paths = None
				plib_inodes = {}
			else:
				plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
				plib_cpv_map = {}
				plib_paths = set()
				for cpv, paths in plib_dict.items():
					plib_paths.update(paths)
					for f in paths:
						plib_cpv_map[f] = cpv
				# (st_dev, st_ino) -> paths, so hardlinked copies of a
				# preserved lib are recognized too.
				plib_inodes = self._lstat_inode_map(plib_paths)

			plib_collisions = {}

			showMessage = self._display_merge
			stopmerge = False
			collisions = []
			symlink_collisions = []
			# The destroot parameter is intentionally overridden here.
			destroot = self.settings['ROOT']
			showMessage(_(" %s checking %d files for package collisions\n") % \
				(colorize("GOOD", "*"), len(file_list) + len(symlink_list)))
			for i, (f, f_type) in enumerate(chain(
				((f, "reg") for f in file_list),
				((f, "sym") for f in symlink_list))):
				# Periodic progress output for large packages.
				if i % 1000 == 0 and i != 0:
					showMessage(_("%d files checked ...\n") % i)

				dest_path = normalize_path(
					os.path.join(destroot, f.lstrip(os.path.sep)))
				try:
					dest_lstat = os.lstat(dest_path)
				except EnvironmentError as e:
					if e.errno == errno.ENOENT:
						# Nothing there: no collision possible.
						del e
						continue
					elif e.errno == errno.ENOTDIR:
						del e
						# A non-directory is in a location where this package
						# expects to have a directory.
						dest_lstat = None
						parent_path = dest_path
						# Walk upward to the nearest lstat-able ancestor;
						# that non-directory is the actual collision.
						while len(parent_path) > len(destroot):
							parent_path = os.path.dirname(parent_path)
							try:
								dest_lstat = os.lstat(parent_path)
								break
							except EnvironmentError as e:
								if e.errno != errno.ENOTDIR:
									raise
								del e
						if not dest_lstat:
							raise AssertionError(
								"unable to find non-directory " + \
								"parent for '%s'" % dest_path)
						dest_path = parent_path
						# Re-point f at the offending ancestor; skip it if
						# that path was already recorded as a collision.
						f = os.path.sep + dest_path[len(destroot):]
						if f in collisions:
							continue
					else:
						raise
				if f[0] != "/":
					f="/"+f

				if stat.S_ISDIR(dest_lstat.st_mode):
					if f_type == "sym":
						# This case is explicitly banned
						# by PMS (see bug #326685).
						symlink_collisions.append(f)
						collisions.append(f)
						continue

				plibs = plib_inodes.get((dest_lstat.st_dev, dest_lstat.st_ino))
				if plibs:
					for path in plibs:
						cpv = plib_cpv_map[path]
						paths = plib_collisions.get(cpv)
						if paths is None:
							paths = set()
							plib_collisions[cpv] = paths
						paths.add(path)
					# The current package will assume ownership and the
					# libraries will be unregistered, so exclude this
					# path from the normal collisions.
					continue

				# A file is not a collision if some package in mypkglist
				# owns it, or if it is config-protected.
				isowned = False
				full_path = os.path.join(destroot, f.lstrip(os.path.sep))
				for ver in mypkglist:
					if ver.isowner(f):
						isowned = True
						break
				if not isowned and self.isprotected(full_path):
					isowned = True
				if not isowned:
					# Unowned: a collision unless COLLISION_IGNORE matches.
					f_match = full_path[len(self._eroot)-1:]
					stopmerge = True
					for pattern in collision_ignore:
						if fnmatch.fnmatch(f_match, pattern):
							stopmerge = False
							break
					if stopmerge:
						collisions.append(f)
			return collisions, symlink_collisions, plib_collisions
3200
3201         def _lstat_inode_map(self, path_iter):
3202                 """
3203                 Use lstat to create a map of the form:
3204                   {(st_dev, st_ino) : set([path1, path2, ...])}
3205                 Multiple paths may reference the same inode due to hardlinks.
3206                 All lstat() calls are relative to self.myroot.
3207                 """
3208
3209                 os = _os_merge
3210
3211                 root = self.settings['ROOT']
3212                 inode_map = {}
3213                 for f in path_iter:
3214                         path = os.path.join(root, f.lstrip(os.sep))
3215                         try:
3216                                 st = os.lstat(path)
3217                         except OSError as e:
3218                                 if e.errno not in (errno.ENOENT, errno.ENOTDIR):
3219                                         raise
3220                                 del e
3221                                 continue
3222                         key = (st.st_dev, st.st_ino)
3223                         paths = inode_map.get(key)
3224                         if paths is None:
3225                                 paths = set()
3226                                 inode_map[key] = paths
3227                         paths.add(f)
3228                 return inode_map
3229
	def _security_check(self, installed_instances):
		"""
		Scan the contents of the given installed instances for suid/sgid
		regular files whose on-disk hardlink count exceeds the number of
		links accounted for by those instances, which suggests an
		unexpected hardlink to a privileged binary.

		@param installed_instances: objects providing getcontents()
			(installed package dblink instances)
		@return: 1 if suspicious hardlinks were found (an eerror message
			is queued for the preinst phase), otherwise 0
		"""
		if not installed_instances:
			return 0

		os = _os_merge

		showMessage = self._display_merge

		file_paths = set()
		for dblnk in installed_instances:
			file_paths.update(dblnk.getcontents())
		inode_map = {}
		real_paths = set()
		for i, path in enumerate(file_paths):

			if os is _os_merge:
				try:
					_unicode_encode(path,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a 
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						_unicode_encode(path,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						# Switch to portage.os for all remaining paths.
						os = portage.os

			try:
				s = os.lstat(path)
			except OSError as e:
				# Missing paths (or paths under a non-directory) are
				# ignored.
				if e.errno not in (errno.ENOENT, errno.ENOTDIR):
					raise
				del e
				continue
			if not stat.S_ISREG(s.st_mode):
				continue
			# Deduplicate via realpath so each inode path is counted once.
			path = os.path.realpath(path)
			if path in real_paths:
				continue
			real_paths.add(path)
			# Only multi-linked suid/sgid files are of interest.
			if s.st_nlink > 1 and \
				s.st_mode & (stat.S_ISUID | stat.S_ISGID):
				k = (s.st_dev, s.st_ino)
				inode_map.setdefault(k, []).append((path, s))
		suspicious_hardlinks = []
		for path_list in inode_map.values():
			path, s = path_list[0]
			if len(path_list) == s.st_nlink:
				# All hardlinks seem to be owned by this package.
				continue
			suspicious_hardlinks.append(path_list)
		if not suspicious_hardlinks:
			return 0

		msg = []
		msg.append(_("suid/sgid file(s) "
			"with suspicious hardlink(s):"))
		msg.append("")
		for path_list in suspicious_hardlinks:
			for path, s in path_list:
				msg.append("\t%s" % path)
		msg.append("")
		msg.append(_("See the Gentoo Security Handbook " 
			"guide for advice on how to proceed."))

		self._eerror("preinst", msg)

		return 1
3302
3303         def _eqawarn(self, phase, lines):
3304                 self._elog("eqawarn", phase, lines)
3305
3306         def _eerror(self, phase, lines):
3307                 self._elog("eerror", phase, lines)
3308
3309         def _elog(self, funcname, phase, lines):
3310                 func = getattr(portage.elog.messages, funcname)
3311                 if self._scheduler is None:
3312                         for l in lines:
3313                                 func(l, phase=phase, key=self.mycpv)
3314                 else:
3315                         background = self.settings.get("PORTAGE_BACKGROUND") == "1"
3316                         log_path = None
3317                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
3318                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
3319                         out = io.StringIO()
3320                         for line in lines:
3321                                 func(line, phase=phase, key=self.mycpv, out=out)
3322                         msg = out.getvalue()
3323                         self._scheduler.output(msg,
3324                                 background=background, log_path=log_path)
3325
3326         def _elog_process(self, phasefilter=None):
3327                 cpv = self.mycpv
3328                 if self._pipe is None:
3329                         elog_process(cpv, self.settings, phasefilter=phasefilter)
3330                 else:
3331                         logdir = os.path.join(self.settings["T"], "logging")
3332                         ebuild_logentries = collect_ebuild_messages(logdir)
3333                         py_logentries = collect_messages(key=cpv).get(cpv, {})
3334                         logentries = _merge_logentries(py_logentries, ebuild_logentries)
3335                         funcnames = {
3336                                 "INFO": "einfo",
3337                                 "LOG": "elog",
3338                                 "WARN": "ewarn",
3339                                 "QA": "eqawarn",
3340                                 "ERROR": "eerror"
3341                         }
3342                         str_buffer = []
3343                         for phase, messages in logentries.items():
3344                                 for key, lines in messages:
3345                                         funcname = funcnames[key]
3346                                         if isinstance(lines, basestring):
3347                                                 lines = [lines]
3348                                         for line in lines:
3349                                                 for line in line.split('\n'):
3350                                                         fields = (funcname, phase, cpv, line)
3351                                                         str_buffer.append(' '.join(fields))
3352                                                         str_buffer.append('\n')
3353                         if str_buffer:
3354                                 os.write(self._pipe, _unicode_encode(''.join(str_buffer)))
3355
	def _emerge_log(self, msg):
		# Append msg to the emerge log via emergelog(); the first
		# argument (xterm_titles) is False, so no terminal title
		# update is requested.
		emergelog(False, msg)
3358
3359         def treewalk(self, srcroot, destroot, inforoot, myebuild, cleanup=0,
3360                 mydbapi=None, prev_mtimes=None, counter=None):
3361                 """
3362                 
3363                 This function does the following:
3364                 
3365                 calls self._preserve_libs if FEATURES=preserve-libs
3366                 calls self._collision_protect if FEATURES=collision-protect
3367                 calls doebuild(mydo=pkg_preinst)
3368                 Merges the package to the livefs
3369                 unmerges old version (if required)
3370                 calls doebuild(mydo=pkg_postinst)
3371                 calls env_update
3372                 
3373                 @param srcroot: Typically this is ${D}
3374                 @type srcroot: String (Path)
3375                 @param destroot: ignored, self.settings['ROOT'] is used instead
3376                 @type destroot: String (Path)
3377                 @param inforoot: root of the vardb entry ?
3378                 @type inforoot: String (Path)
3379                 @param myebuild: path to the ebuild that we are processing
3380                 @type myebuild: String (Path)
3381                 @param mydbapi: dbapi which is handed to doebuild.
3382                 @type mydbapi: portdbapi instance
3383                 @param prev_mtimes: { Filename:mtime } mapping for env_update
3384                 @type prev_mtimes: Dictionary
3385                 @rtype: Boolean
3386                 @return:
3387                 1. 0 on success
3388                 2. 1 on failure
3389                 
3390                 secondhand is a list of symlinks that have been skipped due to their target
3391                 not existing; we will merge these symlinks at a later time.
3392                 """
3393
3394                 os = _os_merge
3395
3396                 srcroot = _unicode_decode(srcroot,
3397                         encoding=_encodings['content'], errors='strict')
3398                 destroot = self.settings['ROOT']
3399                 inforoot = _unicode_decode(inforoot,
3400                         encoding=_encodings['content'], errors='strict')
3401                 myebuild = _unicode_decode(myebuild,
3402                         encoding=_encodings['content'], errors='strict')
3403
3404                 showMessage = self._display_merge
3405                 srcroot = normalize_path(srcroot).rstrip(os.path.sep) + os.path.sep
3406
3407                 if not os.path.isdir(srcroot):
3408                         showMessage(_("!!! Directory Not Found: D='%s'\n") % srcroot,
3409                                 level=logging.ERROR, noiselevel=-1)
3410                         return 1
3411
3412                 slot = ''
3413                 for var_name in ('CHOST', 'SLOT'):
3414                         if var_name == 'CHOST' and self.cat == 'virtual':
3415                                 try:
3416                                         os.unlink(os.path.join(inforoot, var_name))
3417                                 except OSError:
3418                                         pass
3419                                 continue
3420
3421                         f = None
3422                         try:
3423                                 f = io.open(_unicode_encode(
3424                                         os.path.join(inforoot, var_name),
3425                                         encoding=_encodings['fs'], errors='strict'),
3426                                         mode='r', encoding=_encodings['repo.content'],
3427                                         errors='replace')
3428                                 val = f.readline().strip()
3429                         except EnvironmentError as e:
3430                                 if e.errno != errno.ENOENT:
3431                                         raise
3432                                 del e
3433                                 val = ''
3434                         finally:
3435                                 if f is not None:
3436                                         f.close()
3437
3438                         if var_name == 'SLOT':
3439                                 slot = val
3440
3441                                 if not slot.strip():
3442                                         slot = self.settings.get(var_name, '')
3443                                         if not slot.strip():
3444                                                 showMessage(_("!!! SLOT is undefined\n"),
3445                                                         level=logging.ERROR, noiselevel=-1)
3446                                                 return 1
3447                                         write_atomic(os.path.join(inforoot, var_name), slot + '\n')
3448
3449                         if val != self.settings.get(var_name, ''):
3450                                 self._eqawarn('preinst',
3451                                         [_("QA Notice: Expected %(var_name)s='%(expected_value)s', got '%(actual_value)s'\n") % \
3452                                         {"var_name":var_name, "expected_value":self.settings.get(var_name, ''), "actual_value":val}])
3453
3454                 def eerror(lines):
3455                         self._eerror("preinst", lines)
3456
3457                 if not os.path.exists(self.dbcatdir):
3458                         ensure_dirs(self.dbcatdir)
3459
3460                 cp = self.mysplit[0]
3461                 slot_atom = "%s:%s" % (cp, slot)
3462
3463                 # filter any old-style virtual matches
3464                 slot_matches = [cpv for cpv in self.vartree.dbapi.match(slot_atom) \
3465                         if cpv_getkey(cpv) == cp]
3466
3467                 if self.mycpv not in slot_matches and \
3468                         self.vartree.dbapi.cpv_exists(self.mycpv):
3469                         # handle multislot or unapplied slotmove
3470                         slot_matches.append(self.mycpv)
3471
3472                 others_in_slot = []
3473                 from portage import config
3474                 for cur_cpv in slot_matches:
3475                         # Clone the config in case one of these has to be unmerged since
3476                         # we need it to have private ${T} etc... for things like elog.
3477                         settings_clone = config(clone=self.settings)
3478                         settings_clone.pop("PORTAGE_BUILDIR_LOCKED", None)
3479                         settings_clone.reset()
3480                         others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
3481                                 settings=settings_clone,
3482                                 vartree=self.vartree, treetype="vartree",
3483                                 scheduler=self._scheduler, pipe=self._pipe))
3484
3485                 retval = self._security_check(others_in_slot)
3486                 if retval:
3487                         return retval
3488
3489                 if slot_matches:
3490                         # Used by self.isprotected().
3491                         max_dblnk = None
3492                         max_counter = -1
3493                         for dblnk in others_in_slot:
3494                                 cur_counter = self.vartree.dbapi.cpv_counter(dblnk.mycpv)
3495                                 if cur_counter > max_counter:
3496                                         max_counter = cur_counter
3497                                         max_dblnk = dblnk
3498                         self._installed_instance = max_dblnk
3499
3500                 if self.settings.get("INSTALL_MASK") or \
3501                         "nodoc" in self.settings.features or \
3502                         "noinfo" in self.settings.features or \
3503                         "noman" in self.settings.features:
3504                         # Apply INSTALL_MASK before collision-protect, since it may
3505                         # be useful to avoid collisions in some scenarios.
3506                         phase = MiscFunctionsProcess(background=False,
3507                                 commands=["preinst_mask"], phase="preinst",
3508                                 scheduler=self._scheduler, settings=self.settings)
3509                         phase.start()
3510                         phase.wait()
3511
3512                 # We check for unicode encoding issues after src_install. However,
3513                 # the check must be repeated here for binary packages (it's
3514                 # inexpensive since we call os.walk() here anyway).
3515                 unicode_errors = []
3516                 line_ending_re = re.compile('[\n\r]')
3517                 srcroot_len = len(srcroot)
3518                 ed_len = len(self.settings["ED"])
3519
3520                 while True:
3521
3522                         unicode_error = False
3523                         eagain_error = False
3524
3525                         myfilelist = []
3526                         mylinklist = []
3527                         paths_with_newlines = []
3528                         def onerror(e):
3529                                 raise
3530                         walk_iter = os.walk(srcroot, onerror=onerror)
3531                         while True:
3532                                 try:
3533                                         parent, dirs, files = next(walk_iter)
3534                                 except StopIteration:
3535                                         break
3536                                 except OSError as e:
3537                                         if e.errno != errno.EAGAIN:
3538                                                 raise
3539                                         # Observed with PyPy 1.8.
3540                                         eagain_error = True
3541                                         break
3542
3543                                 try:
3544                                         parent = _unicode_decode(parent,
3545                                                 encoding=_encodings['merge'], errors='strict')
3546                                 except UnicodeDecodeError:
3547                                         new_parent = _unicode_decode(parent,
3548                                                 encoding=_encodings['merge'], errors='replace')
3549                                         new_parent = _unicode_encode(new_parent,
3550                                                 encoding='ascii', errors='backslashreplace')
3551                                         new_parent = _unicode_decode(new_parent,
3552                                                 encoding=_encodings['merge'], errors='replace')
3553                                         os.rename(parent, new_parent)
3554                                         unicode_error = True
3555                                         unicode_errors.append(new_parent[ed_len:])
3556                                         break
3557
3558                                 for fname in files:
3559                                         try:
3560                                                 fname = _unicode_decode(fname,
3561                                                         encoding=_encodings['merge'], errors='strict')
3562                                         except UnicodeDecodeError:
3563                                                 fpath = portage._os.path.join(
3564                                                         parent.encode(_encodings['merge']), fname)
3565                                                 new_fname = _unicode_decode(fname,
3566                                                         encoding=_encodings['merge'], errors='replace')
3567                                                 new_fname = _unicode_encode(new_fname,
3568                                                         encoding='ascii', errors='backslashreplace')
3569                                                 new_fname = _unicode_decode(new_fname,
3570                                                         encoding=_encodings['merge'], errors='replace')
3571                                                 new_fpath = os.path.join(parent, new_fname)
3572                                                 os.rename(fpath, new_fpath)
3573                                                 unicode_error = True
3574                                                 unicode_errors.append(new_fpath[ed_len:])
3575                                                 fname = new_fname
3576                                                 fpath = new_fpath
3577                                         else:
3578                                                 fpath = os.path.join(parent, fname)
3579
3580                                         relative_path = fpath[srcroot_len:]
3581
3582                                         if line_ending_re.search(relative_path) is not None:
3583                                                 paths_with_newlines.append(relative_path)
3584
3585                                         file_mode = os.lstat(fpath).st_mode
3586                                         if stat.S_ISREG(file_mode):
3587                                                 myfilelist.append(relative_path)
3588                                         elif stat.S_ISLNK(file_mode):
3589                                                 # Note: os.walk puts symlinks to directories in the "dirs"
3590                                                 # list and it does not traverse them since that could lead
3591                                                 # to an infinite recursion loop.
3592                                                 mylinklist.append(relative_path)
3593
3594                                 if unicode_error:
3595                                         break
3596
3597                         if not (unicode_error or eagain_error):
3598                                 break
3599
3600                 if unicode_errors:
3601                         self._elog("eqawarn", "preinst",
3602                                 _merge_unicode_error(unicode_errors))
3603
3604                 if paths_with_newlines:
3605                         msg = []
3606                         msg.append(_("This package installs one or more files containing line ending characters:"))
3607                         msg.append("")
3608                         paths_with_newlines.sort()
3609                         for f in paths_with_newlines:
3610                                 msg.append("\t/%s" % (f.replace("\n", "\\n").replace("\r", "\\r")))
3611                         msg.append("")
3612                         msg.append(_("package %s NOT merged") % self.mycpv)
3613                         msg.append("")
3614                         eerror(msg)
3615                         return 1
3616
3617                 # If there are no files to merge, and an installed package in the same
3618                 # slot has files, it probably means that something went wrong.
3619                 if self.settings.get("PORTAGE_PACKAGE_EMPTY_ABORT") == "1" and \
3620                         not myfilelist and not mylinklist and others_in_slot:
3621                         installed_files = None
3622                         for other_dblink in others_in_slot:
3623                                 installed_files = other_dblink.getcontents()
3624                                 if not installed_files:
3625                                         continue
3626                                 from textwrap import wrap
3627                                 wrap_width = 72
3628                                 msg = []
3629                                 d = {
3630                                         "new_cpv":self.mycpv,
3631                                         "old_cpv":other_dblink.mycpv
3632                                 }
3633                                 msg.extend(wrap(_("The '%(new_cpv)s' package will not install "
3634                                         "any files, but the currently installed '%(old_cpv)s'"
3635                                         " package has the following files: ") % d, wrap_width))
3636                                 msg.append("")
3637                                 msg.extend(sorted(installed_files))
3638                                 msg.append("")
3639                                 msg.append(_("package %s NOT merged") % self.mycpv)
3640                                 msg.append("")
3641                                 msg.extend(wrap(
3642                                         _("Manually run `emerge --unmerge =%s` if you "
3643                                         "really want to remove the above files. Set "
3644                                         "PORTAGE_PACKAGE_EMPTY_ABORT=\"0\" in "
3645                                         "/etc/make.conf if you do not want to "
3646                                         "abort in cases like this.") % other_dblink.mycpv,
3647                                         wrap_width))
3648                                 eerror(msg)
3649                         if installed_files:
3650                                 return 1
3651
3652                 # Make sure the ebuild environment is initialized and that ${T}/elog
3653                 # exists for logging of collision-protect eerror messages.
3654                 if myebuild is None:
3655                         myebuild = os.path.join(inforoot, self.pkg + ".ebuild")
3656                 doebuild_environment(myebuild, "preinst",
3657                         settings=self.settings, db=mydbapi)
3658                 self.settings["REPLACING_VERSIONS"] = " ".join(
3659                         [portage.versions.cpv_getversion(other.mycpv)
3660                         for other in others_in_slot])
3661                 prepare_build_dirs(settings=self.settings, cleanup=cleanup)
3662
3663                 # check for package collisions
3664                 blockers = self._blockers
3665                 if blockers is None:
3666                         blockers = []
3667                 collisions, symlink_collisions, plib_collisions = \
3668                         self._collision_protect(srcroot, destroot,
3669                         others_in_slot + blockers, myfilelist, mylinklist)
3670
3671                 if symlink_collisions:
3672                         # Symlink collisions need to be distinguished from other types
3673                         # of collisions, in order to avoid confusion (see bug #409359).
3674                         msg = _("Package '%s' has one or more collisions "
3675                                 "between symlinks and directories, which is explicitly "
3676                                 "forbidden by PMS section 13.4 (see bug #326685):") % \
3677                                 (self.settings.mycpv,)
3678                         msg = textwrap.wrap(msg, 70)
3679                         msg.append("")
3680                         for f in symlink_collisions:
3681                                 msg.append("\t%s" % os.path.join(destroot,
3682                                         f.lstrip(os.path.sep)))
3683                         msg.append("")
3684                         self._elog("eerror", "preinst", msg)
3685
3686                 if collisions:
3687                         collision_protect = "collision-protect" in self.settings.features
3688                         protect_owned = "protect-owned" in self.settings.features
3689                         msg = _("This package will overwrite one or more files that"
3690                         " may belong to other packages (see list below).")
3691                         if not (collision_protect or protect_owned):
3692                                 msg += _(" Add either \"collision-protect\" or" 
3693                                 " \"protect-owned\" to FEATURES in"
3694                                 " make.conf if you would like the merge to abort"
3695                                 " in cases like this. See the make.conf man page for"
3696                                 " more information about these features.")
3697                         if self.settings.get("PORTAGE_QUIET") != "1":
3698                                 msg += _(" You can use a command such as"
3699                                 " `portageq owners / <filename>` to identify the"
3700                                 " installed package that owns a file. If portageq"
3701                                 " reports that only one package owns a file then do NOT"
3702                                 " file a bug report. A bug report is only useful if it"
3703                                 " identifies at least two or more packages that are known"
3704                                 " to install the same file(s)."
3705                                 " If a collision occurs and you"
3706                                 " can not explain where the file came from then you"
3707                                 " should simply ignore the collision since there is not"
3708                                 " enough information to determine if a real problem"
3709                                 " exists. Please do NOT file a bug report at"
3710                                 " http://bugs.gentoo.org unless you report exactly which"
3711                                 " two packages install the same file(s). Once again,"
3712                                 " please do NOT file a bug report unless you have"
3713                                 " completely understood the above message.")
3714
3715                         self.settings["EBUILD_PHASE"] = "preinst"
3716                         from textwrap import wrap
3717                         msg = wrap(msg, 70)
3718                         if collision_protect:
3719                                 msg.append("")
3720                                 msg.append(_("package %s NOT merged") % self.settings.mycpv)
3721                         msg.append("")
3722                         msg.append(_("Detected file collision(s):"))
3723                         msg.append("")
3724
3725                         for f in collisions:
3726                                 msg.append("\t%s" % \
3727                                         os.path.join(destroot, f.lstrip(os.path.sep)))
3728
3729                         eerror(msg)
3730
3731                         owners = None
3732                         if collision_protect or protect_owned or symlink_collisions:
3733                                 msg = []
3734                                 msg.append("")
3735                                 msg.append(_("Searching all installed"
3736                                         " packages for file collisions..."))
3737                                 msg.append("")
3738                                 msg.append(_("Press Ctrl-C to Stop"))
3739                                 msg.append("")
3740                                 eerror(msg)
3741
3742                                 if len(collisions) > 20:
3743                                         # get_owners is slow for large numbers of files, so
3744                                         # don't look them all up.
3745                                         collisions = collisions[:20]
3746                                 self.lockdb()
3747                                 try:
3748                                         owners = self.vartree.dbapi._owners.get_owners(collisions)
3749                                         self.vartree.dbapi.flush_cache()
3750                                 finally:
3751                                         self.unlockdb()
3752
3753                                 for pkg, owned_files in owners.items():
3754                                         cpv = pkg.mycpv
3755                                         msg = []
3756                                         msg.append("%s" % cpv)
3757                                         for f in sorted(owned_files):
3758                                                 msg.append("\t%s" % os.path.join(destroot,
3759                                                         f.lstrip(os.path.sep)))
3760                                         msg.append("")
3761                                         eerror(msg)
3762
3763                                 if not owners:
3764                                         eerror([_("None of the installed"
3765                                                 " packages claim the file(s)."), ""])
3766
3767                         symlink_abort_msg =_("Package '%s' NOT merged since it has "
3768                                 "one or more collisions between symlinks and directories, "
3769                                 "which is explicitly forbidden by PMS section 13.4 "
3770                                 "(see bug #326685).")
3771
3772                         # The explanation about the collision and how to solve
3773                         # it may not be visible via a scrollback buffer, especially
3774                         # if the number of file collisions is large. Therefore,
3775                         # show a summary at the end.
3776                         abort = False
3777                         if symlink_collisions:
3778                                 abort = True
3779                                 msg = symlink_abort_msg % (self.settings.mycpv,)
3780                         elif collision_protect:
3781                                 abort = True
3782                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3783                                         self.settings.mycpv
3784                         elif protect_owned and owners:
3785                                 abort = True
3786                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3787                                         self.settings.mycpv
3788                         else:
3789                                 msg = _("Package '%s' merged despite file collisions.") % \
3790                                         self.settings.mycpv
3791                         msg += _(" If necessary, refer to your elog "
3792                                 "messages for the whole content of the above message.")
3793                         eerror(wrap(msg, 70))
3794
3795                         if abort:
3796                                 return 1
3797
3798                 # The merge process may move files out of the image directory,
3799                 # which causes invalidation of the .installed flag.
3800                 try:
3801                         os.unlink(os.path.join(
3802                                 os.path.dirname(normalize_path(srcroot)), ".installed"))
3803                 except OSError as e:
3804                         if e.errno != errno.ENOENT:
3805                                 raise
3806                         del e
3807
3808                 self.dbdir = self.dbtmpdir
3809                 self.delete()
3810                 ensure_dirs(self.dbtmpdir)
3811
3812                 # run preinst script
3813                 showMessage(_(">>> Merging %(cpv)s to %(destroot)s\n") % \
3814                         {"cpv":self.mycpv, "destroot":destroot})
3815                 phase = EbuildPhase(background=False, phase="preinst",
3816                         scheduler=self._scheduler, settings=self.settings)
3817                 phase.start()
3818                 a = phase.wait()
3819
3820                 # XXX: Decide how to handle failures here.
3821                 if a != os.EX_OK:
3822                         showMessage(_("!!! FAILED preinst: ")+str(a)+"\n",
3823                                 level=logging.ERROR, noiselevel=-1)
3824                         return a
3825
3826                 # copy "info" files (like SLOT, CFLAGS, etc.) into the database
3827                 for x in os.listdir(inforoot):
3828                         self.copyfile(inforoot+"/"+x)
3829
3830                 # write local package counter for recording
3831                 if counter is None:
3832                         counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv)
3833                 f = io.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'),
3834                         encoding=_encodings['fs'], errors='strict'),
3835                         mode='w', encoding=_encodings['repo.content'],
3836                         errors='backslashreplace')
3837                 f.write(_unicode_decode(str(counter)))
3838                 f.close()
3839
3840                 self.updateprotect()
3841
3842                 #if we have a file containing previously-merged config file md5sums, grab it.
3843                 self.vartree.dbapi._fs_lock()
3844                 try:
3845                         cfgfiledict = grabdict(self.vartree.dbapi._conf_mem_file)
3846                         if "NOCONFMEM" in self.settings:
3847                                 cfgfiledict["IGNORE"]=1
3848                         else:
3849                                 cfgfiledict["IGNORE"]=0
3850
3851                         # Always behave like --noconfmem is enabled for downgrades
3852                         # so that people who don't know about this option are less
3853                         # likely to get confused when doing upgrade/downgrade cycles.
3854                         pv_split = catpkgsplit(self.mycpv)[1:]
3855                         for other in others_in_slot:
3856                                 if pkgcmp(pv_split, catpkgsplit(other.mycpv)[1:]) < 0:
3857                                         cfgfiledict["IGNORE"] = 1
3858                                         break
3859
3860                         rval = self._merge_contents(srcroot, destroot, cfgfiledict)
3861                         if rval != os.EX_OK:
3862                                 return rval
3863                 finally:
3864                         self.vartree.dbapi._fs_unlock()
3865
3866                 # These caches are populated during collision-protect and the data
3867                 # they contain is now invalid. It's very important to invalidate
3868                 # the contents_inodes cache so that FEATURES=unmerge-orphans
3869                 # doesn't unmerge anything that belongs to this package that has
3870                 # just been merged.
3871                 for dblnk in others_in_slot:
3872                         dblnk._clear_contents_cache()
3873                 self._clear_contents_cache()
3874
3875                 linkmap = self.vartree.dbapi._linkmap
3876                 plib_registry = self.vartree.dbapi._plib_registry
3877                 # We initialize preserve_paths to an empty set rather
3878                 # than None here because it plays an important role
3879                 # in prune_plib_registry logic by serving to indicate
3880                 # that we have a replacement for a package that's
3881                 # being unmerged.
3882
3883                 preserve_paths = set()
3884                 needed = None
3885                 if not (self._linkmap_broken or linkmap is None or
3886                         plib_registry is None):
3887                         self.vartree.dbapi._fs_lock()
3888                         plib_registry.lock()
3889                         try:
3890                                 plib_registry.load()
3891                                 needed = os.path.join(inforoot, linkmap._needed_aux_key)
3892                                 self._linkmap_rebuild(include_file=needed)
3893
3894                                 # Preserve old libs if they are still in use
3895                                 # TODO: Handle cases where the previous instance
3896                                 # has already been uninstalled but it still has some
3897                                 # preserved libraries in the registry that we may
3898                                 # want to preserve here.
3899                                 preserve_paths = self._find_libs_to_preserve()
3900                         finally:
3901                                 plib_registry.unlock()
3902                                 self.vartree.dbapi._fs_unlock()
3903
3904                         if preserve_paths:
3905                                 self._add_preserve_libs_to_contents(preserve_paths)
3906
3907                 # If portage is reinstalling itself, remove the old
3908                 # version now since we want to use the temporary
3909                 # PORTAGE_BIN_PATH that will be removed when we return.
3910                 reinstall_self = False
3911                 if self.myroot == "/" and \
3912                         match_from_list(PORTAGE_PACKAGE_ATOM, [self.mycpv]):
3913                         reinstall_self = True
3914
3915                 emerge_log = self._emerge_log
3916
3917                 # If we have any preserved libraries then autoclean
3918                 # is forced so that preserve-libs logic doesn't have
3919                 # to account for the additional complexity of the
3920                 # AUTOCLEAN=no mode.
3921                 autoclean = self.settings.get("AUTOCLEAN", "yes") == "yes" \
3922                         or preserve_paths
3923
3924                 if autoclean:
3925                         emerge_log(_(" >>> AUTOCLEAN: %s") % (slot_atom,))
3926
3927                 others_in_slot.append(self)  # self has just been merged
3928                 for dblnk in list(others_in_slot):
3929                         if dblnk is self:
3930                                 continue
3931                         if not (autoclean or dblnk.mycpv == self.mycpv or reinstall_self):
3932                                 continue
3933                         showMessage(_(">>> Safely unmerging already-installed instance...\n"))
3934                         emerge_log(_(" === Unmerging... (%s)") % (dblnk.mycpv,))
3935                         others_in_slot.remove(dblnk) # dblnk will unmerge itself now
3936                         dblnk._linkmap_broken = self._linkmap_broken
3937                         dblnk.settings["REPLACED_BY_VERSION"] = portage.versions.cpv_getversion(self.mycpv)
3938                         dblnk.settings.backup_changes("REPLACED_BY_VERSION")
3939                         unmerge_rval = dblnk.unmerge(ldpath_mtimes=prev_mtimes,
3940                                 others_in_slot=others_in_slot, needed=needed,
3941                                 preserve_paths=preserve_paths)
3942                         dblnk.settings.pop("REPLACED_BY_VERSION", None)
3943
3944                         if unmerge_rval == os.EX_OK:
3945                                 emerge_log(_(" >>> unmerge success: %s") % (dblnk.mycpv,))
3946                         else:
3947                                 emerge_log(_(" !!! unmerge FAILURE: %s") % (dblnk.mycpv,))
3948
3949                         self.lockdb()
3950                         try:
3951                                 # TODO: Check status and abort if necessary.
3952                                 dblnk.delete()
3953                         finally:
3954                                 self.unlockdb()
3955                         showMessage(_(">>> Original instance of package unmerged safely.\n"))
3956
3957                 if len(others_in_slot) > 1:
3958                         showMessage(colorize("WARN", _("WARNING:"))
3959                                 + _(" AUTOCLEAN is disabled.  This can cause serious"
3960                                 " problems due to overlapping packages.\n"),
3961                                 level=logging.WARN, noiselevel=-1)
3962
3963                 # We hold both directory locks.
3964                 self.dbdir = self.dbpkgdir
3965                 self.lockdb()
3966                 try:
3967                         self.delete()
3968                         _movefile(self.dbtmpdir, self.dbpkgdir, mysettings=self.settings)
3969                 finally:
3970                         self.unlockdb()
3971
3972                 # Check for file collisions with blocking packages
3973                 # and remove any colliding files from their CONTENTS
3974                 # since they now belong to this package.
3975                 self._clear_contents_cache()
3976                 contents = self.getcontents()
3977                 destroot_len = len(destroot) - 1
3978                 self.lockdb()
3979                 try:
3980                         for blocker in blockers:
3981                                 self.vartree.dbapi.removeFromContents(blocker, iter(contents),
3982                                         relative_paths=False)
3983                 finally:
3984                         self.unlockdb()
3985
3986                 plib_registry = self.vartree.dbapi._plib_registry
3987                 if plib_registry:
3988                         self.vartree.dbapi._fs_lock()
3989                         plib_registry.lock()
3990                         try:
3991                                 plib_registry.load()
3992
3993                                 if preserve_paths:
3994                                         # keep track of the libs we preserved
3995                                         plib_registry.register(self.mycpv, slot, counter,
3996                                                 sorted(preserve_paths))
3997
3998                                 # Unregister any preserved libs that this package has overwritten
3999                                 # and update the contents of the packages that owned them.
4000                                 plib_dict = plib_registry.getPreservedLibs()
4001                                 for cpv, paths in plib_collisions.items():
4002                                         if cpv not in plib_dict:
4003                                                 continue
4004                                         has_vdb_entry = False
4005                                         if cpv != self.mycpv:
4006                                                 # If we've replaced another instance with the
4007                                                 # same cpv then the vdb entry no longer belongs
4008                                                 # to it, so we'll have to get the slot and counter
4009                                                 # from plib_registry._data instead.
4010                                                 self.vartree.dbapi.lock()
4011                                                 try:
4012                                                         try:
4013                                                                 slot, counter = self.vartree.dbapi.aux_get(
4014                                                                         cpv, ["SLOT", "COUNTER"])
4015                                                         except KeyError:
4016                                                                 pass
4017                                                         else:
4018                                                                 has_vdb_entry = True
4019                                                                 self.vartree.dbapi.removeFromContents(
4020                                                                         cpv, paths)
4021                                                 finally:
4022                                                         self.vartree.dbapi.unlock()
4023
4024                                         if not has_vdb_entry:
4025                                                 # It's possible for previously unmerged packages
4026                                                 # to have preserved libs in the registry, so try
4027                                                 # to retrieve the slot and counter from there.
4028                                                 has_registry_entry = False
4029                                                 for plib_cps, (plib_cpv, plib_counter, plib_paths) in \
4030                                                         plib_registry._data.items():
4031                                                         if plib_cpv != cpv:
4032                                                                 continue
4033                                                         try:
4034                                                                 cp, slot = plib_cps.split(":", 1)
4035                                                         except ValueError:
4036                                                                 continue
4037                                                         counter = plib_counter
4038                                                         has_registry_entry = True
4039                                                         break
4040
4041                                                 if not has_registry_entry:
4042                                                         continue
4043
4044                                         remaining = [f for f in plib_dict[cpv] if f not in paths]
4045                                         plib_registry.register(cpv, slot, counter, remaining)
4046
4047                                 plib_registry.store()
4048                         finally:
4049                                 plib_registry.unlock()
4050                                 self.vartree.dbapi._fs_unlock()
4051
4052                 self.vartree.dbapi._add(self)
4053                 contents = self.getcontents()
4054
4055                 #do postinst script
4056                 self.settings["PORTAGE_UPDATE_ENV"] = \
4057                         os.path.join(self.dbpkgdir, "environment.bz2")
4058                 self.settings.backup_changes("PORTAGE_UPDATE_ENV")
4059                 try:
4060                         phase = EbuildPhase(background=False, phase="postinst",
4061                                 scheduler=self._scheduler, settings=self.settings)
4062                         phase.start()
4063                         a = phase.wait()
4064                         if a == os.EX_OK:
4065                                 showMessage(_(">>> %s merged.\n") % self.mycpv)
4066                 finally:
4067                         self.settings.pop("PORTAGE_UPDATE_ENV", None)
4068
4069                 if a != os.EX_OK:
4070                         # It's stupid to bail out here, so keep going regardless of
4071                         # phase return code.
4072                         showMessage(_("!!! FAILED postinst: ")+str(a)+"\n",
4073                                 level=logging.ERROR, noiselevel=-1)
4074
4075                 #update environment settings, library paths. DO NOT change symlinks.
4076                 env_update(
4077                         target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes,
4078                         contents=contents, env=self.settings,
4079                         writemsg_level=self._display_merge, vardbapi=self.vartree.dbapi)
4080
4081                 # For gcc upgrades, preserved libs have to be removed after the
4082                 # the library path has been updated.
4083                 self._prune_plib_registry()
4084
4085                 return os.EX_OK
4086
4087         def _new_backup_path(self, p):
4088                 """
4089                 The works for any type path, such as a regular file, symlink,
4090                 or directory. The parent directory is assumed to exist.
4091                 The returned filename is of the form p + '.backup.' + x, where
4092                 x guarantees that the returned path does not exist yet.
4093                 """
4094                 os = _os_merge
4095
4096                 x = -1
4097                 while True:
4098                         x += 1
4099                         backup_p = p + '.backup.' + str(x).rjust(4, '0')
4100                         try:
4101                                 os.lstat(backup_p)
4102                         except OSError:
4103                                 break
4104
4105                 return backup_p
4106
4107         def _merge_contents(self, srcroot, destroot, cfgfiledict):
4108
4109                 cfgfiledict_orig = cfgfiledict.copy()
4110
4111                 # open CONTENTS file (possibly overwriting old one) for recording
4112                 # Use atomic_ofstream for automatic coercion of raw bytes to
4113                 # unicode, in order to prevent TypeError when writing raw bytes
4114                 # to TextIOWrapper with python2.
4115                 outfile = atomic_ofstream(_unicode_encode(
4116                         os.path.join(self.dbtmpdir, 'CONTENTS'),
4117                         encoding=_encodings['fs'], errors='strict'),
4118                         mode='w', encoding=_encodings['repo.content'],
4119                         errors='backslashreplace')
4120
4121                 # Don't bump mtimes on merge since some application require
4122                 # preservation of timestamps.  This means that the unmerge phase must
4123                 # check to see if file belongs to an installed instance in the same
4124                 # slot.
4125                 mymtime = None
4126
4127                 # set umask to 0 for merging; back up umask, save old one in prevmask (since this is a global change)
4128                 prevmask = os.umask(0)
4129                 secondhand = []
4130
4131                 # we do a first merge; this will recurse through all files in our srcroot but also build up a
4132                 # "second hand" of symlinks to merge later
4133                 if self.mergeme(srcroot, destroot, outfile, secondhand,
4134                         self.settings["EPREFIX"].lstrip(os.sep), cfgfiledict, mymtime):
4135                         return 1
4136
4137                 # now, it's time for dealing our second hand; we'll loop until we can't merge anymore.  The rest are
4138                 # broken symlinks.  We'll merge them too.
4139                 lastlen = 0
4140                 while len(secondhand) and len(secondhand)!=lastlen:
4141                         # clear the thirdhand.  Anything from our second hand that
4142                         # couldn't get merged will be added to thirdhand.
4143
4144                         thirdhand = []
4145                         if self.mergeme(srcroot, destroot, outfile, thirdhand,
4146                                 secondhand, cfgfiledict, mymtime):
4147                                 return 1
4148
4149                         #swap hands
4150                         lastlen = len(secondhand)
4151
4152                         # our thirdhand now becomes our secondhand.  It's ok to throw
4153                         # away secondhand since thirdhand contains all the stuff that
4154                         # couldn't be merged.
4155                         secondhand = thirdhand
4156
4157                 if len(secondhand):
4158                         # force merge of remaining symlinks (broken or circular; oh well)
4159                         if self.mergeme(srcroot, destroot, outfile, None,
4160                                 secondhand, cfgfiledict, mymtime):
4161                                 return 1
4162
4163                 #restore umask
4164                 os.umask(prevmask)
4165
4166                 #if we opened it, close it
4167                 outfile.flush()
4168                 outfile.close()
4169
4170                 # write out our collection of md5sums
4171                 if cfgfiledict != cfgfiledict_orig:
4172                         cfgfiledict.pop("IGNORE", None)
4173                         try:
4174                                 writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
4175                         except InvalidLocation:
4176                                 self.settings._init_dirs()
4177                                 writedict(cfgfiledict, self.vartree.dbapi._conf_mem_file)
4178
4179                 return os.EX_OK
4180
4181         def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime):
4182                 """
4183                 
4184                 This function handles actual merging of the package contents to the livefs.
4185                 It also handles config protection.
4186                 
4187                 @param srcroot: Where are we copying files from (usually ${D})
4188                 @type srcroot: String (Path)
4189                 @param destroot: Typically ${ROOT}
4190                 @type destroot: String (Path)
4191                 @param outfile: File to log operations to
4192                 @type outfile: File Object
4193                 @param secondhand: A set of items to merge in pass two (usually
4194                 or symlinks that point to non-existing files that may get merged later)
4195                 @type secondhand: List
4196                 @param stufftomerge: Either a diretory to merge, or a list of items.
4197                 @type stufftomerge: String or List
4198                 @param cfgfiledict: { File:mtime } mapping for config_protected files
4199                 @type cfgfiledict: Dictionary
4200                 @param thismtime: The current time (typically long(time.time())
4201                 @type thismtime: Long
4202                 @rtype: None or Boolean
4203                 @return:
4204                 1. True on failure
4205                 2. None otherwise
4206                 
4207                 """
4208
4209                 showMessage = self._display_merge
4210                 writemsg = self._display_merge
4211
4212                 os = _os_merge
4213                 sep = os.sep
4214                 join = os.path.join
4215                 srcroot = normalize_path(srcroot).rstrip(sep) + sep
4216                 destroot = normalize_path(destroot).rstrip(sep) + sep
4217                 calc_prelink = "prelink-checksums" in self.settings.features
4218
4219                 protect_if_modified = \
4220                         "config-protect-if-modified" in self.settings.features and \
4221                         self._installed_instance is not None
4222
4223                 # this is supposed to merge a list of files.  There will be 2 forms of argument passing.
4224                 if isinstance(stufftomerge, basestring):
4225                         #A directory is specified.  Figure out protection paths, listdir() it and process it.
4226                         mergelist = os.listdir(join(srcroot, stufftomerge))
4227                         offset = stufftomerge
4228                 else:
4229                         mergelist = stufftomerge
4230                         offset = ""
4231
4232                 for i, x in enumerate(mergelist):
4233
4234                         mysrc = join(srcroot, offset, x)
4235                         mydest = join(destroot, offset, x)
4236                         # myrealdest is mydest without the $ROOT prefix (makes a difference if ROOT!="/")
4237                         myrealdest = join(sep, offset, x)
4238                         # stat file once, test using S_* macros many times (faster that way)
4239                         mystat = os.lstat(mysrc)
4240                         mymode = mystat[stat.ST_MODE]
4241                         # handy variables; mydest is the target object on the live filesystems;
4242                         # mysrc is the source object in the temporary install dir
4243                         try:
4244                                 mydstat = os.lstat(mydest)
4245                                 mydmode = mydstat.st_mode
4246                         except OSError as e:
4247                                 if e.errno != errno.ENOENT:
4248                                         raise
4249                                 del e
4250                                 #dest file doesn't exist
4251                                 mydstat = None
4252                                 mydmode = None
4253
4254                         if stat.S_ISLNK(mymode):
4255                                 # we are merging a symbolic link
4256                                 # The file name of mysrc and the actual file that it points to
4257                                 # will have earlier been forcefully converted to the 'merge'
4258                                 # encoding if necessary, but the content of the symbolic link
4259                                 # may need to be forcefully converted here.
4260                                 myto = _os.readlink(_unicode_encode(mysrc,
4261                                         encoding=_encodings['merge'], errors='strict'))
4262                                 try:
4263                                         myto = _unicode_decode(myto,
4264                                                 encoding=_encodings['merge'], errors='strict')
4265                                 except UnicodeDecodeError:
4266                                         myto = _unicode_decode(myto, encoding=_encodings['merge'],
4267                                                 errors='replace')
4268                                         myto = _unicode_encode(myto, encoding='ascii',
4269                                                 errors='backslashreplace')
4270                                         myto = _unicode_decode(myto, encoding=_encodings['merge'],
4271                                                 errors='replace')
4272                                         os.unlink(mysrc)
4273                                         os.symlink(myto, mysrc)
4274
4275                                 # Pass in the symlink target in order to bypass the
4276                                 # os.readlink() call inside abssymlink(), since that
4277                                 # call is unsafe if the merge encoding is not ascii
4278                                 # or utf_8 (see bug #382021).
4279                                 myabsto = abssymlink(mysrc, target=myto)
4280
4281                                 if myabsto.startswith(srcroot):
4282                                         myabsto = myabsto[len(srcroot):]
4283                                 myabsto = myabsto.lstrip(sep)
4284                                 if self.settings and self.settings["D"]:
4285                                         if myto.startswith(self.settings["D"]):
4286                                                 myto = myto[len(self.settings["D"]):]
4287                                 # myrealto contains the path of the real file to which this symlink points.
4288                                 # we can simply test for existence of this file to see if the target has been merged yet
4289                                 myrealto = normalize_path(os.path.join(destroot, myabsto))
4290                                 if mydmode!=None:
4291                                         #destination exists
4292                                         if stat.S_ISDIR(mydmode):
4293                                                 # we can't merge a symlink over a directory
4294                                                 newdest = self._new_backup_path(mydest)
4295                                                 msg = []
4296                                                 msg.append("")
4297                                                 msg.append(_("Installation of a symlink is blocked by a directory:"))
4298                                                 msg.append("  '%s'" % mydest)
4299                                                 msg.append(_("This symlink will be merged with a different name:"))
4300                                                 msg.append("  '%s'" % newdest)
4301                                                 msg.append("")
4302                                                 self._eerror("preinst", msg)
4303                                                 mydest = newdest
4304
4305                                         elif not stat.S_ISLNK(mydmode):
4306                                                 if os.path.exists(mysrc) and stat.S_ISDIR(os.stat(mysrc)[stat.ST_MODE]):
4307                                                         # Kill file blocking installation of symlink to dir #71787
4308                                                         pass
4309                                                 elif self.isprotected(mydest):
4310                                                         # Use md5 of the target in ${D} if it exists...
4311                                                         try:
4312                                                                 newmd5 = perform_md5(join(srcroot, myabsto))
4313                                                         except FileNotFound:
4314                                                                 # Maybe the target is merged already.
4315                                                                 try:
4316                                                                         newmd5 = perform_md5(myrealto)
4317                                                                 except FileNotFound:
4318                                                                         newmd5 = None
4319                                                         mydest = new_protect_filename(mydest, newmd5=newmd5)
4320
4321                                 # if secondhand is None it means we're operating in "force" mode and should not create a second hand.
4322                                 if (secondhand != None) and (not os.path.exists(myrealto)):
4323                                         # either the target directory doesn't exist yet or the target file doesn't exist -- or
4324                                         # the target is a broken symlink.  We will add this file to our "second hand" and merge
4325                                         # it later.
4326                                         secondhand.append(mysrc[len(srcroot):])
4327                                         continue
4328                                 # unlinking no longer necessary; "movefile" will overwrite symlinks atomically and correctly
4329                                 mymtime = movefile(mysrc, mydest, newmtime=thismtime,
4330                                         sstat=mystat, mysettings=self.settings,
4331                                         encoding=_encodings['merge'])
4332                                 if mymtime != None:
4333                                         showMessage(">>> %s -> %s\n" % (mydest, myto))
4334                                         outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
4335                                 else:
4336                                         showMessage(_("!!! Failed to move file.\n"),
4337                                                 level=logging.ERROR, noiselevel=-1)
4338                                         showMessage("!!! %s -> %s\n" % (mydest, myto),
4339                                                 level=logging.ERROR, noiselevel=-1)
4340                                         return 1
4341                         elif stat.S_ISDIR(mymode):
4342                                 # we are merging a directory
4343                                 if mydmode != None:
4344                                         # destination exists
4345
4346                                         if bsd_chflags:
4347                                                 # Save then clear flags on dest.
4348                                                 dflags = mydstat.st_flags
4349                                                 if dflags != 0:
4350                                                         bsd_chflags.lchflags(mydest, 0)
4351
4352                                         if not os.access(mydest, os.W_OK):
4353                                                 pkgstuff = pkgsplit(self.pkg)
4354                                                 writemsg(_("\n!!! Cannot write to '%s'.\n") % mydest, noiselevel=-1)
4355                                                 writemsg(_("!!! Please check permissions and directories for broken symlinks.\n"))
4356                                                 writemsg(_("!!! You may start the merge process again by using ebuild:\n"))
4357                                                 writemsg("!!! ebuild "+self.settings["PORTDIR"]+"/"+self.cat+"/"+pkgstuff[0]+"/"+self.pkg+".ebuild merge\n")
4358                                                 writemsg(_("!!! And finish by running this: env-update\n\n"))
4359                                                 return 1
4360
4361                                         if stat.S_ISDIR(mydmode) or \
4362                                                 (stat.S_ISLNK(mydmode) and os.path.isdir(mydest)):
4363                                                 # a symlink to an existing directory will work for us; keep it:
4364                                                 showMessage("--- %s/\n" % mydest)
4365                                                 if bsd_chflags:
4366                                                         bsd_chflags.lchflags(mydest, dflags)
4367                                         else:
4368                                                 # a non-directory and non-symlink-to-directory.  Won't work for us.  Move out of the way.
4369                                                 backup_dest = self._new_backup_path(mydest)
4370                                                 msg = []
4371                                                 msg.append("")
4372                                                 msg.append(_("Installation of a directory is blocked by a file:"))
4373                                                 msg.append("  '%s'" % mydest)
4374                                                 msg.append(_("This file will be renamed to a different name:"))
4375                                                 msg.append("  '%s'" % backup_dest)
4376                                                 msg.append("")
4377                                                 self._eerror("preinst", msg)
4378                                                 if movefile(mydest, backup_dest,
4379                                                         mysettings=self.settings,
4380                                                         encoding=_encodings['merge']) is None:
4381                                                         return 1
4382                                                 showMessage(_("bak %s %s.backup\n") % (mydest, mydest),
4383                                                         level=logging.ERROR, noiselevel=-1)
4384                                                 #now create our directory
4385                                                 try:
4386                                                         if self.settings.selinux_enabled():
4387                                                                 _selinux_merge.mkdir(mydest, mysrc)
4388                                                         else:
4389                                                                 os.mkdir(mydest)
4390                                                 except OSError as e:
4391                                                         # Error handling should be equivalent to
4392                                                         # portage.util.ensure_dirs() for cases
4393                                                         # like bug #187518.
4394                                                         if e.errno in (errno.EEXIST,):
4395                                                                 pass
4396                                                         elif os.path.isdir(mydest):
4397                                                                 pass
4398                                                         else:
4399                                                                 raise
4400                                                         del e
4401
4402                                                 if bsd_chflags:
4403                                                         bsd_chflags.lchflags(mydest, dflags)
4404                                                 os.chmod(mydest, mystat[0])
4405                                                 os.chown(mydest, mystat[4], mystat[5])
4406                                                 showMessage(">>> %s/\n" % mydest)
4407                                 else:
4408                                         try:
4409                                                 #destination doesn't exist
4410                                                 if self.settings.selinux_enabled():
4411                                                         _selinux_merge.mkdir(mydest, mysrc)
4412                                                 else:
4413                                                         os.mkdir(mydest)
4414                                         except OSError as e:
4415                                                 # Error handling should be equivalent to
4416                                                 # portage.util.ensure_dirs() for cases
4417                                                 # like bug #187518.
4418                                                 if e.errno in (errno.EEXIST,):
4419                                                         pass
4420                                                 elif os.path.isdir(mydest):
4421                                                         pass
4422                                                 else:
4423                                                         raise
4424                                                 del e
4425                                         os.chmod(mydest, mystat[0])
4426                                         os.chown(mydest, mystat[4], mystat[5])
4427                                         showMessage(">>> %s/\n" % mydest)
4428                                 outfile.write("dir "+myrealdest+"\n")
4429                                 # recurse and merge this directory
4430                                 if self.mergeme(srcroot, destroot, outfile, secondhand,
4431                                         join(offset, x), cfgfiledict, thismtime):
4432                                         return 1
4433                         elif stat.S_ISREG(mymode):
4434                                 # we are merging a regular file
4435                                 mymd5 = perform_md5(mysrc, calc_prelink=calc_prelink)
4436                                 # calculate config file protection stuff
4437                                 mydestdir = os.path.dirname(mydest)
4438                                 moveme = 1
4439                                 zing = "!!!"
4440                                 mymtime = None
4441                                 protected = self.isprotected(mydest)
4442                                 if mydmode != None:
4443                                         # destination file exists
4444                                         
4445                                         if stat.S_ISDIR(mydmode):
4446                                                 # install of destination is blocked by an existing directory with the same name
4447                                                 newdest = self._new_backup_path(mydest)
4448                                                 msg = []
4449                                                 msg.append("")
4450                                                 msg.append(_("Installation of a regular file is blocked by a directory:"))
4451                                                 msg.append("  '%s'" % mydest)
4452                                                 msg.append(_("This file will be merged with a different name:"))
4453                                                 msg.append("  '%s'" % newdest)
4454                                                 msg.append("")
4455                                                 self._eerror("preinst", msg)
4456                                                 mydest = newdest
4457
4458                                         elif stat.S_ISREG(mydmode) or (stat.S_ISLNK(mydmode) and os.path.exists(mydest) and stat.S_ISREG(os.stat(mydest)[stat.ST_MODE])):
4459                                                 # install of destination is blocked by an existing regular file,
4460                                                 # or by a symlink to an existing regular file;
4461                                                 # now, config file management may come into play.
4462                                                 # we only need to tweak mydest if cfg file management is in play.
4463                                                 if protected:
4464                                                         destmd5 = perform_md5(mydest, calc_prelink=calc_prelink)
4465                                                         if protect_if_modified:
4466                                                                 contents_key = \
4467                                                                         self._installed_instance._match_contents(myrealdest)
4468                                                                 if contents_key:
4469                                                                         inst_info = self._installed_instance.getcontents()[contents_key]
4470                                                                         if inst_info[0] == "obj" and inst_info[2] == destmd5:
4471                                                                                 protected = False
4472
4473                                                 if protected:
4474                                                         # we have a protection path; enable config file management.
4475                                                         cfgprot = 0
4476                                                         if mymd5 == destmd5:
4477                                                                 #file already in place; simply update mtimes of destination
4478                                                                 moveme = 1
4479                                                         else:
4480                                                                 if mymd5 == cfgfiledict.get(myrealdest, [None])[0]:
4481                                                                         """ An identical update has previously been
4482                                                                         merged.  Skip it unless the user has chosen
4483                                                                         --noconfmem."""
4484                                                                         moveme = cfgfiledict["IGNORE"]
4485                                                                         cfgprot = cfgfiledict["IGNORE"]
4486                                                                         if not moveme:
4487                                                                                 zing = "---"
4488                                                                                 mymtime = mystat[stat.ST_MTIME]
4489                                                                 else:
4490                                                                         moveme = 1
4491                                                                         cfgprot = 1
4492                                                         if moveme:
4493                                                                 # Merging a new file, so update confmem.
4494                                                                 cfgfiledict[myrealdest] = [mymd5]
4495                                                         elif destmd5 == cfgfiledict.get(myrealdest, [None])[0]:
4496                                                                 """A previously remembered update has been
4497                                                                 accepted, so it is removed from confmem."""
4498                                                                 del cfgfiledict[myrealdest]
4499
4500                                                         if cfgprot:
4501                                                                 mydest = new_protect_filename(mydest, newmd5=mymd5)
4502
4503                                 # whether config protection or not, we merge the new file the
4504                                 # same way.  Unless moveme=0 (blocking directory)
4505                                 if moveme:
4506                                         # Create hardlinks only for source files that already exist
4507                                         # as hardlinks (having identical st_dev and st_ino).
4508                                         hardlink_key = (mystat.st_dev, mystat.st_ino)
4509
4510                                         hardlink_candidates = self._hardlink_merge_map.get(hardlink_key)
4511                                         if hardlink_candidates is None:
4512                                                 hardlink_candidates = []
4513                                                 self._hardlink_merge_map[hardlink_key] = hardlink_candidates
4514
4515                                         mymtime = movefile(mysrc, mydest, newmtime=thismtime,
4516                                                 sstat=mystat, mysettings=self.settings,
4517                                                 hardlink_candidates=hardlink_candidates,
4518                                                 encoding=_encodings['merge'])
4519                                         if mymtime is None:
4520                                                 return 1
4521                                         hardlink_candidates.append(mydest)
4522                                         zing = ">>>"
4523
4524                                 if mymtime != None:
4525                                         outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
4526                                 showMessage("%s %s\n" % (zing,mydest))
4527                         else:
4528                                 # we are merging a fifo or device node
4529                                 zing = "!!!"
4530                                 if mydmode is None:
4531                                         # destination doesn't exist
4532                                         if movefile(mysrc, mydest, newmtime=thismtime,
4533                                                 sstat=mystat, mysettings=self.settings,
4534                                                 encoding=_encodings['merge']) is not None:
4535                                                 zing = ">>>"
4536                                         else:
4537                                                 return 1
4538                                 if stat.S_ISFIFO(mymode):
4539                                         outfile.write("fif %s\n" % myrealdest)
4540                                 else:
4541                                         outfile.write("dev %s\n" % myrealdest)
4542                                 showMessage(zing + " " + mydest + "\n")
4543
	def merge(self, mergeroot, inforoot, myroot=None, myebuild=None, cleanup=0,
		mydbapi=None, prev_mtimes=None, counter=None):
		"""
		Merge this package into the live filesystem via treewalk(),
		then run the success/die hooks and, when appropriate, the
		clean phase for its build directory.

		@param mergeroot: source tree containing the files to merge
		@param inforoot: directory containing the package metadata
		@param myroot: ignored, self._eroot is used instead
		@param myebuild: path to the ebuild; if None, it is derived
			from inforoot when the clean phase runs
		@param cleanup: passed through to treewalk()
		@param mydbapi: dbapi instance passed to treewalk() and
			doebuild_environment()
		@param prev_mtimes: passed through to treewalk()
		@param counter: passed through to treewalk()
		@return: the return code from treewalk()
		"""
		myroot = None
		retval = -1
		parallel_install = "parallel-install" in self.settings.features
		# Without parallel-install, hold the vardb lock for the
		# entire merge; with it, locking is skipped here.
		if not parallel_install:
			self.lockdb()
		self.vartree.dbapi._bump_mtime(self.mycpv)
		if self._scheduler is None:
			self._scheduler = PollScheduler().sched_iface
		try:
			retval = self.treewalk(mergeroot, myroot, inforoot, myebuild,
				cleanup=cleanup, mydbapi=mydbapi, prev_mtimes=prev_mtimes,
				counter=counter)

			# If PORTAGE_BUILDDIR doesn't exist, then it probably means
			# fail-clean is enabled, and the success/die hooks have
			# already been called by EbuildPhase.
			if os.path.isdir(self.settings['PORTAGE_BUILDDIR']):

				if retval == os.EX_OK:
					phase = 'success_hooks'
				else:
					phase = 'die_hooks'

				ebuild_phase = MiscFunctionsProcess(
					background=False, commands=[phase],
					scheduler=self._scheduler, settings=self.settings)
				ebuild_phase.start()
				ebuild_phase.wait()
				self._elog_process()

				# Clean the build dir unless FEATURES=noclean; after a
				# failed merge, clean only when fail-clean is enabled.
				if 'noclean' not in self.settings.features and \
					(retval == os.EX_OK or \
					'fail-clean' in self.settings.features):
					if myebuild is None:
						myebuild = os.path.join(inforoot, self.pkg + ".ebuild")

					doebuild_environment(myebuild, "clean",
						settings=self.settings, db=mydbapi)
					phase = EbuildPhase(background=False, phase="clean",
						scheduler=self._scheduler, settings=self.settings)
					phase.start()
					phase.wait()
		finally:
			self.settings.pop('REPLACING_VERSIONS', None)
			if self.vartree.dbapi._linkmap is None:
				# preserve-libs is entirely disabled
				pass
			else:
				# Installed contents changed, so the cached linkage
				# map is stale now.
				self.vartree.dbapi._linkmap._clear_cache()
			self.vartree.dbapi._bump_mtime(self.mycpv)
			if not parallel_install:
				self.unlockdb()
		return retval
4602
4603         def getstring(self,name):
4604                 "returns contents of a file with whitespace converted to spaces"
4605                 if not os.path.exists(self.dbdir+"/"+name):
4606                         return ""
4607                 mydata = io.open(
4608                         _unicode_encode(os.path.join(self.dbdir, name),
4609                         encoding=_encodings['fs'], errors='strict'),
4610                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4611                         ).read().split()
4612                 return " ".join(mydata)
4613
4614         def copyfile(self,fname):
4615                 shutil.copyfile(fname,self.dbdir+"/"+os.path.basename(fname))
4616
4617         def getfile(self,fname):
4618                 if not os.path.exists(self.dbdir+"/"+fname):
4619                         return ""
4620                 return io.open(_unicode_encode(os.path.join(self.dbdir, fname),
4621                         encoding=_encodings['fs'], errors='strict'), 
4622                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4623                         ).read()
4624
4625         def setfile(self,fname,data):
4626                 kwargs = {}
4627                 if fname == 'environment.bz2' or not isinstance(data, basestring):
4628                         kwargs['mode'] = 'wb'
4629                 else:
4630                         kwargs['mode'] = 'w'
4631                         kwargs['encoding'] = _encodings['repo.content']
4632                 write_atomic(os.path.join(self.dbdir, fname), data, **kwargs)
4633
4634         def getelements(self,ename):
4635                 if not os.path.exists(self.dbdir+"/"+ename):
4636                         return []
4637                 mylines = io.open(_unicode_encode(
4638                         os.path.join(self.dbdir, ename),
4639                         encoding=_encodings['fs'], errors='strict'),
4640                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4641                         ).readlines()
4642                 myreturn = []
4643                 for x in mylines:
4644                         for y in x[:-1].split():
4645                                 myreturn.append(y)
4646                 return myreturn
4647
4648         def setelements(self,mylist,ename):
4649                 myelement = io.open(_unicode_encode(
4650                         os.path.join(self.dbdir, ename),
4651                         encoding=_encodings['fs'], errors='strict'),
4652                         mode='w', encoding=_encodings['repo.content'],
4653                         errors='backslashreplace')
4654                 for x in mylist:
4655                         myelement.write(_unicode_decode(x+"\n"))
4656                 myelement.close()
4657
4658         def isregular(self):
4659                 "Is this a regular package (does it have a CATEGORY file?  A dblink can be virtual *and* regular)"
4660                 return os.path.exists(os.path.join(self.dbdir, "CATEGORY"))
4661
def merge(mycat, mypkg, pkgloc, infloc,
	myroot=None, settings=None, myebuild=None,
	mytree=None, mydbapi=None, vartree=None, prev_mtimes=None, blockers=None,
	scheduler=None):
	"""
	Merge a package into the live filesystem by running a
	MergeProcess and returning its exit status.

	@param myroot: ignored, settings['EROOT'] is used instead
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	# Bail out early if the target root is not writable.
	if not os.access(settings['EROOT'], os.W_OK):
		writemsg(_("Permission denied: access('%s', W_OK)\n") % settings['EROOT'],
			noiselevel=-1)
		return errno.EACCES
	background = (settings.get('PORTAGE_BACKGROUND') == '1')
	task = MergeProcess(
		mycat=mycat, mypkg=mypkg, settings=settings,
		treetype=mytree, vartree=vartree,
		scheduler=(scheduler or PollScheduler().sched_iface),
		background=background, blockers=blockers, pkgloc=pkgloc,
		infloc=infloc, myebuild=myebuild, mydbapi=mydbapi,
		prev_mtimes=prev_mtimes, logfile=settings.get('PORTAGE_LOG_FILE'))
	task.start()
	return task.wait()
4687
def unmerge(cat, pkg, myroot=None, settings=None,
	mytrimworld=None, vartree=None,
	ldpath_mtimes=None, scheduler=None):
	"""
	Unmerge an installed package instance and remove its vdb entry.

	@param cat: package category
	@param pkg: package name-version
	@param myroot: ignored, settings['EROOT'] is used instead
	@param mytrimworld: ignored
	@param settings: config instance (required)
	@param ldpath_mtimes: passed through to dblink.unmerge()
	@param scheduler: scheduler interface passed through to dblink
	@return: os.EX_OK on success or when the package does not exist,
		otherwise the non-zero return code from dblink.unmerge()
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	mylink = dblink(cat, pkg, settings=settings, treetype="vartree",
		vartree=vartree, scheduler=scheduler)
	vartree = mylink.vartree
	parallel_install = "parallel-install" in settings.features
	# Without parallel-install, hold the vardb lock around the whole
	# unmerge; with it, the global lock is skipped here.
	if not parallel_install:
		mylink.lockdb()
	try:
		if mylink.exists():
			retval = mylink.unmerge(ldpath_mtimes=ldpath_mtimes)
			if retval == os.EX_OK:
				# Re-acquire the lock just for the removal of the vdb
				# entry (lockdb/unlockdb is used here even when the
				# outer lock is already held).
				mylink.lockdb()
				try:
					mylink.delete()
				finally:
					mylink.unlockdb()
			return retval
		return os.EX_OK
	finally:
		if vartree.dbapi._linkmap is None:
			# preserve-libs is entirely disabled
			pass
		else:
			# Installed contents changed, so the cached linkage map
			# is stale now.
			vartree.dbapi._linkmap._clear_cache()
		if not parallel_install:
			mylink.unlockdb()
4723
def write_contents(contents, root, f):
	"""
	Write contents to any file like object. The file will be left open.
	"""
	# Strip the root prefix from each path, but keep the leading slash.
	strip = len(root) - 1
	for path in sorted(contents):
		data = contents[path]
		kind = data[0]
		rel = path[strip:]
		if kind == "obj":
			_ignored, mtime, md5sum = data
			line = "%s %s %s %s\n" % (kind, rel, md5sum, mtime)
		elif kind == "sym":
			_ignored, mtime, target = data
			line = "%s %s -> %s %s\n" % (kind, rel, target, mtime)
		else: # dir, dev, fif
			line = "%s %s\n" % (kind, rel)
		f.write(line)
4744
4745 def tar_contents(contents, root, tar, protect=None, onProgress=None):
4746         os = _os_merge
4747         encoding = _encodings['merge']
4748
4749         try:
4750                 for x in contents:
4751                         _unicode_encode(x,
4752                                 encoding=_encodings['merge'],
4753                                 errors='strict')
4754         except UnicodeEncodeError:
4755                 # The package appears to have been merged with a
4756                 # different value of sys.getfilesystemencoding(),
4757                 # so fall back to utf_8 if appropriate.
4758                 try:
4759                         for x in contents:
4760                                 _unicode_encode(x,
4761                                         encoding=_encodings['fs'],
4762                                         errors='strict')
4763                 except UnicodeEncodeError:
4764                         pass
4765                 else:
4766                         os = portage.os
4767                         encoding = _encodings['fs']
4768
4769         tar.encoding = encoding
4770         root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
4771         id_strings = {}
4772         maxval = len(contents)
4773         curval = 0
4774         if onProgress:
4775                 onProgress(maxval, 0)
4776         paths = list(contents)
4777         paths.sort()
4778         for path in paths:
4779                 curval += 1
4780                 try:
4781                         lst = os.lstat(path)
4782                 except OSError as e:
4783                         if e.errno != errno.ENOENT:
4784                                 raise
4785                         del e
4786                         if onProgress:
4787                                 onProgress(maxval, curval)
4788                         continue
4789                 contents_type = contents[path][0]
4790                 if path.startswith(root):
4791                         arcname = "./" + path[len(root):]
4792                 else:
4793                         raise ValueError("invalid root argument: '%s'" % root)
4794                 live_path = path
4795                 if 'dir' == contents_type and \
4796                         not stat.S_ISDIR(lst.st_mode) and \
4797                         os.path.isdir(live_path):
4798                         # Even though this was a directory in the original ${D}, it exists
4799                         # as a symlink to a directory in the live filesystem.  It must be
4800                         # recorded as a real directory in the tar file to ensure that tar
4801                         # can properly extract it's children.
4802                         live_path = os.path.realpath(live_path)
4803                         lst = os.lstat(live_path)
4804
4805                 # Since os.lstat() inside TarFile.gettarinfo() can trigger a
4806                 # UnicodeEncodeError when python has something other than utf_8
4807                 # return from sys.getfilesystemencoding() (as in bug #388773),
4808                 # we implement the needed functionality here, using the result
4809                 # of our successful lstat call. An alternative to this would be
4810                 # to pass in the fileobj argument to TarFile.gettarinfo(), so
4811                 # that it could use fstat instead of lstat. However, that would
4812                 # have the unwanted effect of dereferencing symlinks.
4813
4814                 tarinfo = tar.tarinfo()
4815                 tarinfo.name = arcname
4816                 tarinfo.mode = lst.st_mode
4817                 tarinfo.uid = lst.st_uid
4818                 tarinfo.gid = lst.st_gid
4819                 tarinfo.size = 0
4820                 tarinfo.mtime = lst.st_mtime
4821                 tarinfo.linkname = ""
4822                 if stat.S_ISREG(lst.st_mode):
4823                         inode = (lst.st_ino, lst.st_dev)
4824                         if (lst.st_nlink > 1 and
4825                                 inode in tar.inodes and
4826                                 arcname != tar.inodes[inode]):
4827                                 tarinfo.type = tarfile.LNKTYPE
4828                                 tarinfo.linkname = tar.inodes[inode]
4829                         else:
4830                                 tar.inodes[inode] = arcname
4831                                 tarinfo.type = tarfile.REGTYPE
4832                                 tarinfo.size = lst.st_size
4833                 elif stat.S_ISDIR(lst.st_mode):
4834                         tarinfo.type = tarfile.DIRTYPE
4835                 elif stat.S_ISLNK(lst.st_mode):
4836                         tarinfo.type = tarfile.SYMTYPE
4837                         tarinfo.linkname = os.readlink(live_path)
4838                 else:
4839                         continue
4840                 try:
4841                         tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
4842                 except KeyError:
4843                         pass
4844                 try:
4845                         tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
4846                 except KeyError:
4847                         pass
4848
4849                 if stat.S_ISREG(lst.st_mode):
4850                         if protect and protect(path):
4851                                 # Create an empty file as a place holder in order to avoid
4852                                 # potential collision-protect issues.
4853                                 f = tempfile.TemporaryFile()
4854                                 f.write(_unicode_encode(
4855                                         "# empty file because --include-config=n " + \
4856                                         "when `quickpkg` was used\n"))
4857                                 f.flush()
4858                                 f.seek(0)
4859                                 tarinfo.size = os.fstat(f.fileno()).st_size
4860                                 tar.addfile(tarinfo, f)
4861                                 f.close()
4862                         else:
4863                                 f = open(_unicode_encode(path,
4864                                         encoding=encoding,
4865                                         errors='strict'), 'rb')
4866                                 try:
4867                                         tar.addfile(tarinfo, f)
4868                                 finally:
4869                                         f.close()
4870                 else:
4871                         tar.addfile(tarinfo)
4872                 if onProgress:
4873                         onProgress(maxval, curval)