treewalk: fix plib_collisions with same cpv
[portage.git] / pym / portage / dbapi / vartree.py
1 # Copyright 1998-2011 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 __all__ = [
5         "vardbapi", "vartree", "dblink"] + \
6         ["write_contents", "tar_contents"]
7
8 import portage
9 portage.proxy.lazyimport.lazyimport(globals(),
10         'portage.checksum:_perform_md5_merge@perform_md5',
11         'portage.data:portage_gid,portage_uid,secpass',
12         'portage.dbapi.dep_expand:dep_expand',
13         'portage.dbapi._MergeProcess:MergeProcess',
14         'portage.dep:dep_getkey,isjustname,match_from_list,' + \
15                 'use_reduce,_slot_re',
16         'portage.elog:collect_ebuild_messages,collect_messages,' + \
17                 'elog_process,_merge_logentries',
18         'portage.locks:lockdir,unlockdir,lockfile,unlockfile',
19         'portage.output:bold,colorize',
20         'portage.package.ebuild.doebuild:doebuild_environment,' + \
21                 '_spawn_phase',
22         'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
23         'portage.update:fixdbentries',
24         'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \
25                 'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \
26                 'grabdict,normalize_path,new_protect_filename',
27         'portage.util.digraph:digraph',
28         'portage.util.env_update:env_update',
29         'portage.util.listdir:dircache,listdir',
30         'portage.util._dyn_libs.PreservedLibsRegistry:PreservedLibsRegistry',
31         'portage.util._dyn_libs.LinkageMapELF:LinkageMapELF@LinkageMap',
32         'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,pkgcmp,' + \
33                 '_pkgsplit@pkgsplit',
34 )
35
36 from portage.const import CACHE_PATH, CONFIG_MEMORY_FILE, \
37         PORTAGE_PACKAGE_ATOM, PRIVATE_PATH, VDB_PATH
38 from portage.const import _ENABLE_DYN_LINK_MAP, _ENABLE_PRESERVE_LIBS
39 from portage.dbapi import dbapi
40 from portage.dep import _slot_separator
41 from portage.exception import CommandNotFound, \
42         InvalidData, InvalidPackageName, \
43         FileNotFound, PermissionDenied, UnsupportedAPIException
44 from portage.localization import _
45 from portage.util.movefile import movefile
46
47 from portage import abssymlink, _movefile, bsd_chflags
48
49 # This is a special version of the os module, wrapped for unicode support.
50 from portage import os
51 from portage import _encodings
52 from portage import _os_merge
53 from portage import _selinux_merge
54 from portage import _unicode_decode
55 from portage import _unicode_encode
56
57 from _emerge.AsynchronousLock import AsynchronousLock
58 from _emerge.EbuildBuildDir import EbuildBuildDir
59 from _emerge.EbuildPhase import EbuildPhase
60 from _emerge.PollScheduler import PollScheduler
61 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
62
63 import codecs
64 import gc
65 import re, shutil, stat, errno, subprocess
66 import logging
67 import os as _os
68 import stat
69 import sys
70 import tempfile
71 import time
72 import warnings
73
74 try:
75         import cPickle as pickle
76 except ImportError:
77         import pickle
78
79 if sys.hexversion >= 0x3000000:
80         basestring = str
81         long = int
82
class vardbapi(dbapi):
    # dbapi implementation backed by the installed-packages database
    # (the vdb under EROOT + VDB_PATH).

    # Directory names inside a category dir that can never be valid
    # packages; folded into one exclusion regex that also skips
    # dotfiles and -MERGING- turds.
    _excluded_dirs = ["CVS", "lost+found"]
    _excluded_dirs = [re.escape(x) for x in _excluded_dirs]
    _excluded_dirs = re.compile(r'^(\..*|-MERGING-.*|' + \
        "|".join(_excluded_dirs) + r')$')

    # On-disk pickle format versions; bump to invalidate old caches.
    _aux_cache_version        = "1"
    _owners_cache_version     = "1"

    # Number of uncached packages to trigger cache update, since
    # it's wasteful to update it for every vdb change.
    _aux_cache_threshold = 5

    # Metadata keys matching this pattern (NEEDED.*) are cached in
    # addition to the explicit _aux_cache_keys set (see aux_get).
    _aux_cache_keys_re = re.compile(r'^NEEDED\..*$')
    # Metadata files whose values may span multiple lines.
    _aux_multi_line_re = re.compile(r'^(CONTENTS|NEEDED\..*)$')
99
    def __init__(self, _unused_param=None, categories=None, settings=None, vartree=None):
        """
        Open the installed-packages database rooted at settings['EROOT'].

        The categories parameter is unused since the dbapi class
        now has a categories property that is generated from the
        available packages.
        """

        # Used by emerge to check whether any packages
        # have been added or removed.
        self._pkgs_changed = False

        # The _aux_cache_threshold doesn't work as designed
        # if the cache is flushed from a subprocess, so we
        # use this to avoid wasteful vdb cache updates.
        self._flush_cache_enabled = True

        # cache for category directory mtimes
        self.mtdircache = {}

        # cache for dependency checks
        self.matchcache = {}

        # cache for cp_list results
        self.cpcache = {}

        self.blockers = None
        if settings is None:
            settings = portage.settings
        self.settings = settings
        self.root = settings['ROOT']

        # The first positional parameter historically carried the root
        # path; it is now derived from settings instead.
        if _unused_param is not None and _unused_param != self.root:
            warnings.warn("The first parameter of the " + \
                "portage.dbapi.vartree.vardbapi" + \
                " constructor is now unused. Use " + \
                "settings['ROOT'] instead.",
                DeprecationWarning, stacklevel=2)

        self._eroot = settings['EROOT']
        self._dbroot = self._eroot + VDB_PATH
        # Reentrant lock state, managed by lock()/unlock().
        self._lock = None
        self._lock_count = 0

        if vartree is None:
            vartree = portage.db[self.root]["vartree"]
        self.vartree = vartree
        # Metadata keys that aux_get() caches persistently on disk.
        self._aux_cache_keys = set(
            ["BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "DESCRIPTION",
            "EAPI", "HOMEPAGE", "IUSE", "KEYWORDS",
            "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND",
            "repository", "RESTRICT" , "SLOT", "USE", "DEFINED_PHASES",
            "REQUIRED_USE"])
        self._aux_cache_obj = None
        self._aux_cache_filename = os.path.join(self._eroot,
            CACHE_PATH, "vdb_metadata.pickle")
        self._counter_path = os.path.join(self._eroot,
            CACHE_PATH, "counter")

        # Registry of preserved libraries, when that feature is enabled.
        self._plib_registry = None
        if _ENABLE_PRESERVE_LIBS:
            self._plib_registry = PreservedLibsRegistry(self.root,
                os.path.join(self._eroot, PRIVATE_PATH,
                "preserved_libs_registry"))

        # ELF linkage map, when dynamic link map support is enabled.
        self._linkmap = None
        if _ENABLE_DYN_LINK_MAP:
            self._linkmap = LinkageMap(self)
        self._owners = self._owners_db(self)

        self._cached_counter = None
170
171         def getpath(self, mykey, filename=None):
172                 # This is an optimized hotspot, so don't use unicode-wrapped
173                 # os module and don't use os.path.join().
174                 rValue = self._eroot + VDB_PATH + _os.sep + mykey
175                 if filename is not None:
176                         # If filename is always relative, we can do just
177                         # rValue += _os.sep + filename
178                         rValue = _os.path.join(rValue, filename)
179                 return rValue
180
181         def lock(self):
182                 """
183                 Acquire a reentrant lock, blocking, for cooperation with concurrent
184                 processes. State is inherited by subprocesses, allowing subprocesses
185                 to reenter a lock that was acquired by a parent process. However,
186                 a lock can be released only by the same process that acquired it.
187                 """
188                 if self._lock_count:
189                         self._lock_count += 1
190                 else:
191                         if self._lock is not None:
192                                 raise AssertionError("already locked")
193                         # At least the parent needs to exist for the lock file.
194                         ensure_dirs(self._dbroot)
195                         self._lock = lockdir(self._dbroot)
196                         self._lock_count += 1
197
198         def unlock(self):
199                 """
200                 Release a lock, decrementing the recursion level. Each unlock() call
201                 must be matched with a prior lock() call, or else an AssertionError
202                 will be raised if unlock() is called while not locked.
203                 """
204                 if self._lock_count > 1:
205                         self._lock_count -= 1
206                 else:
207                         if self._lock is None:
208                                 raise AssertionError("not locked")
209                         self._lock_count = 0
210                         unlockdir(self._lock)
211                         self._lock = None
212
213         def _bump_mtime(self, cpv):
214                 """
215                 This is called before an after any modifications, so that consumers
216                 can use directory mtimes to validate caches. See bug #290428.
217                 """
218                 base = self._eroot + VDB_PATH
219                 cat = catsplit(cpv)[0]
220                 catdir = base + _os.sep + cat
221                 t = time.time()
222                 t = (t, t)
223                 try:
224                         for x in (catdir, base):
225                                 os.utime(x, t)
226                 except OSError:
227                         ensure_dirs(catdir)
228
229         def cpv_exists(self, mykey, myrepo=None):
230                 "Tells us whether an actual ebuild exists on disk (no masking)"
231                 return os.path.exists(self.getpath(mykey))
232
233         def cpv_counter(self, mycpv):
234                 "This method will grab the COUNTER. Returns a counter value."
235                 try:
236                         return long(self.aux_get(mycpv, ["COUNTER"])[0])
237                 except (KeyError, ValueError):
238                         pass
239                 writemsg_level(_("portage: COUNTER for %s was corrupted; " \
240                         "resetting to value of 0\n") % (mycpv,),
241                         level=logging.ERROR, noiselevel=-1)
242                 return 0
243
244         def cpv_inject(self, mycpv):
245                 "injects a real package into our on-disk database; assumes mycpv is valid and doesn't already exist"
246                 ensure_dirs(self.getpath(mycpv))
247                 counter = self.counter_tick(mycpv=mycpv)
248                 # write local package counter so that emerge clean does the right thing
249                 write_atomic(self.getpath(mycpv, filename="COUNTER"), str(counter))
250
251         def isInjected(self, mycpv):
252                 if self.cpv_exists(mycpv):
253                         if os.path.exists(self.getpath(mycpv, filename="INJECTED")):
254                                 return True
255                         if not os.path.exists(self.getpath(mycpv, filename="CONTENTS")):
256                                 return True
257                 return False
258
    def move_ent(self, mylist, repo_match=None):
        """
        Apply a package-move update entry to the vdb by renaming the
        directories of matching installed packages.

        @param mylist: update command, ["move", origcp, newcp]
        @param repo_match: optional callable taking a repository name;
            when it returns False the move is skipped for that package
        @return: number of packages moved
        @raises InvalidPackageName: if either cp is not a plain name
        """
        origcp = mylist[1]
        newcp = mylist[2]

        # sanity check
        for atom in (origcp, newcp):
            if not isjustname(atom):
                raise InvalidPackageName(str(atom))
        origmatches = self.match(origcp, use_cache=0)
        moves = 0
        if not origmatches:
            return moves
        for mycpv in origmatches:
            mycpv_cp = cpv_getkey(mycpv)
            if mycpv_cp != origcp:
                # Ignore PROVIDE virtual match.
                continue
            if repo_match is not None \
                and not repo_match(self.aux_get(mycpv, ['repository'])[0]):
                continue
            mynewcpv = mycpv.replace(mycpv_cp, str(newcp), 1)
            mynewcat = catsplit(newcp)[0]
            origpath = self.getpath(mycpv)
            if not os.path.exists(origpath):
                continue
            moves += 1
            if not os.path.exists(self.getpath(mynewcat)):
                #create the directory
                ensure_dirs(self.getpath(mynewcat))
            newpath = self.getpath(mynewcpv)
            if os.path.exists(newpath):
                #dest already exists; keep this puppy where it is.
                continue
            _movefile(origpath, newpath, mysettings=self.settings)
            # Both the old and new names may have stale cache entries.
            self._clear_pkg_cache(self._dblink(mycpv))
            self._clear_pkg_cache(self._dblink(mynewcpv))

            # We need to rename the ebuild now.
            old_pf = catsplit(mycpv)[1]
            new_pf = catsplit(mynewcpv)[1]
            if new_pf != old_pf:
                try:
                    os.rename(os.path.join(newpath, old_pf + ".ebuild"),
                        os.path.join(newpath, new_pf + ".ebuild"))
                except EnvironmentError as e:
                    # A missing ebuild file is tolerated.
                    if e.errno != errno.ENOENT:
                        raise
                    del e
            write_atomic(os.path.join(newpath, "PF"), new_pf+"\n")
            write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat+"\n")
            # Rewrite dependency metadata files for the renamed package.
            fixdbentries([mylist], newpath)
        return moves
311
    def cp_list(self, mycp, use_cache=1):
        """
        Return a sorted list of installed cpvs matching the given
        cp (category/package).  When use_cache is true, results are
        cached and validated against the category directory's mtime.
        """
        mysplit=catsplit(mycp)
        # NOTE(review): for a literal '*' category this strips it to an
        # empty string; presumably intentional handling of old-style
        # virtual markers — confirm against callers.
        if mysplit[0] == '*':
            mysplit[0] = mysplit[0][1:]
        try:
            mystat = os.stat(self.getpath(mysplit[0])).st_mtime
        except OSError:
            mystat = 0
        # Serve from cache when the category dir's mtime is unchanged.
        if use_cache and mycp in self.cpcache:
            cpc = self.cpcache[mycp]
            if cpc[0] == mystat:
                return cpc[1][:]
        cat_dir = self.getpath(mysplit[0])
        try:
            dir_list = os.listdir(cat_dir)
        except EnvironmentError as e:
            if e.errno == PermissionDenied.errno:
                raise PermissionDenied(cat_dir)
            del e
            dir_list = []

        returnme = []
        for x in dir_list:
            if self._excluded_dirs.match(x) is not None:
                continue
            ps = pkgsplit(x)
            if not ps:
                # Not a parseable ${PF}; report and skip.
                self.invalidentry(os.path.join(self.getpath(mysplit[0]), x))
                continue
            if len(mysplit) > 1:
                if ps[0] == mysplit[1]:
                    returnme.append(mysplit[0]+"/"+x)
        self._cpv_sort_ascending(returnme)
        if use_cache:
            self.cpcache[mycp] = [mystat, returnme[:]]
        elif mycp in self.cpcache:
            # Explicitly uncached call: drop any stale entry.
            del self.cpcache[mycp]
        return returnme
350
    def cpv_all(self, use_cache=1):
        """
        Return a list of all installed cpvs found in the vdb.

        Set use_cache=0 to bypass the portage.cachedir() cache in cases
        when the accuracy of mtime staleness checks should not be trusted
        (generally this is only necessary in critical sections that
        involve merge or unmerge of packages).
        """
        returnme = []
        basepath = os.path.join(self._eroot, VDB_PATH) + os.path.sep

        if use_cache:
            from portage import listdir
        else:
            # Uncached fallback with the same signature that lists
            # only subdirectories, directly from the filesystem.
            def listdir(p, **kwargs):
                try:
                    return [x for x in os.listdir(p) \
                        if os.path.isdir(os.path.join(p, x))]
                except EnvironmentError as e:
                    if e.errno == PermissionDenied.errno:
                        raise PermissionDenied(p)
                    del e
                    return []

        for x in listdir(basepath, EmptyOnError=1, ignorecvs=1, dirsonly=1):
            if self._excluded_dirs.match(x) is not None:
                continue
            if not self._category_re.match(x):
                continue
            for y in listdir(basepath + x, EmptyOnError=1, dirsonly=1):
                if self._excluded_dirs.match(y) is not None:
                    continue
                subpath = x + "/" + y
                # -MERGING- should never be a cpv, nor should files.
                try:
                    if catpkgsplit(subpath) is None:
                        self.invalidentry(self.getpath(subpath))
                        continue
                except InvalidData:
                    self.invalidentry(self.getpath(subpath))
                    continue
                returnme.append(subpath)

        return returnme
394
395         def cp_all(self, use_cache=1):
396                 mylist = self.cpv_all(use_cache=use_cache)
397                 d={}
398                 for y in mylist:
399                         if y[0] == '*':
400                                 y = y[1:]
401                         try:
402                                 mysplit = catpkgsplit(y)
403                         except InvalidData:
404                                 self.invalidentry(self.getpath(y))
405                                 continue
406                         if not mysplit:
407                                 self.invalidentry(self.getpath(y))
408                                 continue
409                         d[mysplit[0]+"/"+mysplit[1]] = None
410                 return list(d)
411
    def checkblockers(self, origdep):
        # Intentionally a no-op. NOTE(review): appears to be kept only
        # for dbapi interface compatibility — confirm before removing.
        pass
414
415         def _clear_cache(self):
416                 self.mtdircache.clear()
417                 self.matchcache.clear()
418                 self.cpcache.clear()
419                 self._aux_cache_obj = None
420
421         def _add(self, pkg_dblink):
422                 self._pkgs_changed = True
423                 self._clear_pkg_cache(pkg_dblink)
424
425         def _remove(self, pkg_dblink):
426                 self._pkgs_changed = True
427                 self._clear_pkg_cache(pkg_dblink)
428
429         def _clear_pkg_cache(self, pkg_dblink):
430                 # Due to 1 second mtime granularity in <python-2.5, mtime checks
431                 # are not always sufficient to invalidate vardbapi caches. Therefore,
432                 # the caches need to be actively invalidated here.
433                 self.mtdircache.pop(pkg_dblink.cat, None)
434                 self.matchcache.pop(pkg_dblink.cat, None)
435                 self.cpcache.pop(pkg_dblink.mysplit[0], None)
436                 dircache.pop(pkg_dblink.dbcatdir, None)
437
    def match(self, origdep, use_cache=1):
        """
        Caching match function: expand origdep to a full atom and
        return the installed cpvs that match it.  Cached results are
        validated against the category directory's mtime.
        """
        mydep = dep_expand(
            origdep, mydb=self, use_cache=use_cache, settings=self.settings)
        mykey = dep_getkey(mydep)
        mycat = catsplit(mykey)[0]
        if not use_cache:
            # Drop any cached data for this category and compute fresh.
            if mycat in self.matchcache:
                del self.mtdircache[mycat]
                del self.matchcache[mycat]
            return list(self._iter_match(mydep,
                self.cp_list(mydep.cp, use_cache=use_cache)))
        try:
            curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime
        except (IOError, OSError):
            curmtime=0

        if mycat not in self.matchcache or \
            self.mtdircache[mycat] != curmtime:
            # clear cache entry
            self.mtdircache[mycat] = curmtime
            self.matchcache[mycat] = {}
        if mydep not in self.matchcache[mycat]:
            mymatch = list(self._iter_match(mydep,
                self.cp_list(mydep.cp, use_cache=use_cache)))
            self.matchcache[mycat][mydep] = mymatch
        # Return a copy so callers cannot mutate the cached list.
        return self.matchcache[mycat][mydep][:]
465
466         def findname(self, mycpv, myrepo=None):
467                 return self.getpath(str(mycpv), filename=catsplit(mycpv)[1]+".ebuild")
468
469         def flush_cache(self):
470                 """If the current user has permission and the internal aux_get cache has
471                 been updated, save it to disk and mark it unmodified.  This is called
472                 by emerge after it has loaded the full vdb for use in dependency
473                 calculations.  Currently, the cache is only written if the user has
474                 superuser privileges (since that's required to obtain a lock), but all
475                 users have read access and benefit from faster metadata lookups (as
476                 long as at least part of the cache is still valid)."""
477                 if self._flush_cache_enabled and \
478                         self._aux_cache is not None and \
479                         len(self._aux_cache["modified"]) >= self._aux_cache_threshold and \
480                         secpass >= 2:
481                         self._owners.populate() # index any unindexed contents
482                         valid_nodes = set(self.cpv_all())
483                         for cpv in list(self._aux_cache["packages"]):
484                                 if cpv not in valid_nodes:
485                                         del self._aux_cache["packages"][cpv]
486                         del self._aux_cache["modified"]
487                         try:
488                                 f = atomic_ofstream(self._aux_cache_filename, 'wb')
489                                 pickle.dump(self._aux_cache, f, protocol=2)
490                                 f.close()
491                                 apply_secpass_permissions(
492                                         self._aux_cache_filename, gid=portage_gid, mode=0o644)
493                         except (IOError, OSError) as e:
494                                 pass
495                         self._aux_cache["modified"] = set()
496
497         @property
498         def _aux_cache(self):
499                 if self._aux_cache_obj is None:
500                         self._aux_cache_init()
501                 return self._aux_cache_obj
502
    def _aux_cache_init(self):
        """
        Load the pickled aux metadata cache from disk, validating its
        structure and version, and fall back to a fresh empty cache on
        any error (the cache is fully disposable).
        """
        aux_cache = None
        open_kwargs = {}
        if sys.hexversion >= 0x3000000:
            # Buffered io triggers extreme performance issues in
            # Unpickler.load() (problem observed with python-3.0.1).
            # Unfortunately, performance is still poor relative to
            # python-2.x, but buffering makes it much worse.
            open_kwargs["buffering"] = 0
        try:
            f = open(_unicode_encode(self._aux_cache_filename,
                encoding=_encodings['fs'], errors='strict'),
                mode='rb', **open_kwargs)
            mypickle = pickle.Unpickler(f)
            try:
                # Disallow global name lookups while unpickling.
                mypickle.find_global = None
            except AttributeError:
                # TODO: If py3k, override Unpickler.find_class().
                pass
            aux_cache = mypickle.load()
            f.close()
            del f
        except (IOError, OSError, EOFError, ValueError, pickle.UnpicklingError) as e:
            if isinstance(e, pickle.UnpicklingError):
                writemsg(_unicode_decode(_("!!! Error loading '%s': %s\n")) % \
                    (self._aux_cache_filename, e), noiselevel=-1)
            del e

        # Discard the cache when it is missing, malformed, or from a
        # different cache format version.
        if not aux_cache or \
            not isinstance(aux_cache, dict) or \
            aux_cache.get("version") != self._aux_cache_version or \
            not aux_cache.get("packages"):
            aux_cache = {"version": self._aux_cache_version}
            aux_cache["packages"] = {}

        # Validate the owners sub-cache structure independently.
        owners = aux_cache.get("owners")
        if owners is not None:
            if not isinstance(owners, dict):
                owners = None
            elif "version" not in owners:
                owners = None
            elif owners["version"] != self._owners_cache_version:
                owners = None
            elif "base_names" not in owners:
                owners = None
            elif not isinstance(owners["base_names"], dict):
                owners = None

        if owners is None:
            owners = {
                "base_names" : {},
                "version"    : self._owners_cache_version
            }
            aux_cache["owners"] = owners

        # Tracks cpvs whose entries changed since the last flush_cache().
        aux_cache["modified"] = set()
        self._aux_cache_obj = aux_cache
560
561         def aux_get(self, mycpv, wants, myrepo = None):
562                 """This automatically caches selected keys that are frequently needed
563                 by emerge for dependency calculations.  The cached metadata is
564                 considered valid if the mtime of the package directory has not changed
565                 since the data was cached.  The cache is stored in a pickled dict
566                 object with the following format:
567
568                 {version:"1", "packages":{cpv1:(mtime,{k1,v1, k2,v2, ...}), cpv2...}}
569
570                 If an error occurs while loading the cache pickle or the version is
571                 unrecognized, the cache will simple be recreated from scratch (it is
572                 completely disposable).
573                 """
574                 cache_these_wants = self._aux_cache_keys.intersection(wants)
575                 for x in wants:
576                         if self._aux_cache_keys_re.match(x) is not None:
577                                 cache_these_wants.add(x)
578
579                 if not cache_these_wants:
580                         return self._aux_get(mycpv, wants)
581
582                 cache_these = set(self._aux_cache_keys)
583                 cache_these.update(cache_these_wants)
584
585                 mydir = self.getpath(mycpv)
586                 mydir_stat = None
587                 try:
588                         mydir_stat = os.stat(mydir)
589                 except OSError as e:
590                         if e.errno != errno.ENOENT:
591                                 raise
592                         raise KeyError(mycpv)
593                 mydir_mtime = mydir_stat[stat.ST_MTIME]
594                 pkg_data = self._aux_cache["packages"].get(mycpv)
595                 pull_me = cache_these.union(wants)
596                 mydata = {"_mtime_" : mydir_mtime}
597                 cache_valid = False
598                 cache_incomplete = False
599                 cache_mtime = None
600                 metadata = None
601                 if pkg_data is not None:
602                         if not isinstance(pkg_data, tuple) or len(pkg_data) != 2:
603                                 pkg_data = None
604                         else:
605                                 cache_mtime, metadata = pkg_data
606                                 if not isinstance(cache_mtime, (long, int)) or \
607                                         not isinstance(metadata, dict):
608                                         pkg_data = None
609
610                 if pkg_data:
611                         cache_mtime, metadata = pkg_data
612                         cache_valid = cache_mtime == mydir_mtime
613                 if cache_valid:
614                         # Migrate old metadata to unicode.
615                         for k, v in metadata.items():
616                                 metadata[k] = _unicode_decode(v,
617                                         encoding=_encodings['repo.content'], errors='replace')
618
619                         mydata.update(metadata)
620                         pull_me.difference_update(mydata)
621
622                 if pull_me:
623                         # pull any needed data and cache it
624                         aux_keys = list(pull_me)
625                         for k, v in zip(aux_keys,
626                                 self._aux_get(mycpv, aux_keys, st=mydir_stat)):
627                                 mydata[k] = v
628                         if not cache_valid or cache_these.difference(metadata):
629                                 cache_data = {}
630                                 if cache_valid and metadata:
631                                         cache_data.update(metadata)
632                                 for aux_key in cache_these:
633                                         cache_data[aux_key] = mydata[aux_key]
634                                 self._aux_cache["packages"][mycpv] = (mydir_mtime, cache_data)
635                                 self._aux_cache["modified"].add(mycpv)
636
637                 if _slot_re.match(mydata['SLOT']) is None:
638                         # Empty or invalid slot triggers InvalidAtom exceptions when
639                         # generating slot atoms for packages, so translate it to '0' here.
640                         mydata['SLOT'] = _unicode_decode('0')
641
642                 return [mydata[x] for x in wants]
643
	def _aux_get(self, mycpv, wants, st=None):
		"""
		Read metadata values directly from the package's vdb directory,
		where each metadata key is stored as a separate file. This is
		the uncached backend used by aux_get().

		@param mycpv: installed package cpv
		@param wants: iterable of metadata keys; the special key
			"_mtime_" yields the mtime of the package's vdb directory
		@param st: optional stat result for the package directory, used
			to avoid a redundant os.stat() call
		@return: list of values, in the same order as wants
		@raise KeyError: if the package's vdb directory does not exist
		@raise PermissionDenied: if the directory is not readable
		"""
		mydir = self.getpath(mycpv)
		if st is None:
			try:
				st = os.stat(mydir)
			except OSError as e:
				if e.errno == errno.ENOENT:
					raise KeyError(mycpv)
				elif e.errno == PermissionDenied.errno:
					raise PermissionDenied(mydir)
				else:
					raise
		if not stat.S_ISDIR(st.st_mode):
			raise KeyError(mycpv)
		results = []
		for x in wants:
			if x == "_mtime_":
				results.append(st[stat.ST_MTIME])
				continue
			try:
				myf = codecs.open(
					_unicode_encode(os.path.join(mydir, x),
					encoding=_encodings['fs'], errors='strict'),
					mode='r', encoding=_encodings['repo.content'],
					errors='replace')
				try:
					myd = myf.read()
				finally:
					myf.close()
				# Preserve \n for metadata that is known to
				# contain multiple lines.
				if self._aux_multi_line_re.match(x) is None:
					myd = " ".join(myd.split())
			# A missing metadata file is treated as an empty value.
			except IOError:
				myd = _unicode_decode('')
			if x == "EAPI" and not myd:
				# An empty EAPI file implies the default EAPI, "0".
				results.append(_unicode_decode('0'))
			else:
				results.append(myd)
		return results
684
685         def aux_update(self, cpv, values):
686                 mylink = self._dblink(cpv)
687                 if not mylink.exists():
688                         raise KeyError(cpv)
689                 self._bump_mtime(cpv)
690                 self._clear_pkg_cache(mylink)
691                 for k, v in values.items():
692                         if v:
693                                 mylink.setfile(k, v)
694                         else:
695                                 try:
696                                         os.unlink(os.path.join(self.getpath(cpv), k))
697                                 except EnvironmentError:
698                                         pass
699                 self._bump_mtime(cpv)
700
701         def counter_tick(self, myroot=None, mycpv=None):
702                 """
703                 @param myroot: ignored, self._eroot is used instead
704                 """
705                 return self.counter_tick_core(incrementing=1, mycpv=mycpv)
706
	def get_counter_tick_core(self, myroot=None, mycpv=None):
		"""
		Use this method to retrieve the counter instead
		of having to trust the value of a global counter
		file that can lead to invalid COUNTER
		generation. When cache is valid, the package COUNTER
		files are not read and we rely on the timestamp of
		the package directory to validate cache. The stat
		calls should only take a short time, so performance
		is sufficient without having to rely on a potentially
		corrupt global counter file.

		The global counter file located at
		$CACHE_PATH/counter serves to record the
		counter of the last installed package and
		it also corresponds to the total number of
		installation actions that have occurred in
		the history of this package database.

		@param myroot: ignored, self._eroot is used instead
		@param mycpv: ignored
		@return: the next counter value (one greater than the highest
			known counter)
		"""
		# The myroot parameter is deprecated and ignored.
		myroot = None
		new_vdb = False
		counter = -1
		# Read the global counter file, if it exists and is readable.
		try:
			cfile = codecs.open(
				_unicode_encode(self._counter_path,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			# A missing counter file is normal for a brand new
			# (empty) vdb, so only warn when packages exist.
			new_vdb = not bool(self.cpv_all())
			if not new_vdb:
				writemsg(_("!!! Unable to read COUNTER file: '%s'\n") % \
					self._counter_path, noiselevel=-1)
				writemsg("!!! %s\n" % str(e), noiselevel=-1)
			del e
		else:
			try:
				try:
					counter = long(cfile.readline().strip())
				finally:
					cfile.close()
			except (OverflowError, ValueError) as e:
				# Leave counter at -1 so it gets reinitialized below.
				writemsg(_("!!! COUNTER file is corrupt: '%s'\n") % \
					self._counter_path, noiselevel=-1)
				writemsg("!!! %s\n" % str(e), noiselevel=-1)
				del e

		if self._cached_counter == counter:
			# The file agrees with the value we last recorded, so
			# skip the expensive scan of all installed packages.
			max_counter = counter
		else:
			# We must ensure that we return a counter
			# value that is at least as large as the
			# highest one from the installed packages,
			# since having a corrupt value that is too low
			# can trigger incorrect AUTOCLEAN behavior due
			# to newly installed packages having lower
			# COUNTERs than the previous version in the
			# same slot.
			max_counter = counter
			for cpv in self.cpv_all():
				try:
					pkg_counter = int(self.aux_get(cpv, ["COUNTER"])[0])
				except (KeyError, OverflowError, ValueError):
					# Skip packages with missing or invalid COUNTER.
					continue
				if pkg_counter > max_counter:
					max_counter = pkg_counter

		if counter < 0 and not new_vdb:
			writemsg(_("!!! Initializing COUNTER to " \
				"value of %d\n") % max_counter, noiselevel=-1)

		return max_counter + 1
781
	def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None):
		"""
		This method will grab the next COUNTER value and record it back
		to the global file.  Returns new counter value.

		@param myroot: ignored, self._eroot is used instead
		@param incrementing: when true, write the incremented value back
			to the global counter file
		@param mycpv: ignored
		"""
		myroot = None
		mycpv = None
		# Hold the vdb lock across read-modify-write of the counter file.
		self.lock()
		try:
			# get_counter_tick_core() returns the next value, so
			# subtract one to get the current highest counter.
			counter = self.get_counter_tick_core() - 1
			if self._cached_counter != counter:
				if incrementing:
					#increment counter
					counter += 1
					# use same permissions as config._init_dirs()
					ensure_dirs(os.path.dirname(self._counter_path),
						gid=portage_gid, mode=0o2750, mask=0o2)
					# update new global counter file
					write_atomic(self._counter_path, str(counter))
				self._cached_counter = counter
		finally:
			self.unlock()

		return counter
809
810         def _dblink(self, cpv):
811                 category, pf = catsplit(cpv)
812                 return dblink(category, pf, settings=self.settings,
813                         vartree=self.vartree, treetype="vartree")
814
	def removeFromContents(self, pkg, paths, relative_paths=True):
		"""
		Remove the given paths from an installed package's CONTENTS
		file. If any paths are actually removed, the file is rewritten
		atomically and the package's cached contents are cleared.

		@param pkg: cpv for an installed package
		@type pkg: string
		@param paths: paths of files to remove from contents
		@type paths: iterable
		@param relative_paths: if True, paths are relative to ROOT;
			otherwise they are absolute and ROOT is stripped
		@type relative_paths: bool
		"""
		if not hasattr(pkg, "getcontents"):
			pkg = self._dblink(pkg)
		root = self.settings['ROOT']
		# Slice at len(root) - 1 so the leading separator is kept
		# (ROOT is assumed to end with a slash — TODO confirm).
		root_len = len(root) - 1
		new_contents = pkg.getcontents().copy()
		removed = 0

		for filename in paths:
			filename = _unicode_decode(filename,
				encoding=_encodings['content'], errors='strict')
			filename = normalize_path(filename)
			if relative_paths:
				relative_filename = filename
			else:
				relative_filename = filename[root_len:]
			# _match_contents returns the exact key used in the
			# contents dict, or a false value if not owned.
			contents_key = pkg._match_contents(relative_filename)
			if contents_key:
				del new_contents[contents_key]
				removed += 1

		if removed:
			# Bump the directory mtime before and after the rewrite so
			# stale metadata caches are invalidated.
			self._bump_mtime(pkg.mycpv)
			f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
			write_contents(new_contents, root, f)
			f.close()
			self._bump_mtime(pkg.mycpv)
			pkg._clear_contents_cache()
849
850         class _owners_cache(object):
851                 """
852                 This class maintains an hash table that serves to index package
853                 contents by mapping the basename of file to a list of possible
854                 packages that own it. This is used to optimize owner lookups
855                 by narrowing the search down to a smaller number of packages.
856                 """
857                 try:
858                         from hashlib import md5 as _new_hash
859                 except ImportError:
860                         from md5 import new as _new_hash
861
862                 _hash_bits = 16
863                 _hex_chars = int(_hash_bits / 4)
864
865                 def __init__(self, vardb):
866                         self._vardb = vardb
867
868                 def add(self, cpv):
869                         eroot_len = len(self._vardb._eroot)
870                         contents = self._vardb._dblink(cpv).getcontents()
871                         pkg_hash = self._hash_pkg(cpv)
872                         if not contents:
873                                 # Empty path is a code used to represent empty contents.
874                                 self._add_path("", pkg_hash)
875
876                         for x in contents:
877                                 self._add_path(x[eroot_len:], pkg_hash)
878
879                         self._vardb._aux_cache["modified"].add(cpv)
880
881                 def _add_path(self, path, pkg_hash):
882                         """
883                         Empty path is a code that represents empty contents.
884                         """
885                         if path:
886                                 name = os.path.basename(path.rstrip(os.path.sep))
887                                 if not name:
888                                         return
889                         else:
890                                 name = path
891                         name_hash = self._hash_str(name)
892                         base_names = self._vardb._aux_cache["owners"]["base_names"]
893                         pkgs = base_names.get(name_hash)
894                         if pkgs is None:
895                                 pkgs = {}
896                                 base_names[name_hash] = pkgs
897                         pkgs[pkg_hash] = None
898
899                 def _hash_str(self, s):
900                         h = self._new_hash()
901                         # Always use a constant utf_8 encoding here, since
902                         # the "default" encoding can change.
903                         h.update(_unicode_encode(s,
904                                 encoding=_encodings['repo.content'],
905                                 errors='backslashreplace'))
906                         h = h.hexdigest()
907                         h = h[-self._hex_chars:]
908                         h = int(h, 16)
909                         return h
910
911                 def _hash_pkg(self, cpv):
912                         counter, mtime = self._vardb.aux_get(
913                                 cpv, ["COUNTER", "_mtime_"])
914                         try:
915                                 counter = int(counter)
916                         except ValueError:
917                                 counter = 0
918                         return (cpv, counter, mtime)
919
	class _owners_db(object):
		"""
		Answers "which installed package owns this file" queries on
		top of the basename index maintained by _owners_cache.
		"""

		def __init__(self, vardb):
			self._vardb = vardb

		def populate(self):
			self._populate()

		def _populate(self):
			"""
			Synchronize the basename cache with the set of currently
			installed packages: drop malformed buckets, index any
			uncached packages, and prune stale package hashes.

			@return: the _owners_cache instance used for the update
			"""
			owners_cache = vardbapi._owners_cache(self._vardb)
			cached_hashes = set()
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			# Take inventory of all cached package hashes.
			for name, hash_values in list(base_names.items()):
				if not isinstance(hash_values, dict):
					# Discard malformed cache buckets.
					del base_names[name]
					continue
				cached_hashes.update(hash_values)

			# Create sets of valid package hashes and uncached packages.
			uncached_pkgs = set()
			hash_pkg = owners_cache._hash_pkg
			valid_pkg_hashes = set()
			for cpv in self._vardb.cpv_all():
				hash_value = hash_pkg(cpv)
				valid_pkg_hashes.add(hash_value)
				if hash_value not in cached_hashes:
					uncached_pkgs.add(cpv)

			# Cache any missing packages.
			for cpv in uncached_pkgs:
				owners_cache.add(cpv)

			# Delete any stale cache.
			stale_hashes = cached_hashes.difference(valid_pkg_hashes)
			if stale_hashes:
				for base_name_hash, bucket in list(base_names.items()):
					for hash_value in stale_hashes.intersection(bucket):
						del bucket[hash_value]
					if not bucket:
						del base_names[base_name_hash]

			return owners_cache

		def get_owners(self, path_iter):
			"""
			@return the owners as a dblink -> set(files) mapping.
			"""
			owners = {}
			for owner, f in self.iter_owners(path_iter):
				owned_files = owners.get(owner)
				if owned_files is None:
					owned_files = set()
					owners[owner] = owned_files
				owned_files.add(f)
			return owners

		def getFileOwnerMap(self, path_iter):
			"""
			@return the owners as a file -> set(dblink) mapping (the
			inverse orientation of get_owners).
			"""
			owners = self.get_owners(path_iter)
			file_owners = {}
			for pkg_dblink, files in owners.items():
				for f in files:
					owner_set = file_owners.get(f)
					if owner_set is None:
						owner_set = set()
						file_owners[f] = owner_set
					owner_set.add(pkg_dblink)
			return file_owners

		def iter_owners(self, path_iter):
			"""
			Iterate over tuples of (dblink, path). In order to avoid
			consuming too many resources for too much time, resources
			are only allocated for the duration of a given iter_owners()
			call. Therefore, to maximize reuse of resources when searching
			for multiple files, it's best to search for them all in a single
			call.
			"""

			if not isinstance(path_iter, list):
				path_iter = list(path_iter)
			owners_cache = self._populate()
			vardb = self._vardb
			root = vardb._eroot
			hash_pkg = owners_cache._hash_pkg
			hash_str = owners_cache._hash_str
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			# Cache dblink instances since CONTENTS parsing is costly,
			# but bound the cache size (see dblink() below).
			dblink_cache = {}

			def dblink(cpv):
				x = dblink_cache.get(cpv)
				if x is None:
					if len(dblink_cache) > 20:
						# Ensure that we don't run out of memory.
						# This is caught below and triggers the
						# low-memory fallback.
						raise StopIteration()
					x = self._vardb._dblink(cpv)
					dblink_cache[cpv] = x
				return x

			while path_iter:

				path = path_iter.pop()
				# A path not starting with os.sep is a bare basename.
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))

				if not name:
					continue

				name_hash = hash_str(name)
				pkgs = base_names.get(name_hash)
				owners = []
				if pkgs is not None:
					try:
						for hash_value in pkgs:
							# Validate the cache entry layout before
							# unpacking, since the persisted cache may
							# be stale or corrupt.
							if not isinstance(hash_value, tuple) or \
								len(hash_value) != 3:
								continue
							cpv, counter, mtime = hash_value
							if not isinstance(cpv, basestring):
								continue
							try:
								current_hash = hash_pkg(cpv)
							except KeyError:
								# Package is no longer installed.
								continue

							# Skip entries whose package changed since
							# they were cached.
							if current_hash != hash_value:
								continue

							if is_basename:
								for p in dblink(cpv).getcontents():
									if os.path.basename(p) == name:
										owners.append((cpv, p[len(root):]))
							else:
								if dblink(cpv).isowner(path):
									owners.append((cpv, path))

					except StopIteration:
						# Raised by dblink() when its cache grows too
						# large; redo the current path and the rest via
						# the low-memory implementation.
						path_iter.append(path)
						del owners[:]
						dblink_cache.clear()
						gc.collect()
						for x in self._iter_owners_low_mem(path_iter):
							yield x
						return
					else:
						for cpv, p in owners:
							yield (dblink(cpv), p)

		def _iter_owners_low_mem(self, path_list):
			"""
			This implementation will make a short-lived dblink instance
			(and parse CONTENTS) for every single installed package.
			This is slower but uses less memory than the method which
			uses the basename cache.
			"""

			if not path_list:
				return

			path_info_list = []
			for path in path_list:
				# A path not starting with os.sep is a bare basename.
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))
				path_info_list.append((path, name, is_basename))

			root = self._vardb._eroot
			for cpv in self._vardb.cpv_all():
				dblnk =  self._vardb._dblink(cpv)

				for path, name, is_basename in path_info_list:
					if is_basename:
						for p in dblnk.getcontents():
							if os.path.basename(p) == name:
								yield dblnk, p[len(root):]
					else:
						if dblnk.isowner(path):
							yield dblnk, path
1105
class vartree(object):
	"""
	This tree will scan a var/db/pkg database located at
	settings['ROOT'] (see __init__).
	"""

	def __init__(self, root=None, virtual=None, categories=None,
		settings=None):
		"""
		@param root: deprecated and ignored; settings['ROOT'] is used
		@param virtual: unused, retained for backward compatibility
		@param categories: unused, retained for backward compatibility
		@param settings: config instance; defaults to portage.settings
		"""
		if settings is None:
			settings = portage.settings
		self.root = settings['ROOT']

		if root is not None and root != self.root:
			warnings.warn("The 'root' parameter of the " + \
				"portage.dbapi.vartree.vartree" + \
				" constructor is now unused. Use " + \
				"settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)

		self.settings = settings
		self.dbapi = vardbapi(settings=settings, vartree=self)
		# This tree needs no explicit population step.
		self.populated = 1

	def getpath(self, mykey, filename=None):
		"""Return the vdb path for mykey (optionally a file within it)."""
		return self.dbapi.getpath(mykey, filename=filename)

	def zap(self, mycpv):
		"""No-op, retained for API compatibility."""
		return

	def inject(self, mycpv):
		"""No-op, retained for API compatibility."""
		return

	def get_provide(self, mycpv):
		"""
		Return the list of virtuals (cat/pkg strings) provided by the
		installed package, with USE conditionals in PROVIDE evaluated.
		On parse errors a diagnostic is written and [] is returned.
		"""
		myprovides = []
		mylines = None
		try:
			mylines, myuse = self.dbapi.aux_get(mycpv, ["PROVIDE", "USE"])
			if mylines:
				myuse = myuse.split()
				mylines = use_reduce(mylines, uselist=myuse, flat=True)
				for myprovide in mylines:
					mys = catpkgsplit(myprovide)
					if not mys:
						# Unversioned atom; split manually.
						mys = myprovide.split("/")
					myprovides += [mys[0] + "/" + mys[1]]
			return myprovides
		except SystemExit:
			raise
		except Exception as e:
			mydir = self.dbapi.getpath(mycpv)
			writemsg(_("\nParse Error reading PROVIDE and USE in '%s'\n") % mydir,
				noiselevel=-1)
			if mylines:
				writemsg(_("Possibly Invalid: '%s'\n") % str(mylines),
					noiselevel=-1)
			writemsg(_("Exception: %s\n\n") % str(e), noiselevel=-1)
			return []

	def get_all_provides(self):
		"""Return a mapping of virtual -> list of providing cpvs."""
		myprovides = {}
		for node in self.getallcpv():
			for mykey in self.get_provide(node):
				if mykey in myprovides:
					myprovides[mykey] += [node]
				else:
					myprovides[mykey] = [node]
		return myprovides

	def dep_bestmatch(self, mydep, use_cache=1):
		"compatibility method -- all matches, not just visible ones"
		mymatch = best(self.dbapi.match(
			dep_expand(mydep, mydb=self.dbapi, settings=self.settings),
			use_cache=use_cache))
		if mymatch is None:
			return ""
		else:
			return mymatch

	def dep_match(self, mydep, use_cache=1):
		"compatibility method -- we want to see all matches, not just visible ones"
		mymatch = self.dbapi.match(mydep, use_cache=use_cache)
		if mymatch is None:
			return []
		else:
			return mymatch

	def exists_specific(self, cpv):
		"""Return True if the exact cpv is installed."""
		return self.dbapi.cpv_exists(cpv)

	def getallcpv(self):
		"""temporary function, probably to be renamed --- Gets a list of all
		category/package-versions installed on the system."""
		return self.dbapi.cpv_all()

	def getallnodes(self):
		"""new behavior: these are all *unmasked* nodes.  There may or may not be available
		masked package for nodes in this nodes list."""
		return self.dbapi.cp_all()

	def getebuildpath(self, fullpackage):
		"""Return the path of the installed copy of the ebuild."""
		cat, package = catsplit(fullpackage)
		return self.getpath(fullpackage, filename=package+".ebuild")

	def getslot(self, mycatpkg):
		"Get a slot for a catpkg; assume it exists."
		try:
			return self.dbapi.aux_get(mycatpkg, ["SLOT"])[0]
		except KeyError:
			# Not installed; callers expect "" rather than an error.
			return ""

	def populate(self):
		self.populated = 1
1217
1218 class dblink(object):
1219         """
1220         This class provides an interface to the installed package database
1221         At present this is implemented as a text backend in /var/db/pkg.
1222         """
1223
1224         import re
1225         _normalize_needed = re.compile(r'//|^[^/]|./$|(^|/)\.\.?(/|$)')
1226
1227         _contents_re = re.compile(r'^(' + \
1228                 r'(?P<dir>(dev|dir|fif) (.+))|' + \
1229                 r'(?P<obj>(obj) (.+) (\S+) (\d+))|' + \
1230                 r'(?P<sym>(sym) (.+) -> (.+) ((\d+)|(?P<oldsym>(' + \
1231                 r'\(\d+, \d+L, \d+L, \d+, \d+, \d+, \d+L, \d+, (\d+), \d+\)))))' + \
1232                 r')$'
1233         )
1234
	def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None,
		vartree=None, blockers=None, scheduler=None, pipe=None):
		"""
		Creates a DBlink object for a given CPV.
		The given CPV may not be present in the database already.

		@param cat: Category
		@type cat: String
		@param pkg: Package (PV)
		@type pkg: String
		@param myroot: ignored, settings['ROOT'] is used instead
		@type myroot: String (Path)
		@param settings: Typically portage.settings
		@type settings: portage.config
		@param treetype: one of ['porttree','bintree','vartree']
		@type treetype: String
		@param vartree: an instance of vartree corresponding to myroot.
		@type vartree: vartree
		@param blockers: stored as self._blockers for later use
		@param scheduler: scheduler interface used to run ebuild phases;
			a default is created on demand (see unmerge()) if None
		@param pipe: stored as self._pipe and forwarded to sibling dblink
			instances created internally
		@raise TypeError: if the settings argument is None
		"""

		if settings is None:
			raise TypeError("settings argument is required")

		mysettings = settings
		# The myroot parameter is deprecated; the configured ROOT wins.
		myroot = settings['ROOT']
		self.cat = cat
		self.pkg = pkg
		self.mycpv = self.cat + "/" + self.pkg
		# mysplit[0] becomes "category/package" (version parts follow).
		self.mysplit = list(catpkgsplit(self.mycpv)[1:])
		self.mysplit[0] = "%s/%s" % (self.cat, self.mysplit[0])
		self.treetype = treetype
		if vartree is None:
			vartree = portage.db[myroot]["vartree"]
		self.vartree = vartree
		self._blockers = blockers
		self._scheduler = scheduler

		# WARNING: EROOT support is experimental and may be incomplete
		# for cases in which EPREFIX is non-empty.
		self._eroot = mysettings['EROOT']
		self.dbroot = normalize_path(os.path.join(self._eroot, VDB_PATH))
		self.dbcatdir = self.dbroot+"/"+cat
		self.dbpkgdir = self.dbcatdir+"/"+pkg
		# Temporary vdb directory used while this package is being merged.
		self.dbtmpdir = self.dbcatdir+"/-MERGING-"+pkg
		self.dbdir = self.dbpkgdir
		self.settings = mysettings
		self._verbose = self.settings.get("PORTAGE_VERBOSE") == "1"

		self.myroot=myroot
		self._installed_instance = None
		# Caches for getcontents() and its derived lookup indexes.
		self.contentscache = None
		self._contents_inodes = None
		self._contents_basenames = None
		self._linkmap_broken = False
		self._md5_merge_map = {}
		# (root, cpv) identifies this instance for __hash__/__eq__.
		self._hash_key = (self.myroot, self.mycpv)
		self._protect_obj = None
		self._pipe = pipe
1293
1294         def __hash__(self):
1295                 return hash(self._hash_key)
1296
1297         def __eq__(self, other):
1298                 return isinstance(other, dblink) and \
1299                         self._hash_key == other._hash_key
1300
1301         def _get_protect_obj(self):
1302
1303                 if self._protect_obj is None:
1304                         self._protect_obj = ConfigProtect(self._eroot,
1305                         portage.util.shlex_split(
1306                                 self.settings.get("CONFIG_PROTECT", "")),
1307                         portage.util.shlex_split(
1308                                 self.settings.get("CONFIG_PROTECT_MASK", "")))
1309
1310                 return self._protect_obj
1311
1312         def isprotected(self, obj):
1313                 return self._get_protect_obj().isprotected(obj)
1314
1315         def updateprotect(self):
1316                 self._get_protect_obj().updateprotect()
1317
1318         def lockdb(self):
1319                 self.vartree.dbapi.lock()
1320
1321         def unlockdb(self):
1322                 self.vartree.dbapi.unlock()
1323
1324         def getpath(self):
1325                 "return path to location of db information (for >>> informational display)"
1326                 return self.dbdir
1327
1328         def exists(self):
1329                 "does the db entry exist?  boolean."
1330                 return os.path.exists(self.dbdir)
1331
1332         def delete(self):
1333                 """
1334                 Remove this entry from the database
1335                 """
1336                 if not os.path.exists(self.dbdir):
1337                         return
1338
1339                 # Check validity of self.dbdir before attempting to remove it.
1340                 if not self.dbdir.startswith(self.dbroot):
1341                         writemsg(_("portage.dblink.delete(): invalid dbdir: %s\n") % \
1342                                 self.dbdir, noiselevel=-1)
1343                         return
1344
1345                 shutil.rmtree(self.dbdir)
1346                 # If empty, remove parent category directory.
1347                 try:
1348                         os.rmdir(os.path.dirname(self.dbdir))
1349                 except OSError:
1350                         pass
1351                 self.vartree.dbapi._remove(self)
1352
1353         def clearcontents(self):
1354                 """
1355                 For a given db entry (self), erase the CONTENTS values.
1356                 """
1357                 self.lockdb()
1358                 try:
1359                         if os.path.exists(self.dbdir+"/CONTENTS"):
1360                                 os.unlink(self.dbdir+"/CONTENTS")
1361                 finally:
1362                         self.unlockdb()
1363
1364         def _clear_contents_cache(self):
1365                 self.contentscache = None
1366                 self._contents_inodes = None
1367                 self._contents_basenames = None
1368
	def getcontents(self):
		"""
		Get the installed files of a given package (aka what that package installed)

		Returns a dict mapping absolute paths (ROOT included) to entry
		tuples: ("dir",) (also used for dev/fif types) for directories,
		(type, mtime, md5sum) for regular files, and (type, mtime, dest)
		for symlinks.  Parent directories are added implicitly.  The
		result is cached in self.contentscache.
		"""
		contents_file = os.path.join(self.dbdir, "CONTENTS")
		if self.contentscache is not None:
			return self.contentscache
		pkgfiles = {}
		try:
			myc = codecs.open(_unicode_encode(contents_file,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# A missing CONTENTS file yields (and caches) an empty dict.
			self.contentscache = pkgfiles
			return pkgfiles
		mylines = myc.readlines()
		myc.close()
		null_byte = "\0"
		normalize_needed = self._normalize_needed
		contents_re = self._contents_re
		# Group indexes for the alternatives of _contents_re; the groups
		# following each named group hold its sub-fields (see base+N below).
		obj_index = contents_re.groupindex['obj']
		dir_index = contents_re.groupindex['dir']
		sym_index = contents_re.groupindex['sym']
		# The old symlink format may exist on systems that have packages
		# which were installed many years ago (see bug #351814).
		oldsym_index = contents_re.groupindex['oldsym']
		# CONTENTS files already contain EPREFIX
		myroot = self.settings['ROOT']
		if myroot == os.path.sep:
			myroot = None
		# used to generate parent dir entries
		dir_entry = (_unicode_decode("dir"),)
		# Number of path components in EROOT, used to stop implicit
		# parent-directory generation once EROOT itself is reached.
		eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1
		pos = 0
		errors = []
		for pos, line in enumerate(mylines):
			if null_byte in line:
				# Null bytes are a common indication of corruption.
				errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
				continue
			line = line.rstrip("\n")
			m = contents_re.match(line)
			if m is None:
				errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
				continue

			if m.group(obj_index) is not None:
				base = obj_index
				#format: type, mtime, md5sum
				data = (m.group(base+1), m.group(base+4), m.group(base+3))
			elif m.group(dir_index) is not None:
				base = dir_index
				#format: type
				data = (m.group(base+1),)
			elif m.group(sym_index) is not None:
				base = sym_index
				if m.group(oldsym_index) is None:
					mtime = m.group(base+5)
				else:
					mtime = m.group(base+8)
				#format: type, mtime, dest
				data = (m.group(base+1), mtime, m.group(base+3))
			else:
				# This won't happen as long the regular expression
				# is written to only match valid entries.
				raise AssertionError(_("required group not found " + \
					"in CONTENTS entry: '%s'") % line)

			path = m.group(base+2)
			if normalize_needed.search(path) is not None:
				path = normalize_path(path)
				if not path.startswith(os.path.sep):
					path = os.path.sep + path

			if myroot is not None:
				path = os.path.join(myroot, path.lstrip(os.path.sep))

			# Implicitly add parent directories, since we can't necessarily
			# assume that they are explicitly listed in CONTENTS, and it's
			# useful for callers if they can rely on parent directory entries
			# being generated here (crucial for things like dblink.isowner()).
			path_split = path.split(os.sep)
			path_split.pop()
			while len(path_split) > eroot_split_len:
				parent = os.sep.join(path_split)
				if parent in pkgfiles:
					break
				pkgfiles[parent] = dir_entry
				path_split.pop()

			pkgfiles[path] = data

		if errors:
			writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
			for pos, e in errors:
				writemsg(_("!!!   line %d: %s\n") % (pos, e), noiselevel=-1)
		self.contentscache = pkgfiles
		return pkgfiles
1471
	def _prune_plib_registry(self, unmerge=False,
		needed=None, preserve_paths=None):
		"""
		Remove preserved libraries that no longer have any consumers and
		update the preserved-libs registry accordingly (all under the
		registry lock).

		@param unmerge: True when called while unmerging self.mycpv; the
			registry entries for self.mycpv are then unregistered (and
			re-registered with the libs still to preserve, if any)
		@param needed: filename with NEEDED data to include when
			rebuilding the linkage map
		@param preserve_paths: paths preserved by a replacement package
			currently being merged; they are not registered in the
			PreservedLibsRegistry yet, so they must be passed to the
			LinkageMap explicitly
		"""
		# remove preserved libraries that don't have any consumers left
		plib_registry = self.vartree.dbapi._plib_registry
		if plib_registry:
			plib_registry.lock()
			try:
				plib_registry.load()

				unmerge_with_replacement = \
					unmerge and preserve_paths is not None
				if unmerge_with_replacement:
					# If self.mycpv is about to be unmerged and we
					# have a replacement package, we want to exclude
					# the irrelevant NEEDED data that belongs to
					# files which are being unmerged now.
					exclude_pkgs = (self.mycpv,)
				else:
					exclude_pkgs = None

				self._linkmap_rebuild(exclude_pkgs=exclude_pkgs,
					include_file=needed, preserve_paths=preserve_paths)

				unmerge_preserve = None
				if unmerge and not unmerge_with_replacement:
					unmerge_preserve = \
						self._find_libs_to_preserve(unmerge=True)

				cpv_lib_map = self._find_unused_preserved_libs()
				if cpv_lib_map:
					self._remove_preserved_libs(cpv_lib_map)
					for cpv, removed in cpv_lib_map.items():
						if not self.vartree.dbapi.cpv_exists(cpv):
							continue
						self.vartree.dbapi.removeFromContents(cpv, removed)

				if unmerge:
					counter = self.vartree.dbapi.cpv_counter(self.mycpv)
					plib_registry.unregister(self.mycpv,
						self.settings["SLOT"], counter)
					if unmerge_preserve:
						plib_registry.register(self.mycpv,
							self.settings["SLOT"], counter, unmerge_preserve)
						# Remove the preserved files from our contents
						# so that they won't be unmerged.
						# NOTE(review): removeFromContents is passed self
						# (a dblink) here but a cpv string above --
						# presumably it accepts both; confirm against
						# vardbapi.removeFromContents.
						self.vartree.dbapi.removeFromContents(self,
							unmerge_preserve)

				plib_registry.store()
			finally:
				plib_registry.unlock()
1523
	def unmerge(self, pkgfiles=None, trimworld=None, cleanup=True,
		ldpath_mtimes=None, others_in_slot=None, needed=None,
		preserve_paths=None):
		"""
		Calls prerm
		Unmerges a given package (CPV)
		calls postrm
		calls cleanrm
		calls env_update

		@param pkgfiles: files to unmerge (generally self.getcontents() )
		@type pkgfiles: Dictionary
		@param trimworld: Unused
		@type trimworld: Boolean
		@param cleanup: cleanup to pass to doebuild (see doebuild)
		@type cleanup: Boolean
		@param ldpath_mtimes: mtimes to pass to env_update (see env_update)
		@type ldpath_mtimes: Dictionary
		@param others_in_slot: all dblink instances in this slot, excluding self
		@type others_in_slot: list
		@param needed: Filename containing libraries needed after unmerge.
		@type needed: String
		@param preserve_paths: Libraries preserved by a package instance that
			is currently being merged. They need to be explicitly passed to the
			LinkageMap, since they are not registered in the
			PreservedLibsRegistry yet.
		@type preserve_paths: set
		@rtype: Integer
		@returns:
		1. os.EX_OK if everything went well.
		2. return code of the failed phase (for prerm, postrm, cleanrm)
		"""

		if trimworld is not None:
			warnings.warn("The trimworld parameter of the " + \
				"portage.dbapi.vartree.dblink.unmerge()" + \
				" method is now unused.",
				DeprecationWarning, stacklevel=2)

		# Decide whether phase output should be treated as background
		# output and make sure a scheduler exists for logging it.
		background = False
		if self._scheduler is None:
			# We create a scheduler instance and use it to
			# log unmerge output separately from merge output.
			self._scheduler = PollScheduler().sched_iface
		if self.settings.get("PORTAGE_BACKGROUND") == "subprocess":
			if self.settings.get("PORTAGE_BACKGROUND_UNMERGE") == "1":
				self.settings["PORTAGE_BACKGROUND"] = "1"
				self.settings.backup_changes("PORTAGE_BACKGROUND")
				background = True
			else:
				self.settings.pop("PORTAGE_BACKGROUND", None)
		elif self.settings.get("PORTAGE_BACKGROUND") == "1":
			background = True

		self.vartree.dbapi._bump_mtime(self.mycpv)
		showMessage = self._display_merge
		# Discard the cached category list, since it may be invalidated
		# by this unmerge.
		if self.vartree.dbapi._categories is not None:
			self.vartree.dbapi._categories = None
		# When others_in_slot is supplied, the security check has already been
		# done for this slot, so it shouldn't be repeated until the next
		# replacement or unmerge operation.
		if others_in_slot is None:
			slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
			slot_matches = self.vartree.dbapi.match(
				"%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
			others_in_slot = []
			for cur_cpv in slot_matches:
				if cur_cpv == self.mycpv:
					continue
				others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
					settings=self.settings, vartree=self.vartree,
					treetype="vartree", pipe=self._pipe))

			retval = self._security_check([self] + others_in_slot)
			if retval:
				return retval

		contents = self.getcontents()
		# Now, don't assume that the name of the ebuild is the same as the
		# name of the dir; the package may have been moved.
		myebuildpath = None
		failures = 0
		ebuild_phase = "prerm"
		log_path = None
		mystuff = os.listdir(self.dbdir)
		for x in mystuff:
			if x.endswith(".ebuild"):
				myebuildpath = os.path.join(self.dbdir, self.pkg + ".ebuild")
				if x[:-7] != self.pkg:
					# Clean up after vardbapi.move_ent() breakage in
					# portage versions before 2.1.2
					os.rename(os.path.join(self.dbdir, x), myebuildpath)
					write_atomic(os.path.join(self.dbdir, "PF"), self.pkg+"\n")
				break

		self.settings.setcpv(self.mycpv, mydb=self.vartree.dbapi)
		if myebuildpath:
			try:
				doebuild_environment(myebuildpath, "prerm",
					settings=self.settings, db=self.vartree.dbapi)
			except UnsupportedAPIException as e:
				failures += 1
				# Sometimes this happens due to corruption of the EAPI file.
				showMessage(_("!!! FAILED prerm: %s\n") % \
					os.path.join(self.dbdir, "EAPI"),
					level=logging.ERROR, noiselevel=-1)
				showMessage(_unicode_decode("%s\n") % (e,),
					level=logging.ERROR, noiselevel=-1)
				myebuildpath = None

		self._prune_plib_registry(unmerge=True, needed=needed,
			preserve_paths=preserve_paths)

		builddir_lock = None
		log_path = None
		scheduler = self._scheduler
		retval = os.EX_OK
		try:
			if myebuildpath:
				# Only create builddir_lock if doebuild_environment
				# succeeded, since that's needed to initialize
				# PORTAGE_BUILDDIR.
				builddir_lock = EbuildBuildDir(
					scheduler=scheduler,
					settings=self.settings)
				builddir_lock.lock()
				prepare_build_dirs(settings=self.settings, cleanup=True)
				log_path = self.settings.get("PORTAGE_LOG_FILE")

				phase = EbuildPhase(background=background,
					phase=ebuild_phase, scheduler=scheduler,
					settings=self.settings)
				phase.start()
				retval = phase.wait()

				# XXX: Decide how to handle failures here.
				if retval != os.EX_OK:
					failures += 1
					showMessage(_("!!! FAILED prerm: %s\n") % retval,
						level=logging.ERROR, noiselevel=-1)

			# Remove the installed files while holding the config memory
			# file lock, so that symlink handling cannot overlap with a
			# concurrent env-update.
			conf_mem_file = os.path.join(self._eroot, CONFIG_MEMORY_FILE)
			conf_mem_lock = lockfile(conf_mem_file)
			try:
				self._unmerge_pkgfiles(pkgfiles, others_in_slot, conf_mem_file)
			finally:
				unlockfile(conf_mem_lock)
			self._clear_contents_cache()

			if myebuildpath:
				ebuild_phase = "postrm"
				phase = EbuildPhase(background=background,
					phase=ebuild_phase, scheduler=scheduler,
					settings=self.settings)
				phase.start()
				retval = phase.wait()

				# XXX: Decide how to handle failures here.
				if retval != os.EX_OK:
					failures += 1
					showMessage(_("!!! FAILED postrm: %s\n") % retval,
						level=logging.ERROR, noiselevel=-1)

		finally:
			self.vartree.dbapi._bump_mtime(self.mycpv)
			if builddir_lock:
				try:
					if myebuildpath:
						if retval != os.EX_OK:
							msg_lines = []
							msg = _("The '%(ebuild_phase)s' "
							"phase of the '%(cpv)s' package "
							"has failed with exit value %(retval)s.") % \
							{"ebuild_phase":ebuild_phase, "cpv":self.mycpv,
							"retval":retval}
							from textwrap import wrap
							msg_lines.extend(wrap(msg, 72))
							msg_lines.append("")

							ebuild_name = os.path.basename(myebuildpath)
							ebuild_dir = os.path.dirname(myebuildpath)
							msg = _("The problem occurred while executing "
							"the ebuild file named '%(ebuild_name)s' "
							"located in the '%(ebuild_dir)s' directory. "
							"If necessary, manually remove "
							"the environment.bz2 file and/or the "
							"ebuild file located in that directory.") % \
							{"ebuild_name":ebuild_name, "ebuild_dir":ebuild_dir}
							msg_lines.extend(wrap(msg, 72))
							msg_lines.append("")

							msg = _("Removal "
							"of the environment.bz2 file is "
							"preferred since it may allow the "
							"removal phases to execute successfully. "
							"The ebuild will be "
							"sourced and the eclasses "
							"from the current portage tree will be used "
							"when necessary. Removal of "
							"the ebuild file will cause the "
							"pkg_prerm() and pkg_postrm() removal "
							"phases to be skipped entirely.")
							msg_lines.extend(wrap(msg, 72))

							self._eerror(ebuild_phase, msg_lines)

					self._elog_process(phasefilter=("prerm", "postrm"))

					if retval == os.EX_OK and builddir_lock is not None:
						# myebuildpath might be None, so ensure
						# it has a sane value for the clean phase,
						# even though it won't really be sourced.
						myebuildpath = os.path.join(self.dbdir,
							self.pkg + ".ebuild")
						doebuild_environment(myebuildpath, "cleanrm",
							settings=self.settings, db=self.vartree.dbapi)
						phase = EbuildPhase(background=background,
							phase="cleanrm", scheduler=scheduler,
							settings=self.settings)
						phase.start()
						retval = phase.wait()
				finally:
					if builddir_lock is not None:
						builddir_lock.unlock()

		if log_path is not None:

			# Discard the log unless a phase failed or the user asked to
			# keep unmerge logs via FEATURES=unmerge-logs.
			if not failures and 'unmerge-logs' not in self.settings.features:
				try:
					os.unlink(log_path)
				except OSError:
					pass

			# Also remove the log if it ended up empty.
			try:
				st = os.stat(log_path)
			except OSError:
				pass
			else:
				if st.st_size == 0:
					try:
						os.unlink(log_path)
					except OSError:
						pass

		if log_path is not None and os.path.exists(log_path):
			# Restore this since it gets lost somewhere above and it
			# needs to be set for _display_merge() to be able to log.
			# Note that the log isn't necessarily supposed to exist
			# since if PORT_LOGDIR is unset then it's a temp file
			# so it gets cleaned above.
			self.settings["PORTAGE_LOG_FILE"] = log_path
		else:
			self.settings.pop("PORTAGE_LOG_FILE", None)

		# Lock the config memory file to prevent symlink creation
		# in merge_contents from overlapping with env-update.
		conf_mem_file = os.path.join(self._eroot, CONFIG_MEMORY_FILE)
		conf_mem_lock = lockfile(conf_mem_file)
		try:
			env_update(target_root=self.settings['ROOT'],
				prev_mtimes=ldpath_mtimes,
				contents=contents, env=self.settings.environ(),
				writemsg_level=self._display_merge)
		finally:
			unlockfile(conf_mem_lock)

		return os.EX_OK
1791
1792         def _display_merge(self, msg, level=0, noiselevel=0):
1793                 if not self._verbose and noiselevel >= 0 and level < logging.WARN:
1794                         return
1795                 if self._scheduler is None:
1796                         writemsg_level(msg, level=level, noiselevel=noiselevel)
1797                 else:
1798                         log_path = None
1799                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
1800                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
1801                         background = self.settings.get("PORTAGE_BACKGROUND") == "1"
1802
1803                         if background and log_path is None:
1804                                 if level >= logging.WARN:
1805                                         writemsg_level(msg, level=level, noiselevel=noiselevel)
1806                         else:
1807                                 self._scheduler.output(msg,
1808                                         log_path=log_path, background=background,
1809                                         level=level, noiselevel=noiselevel)
1810
	def _unmerge_pkgfiles(self, pkgfiles, others_in_slot, conf_mem_file):
		"""
		
		Unmerges the contents of a package from the liveFS
		Removes the VDB entry for self
		
		@param pkgfiles: typically self.getcontents()
		@type pkgfiles: Dictionary { filename: [ 'type', '?', 'md5sum' ] }
		@param others_in_slot: all dblink instances in this slot, excluding self
		@type others_in_slot: list
		@param conf_mem_file: path to the config-protect memory file; entries
			for files removed here are pruned from it at the end
		@type conf_mem_file: String
		@rtype: None
		"""

		# _os_merge encodes paths with the 'merge' encoding; it may be
		# swapped for portage.os (and perf_md5 likewise) inside the loop
		# below when a contents entry requires the fs encoding instead.
		os = _os_merge
		perf_md5 = perform_md5
		showMessage = self._display_merge

		if not pkgfiles:
			showMessage(_("No package files given... Grabbing a set.\n"))
			pkgfiles = self.getcontents()

		# Look up the other installed instances of this slot if the caller
		# did not supply them; files they own must not be unmerged.
		if others_in_slot is None:
			others_in_slot = []
			slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
			slot_matches = self.vartree.dbapi.match(
				"%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
			for cur_cpv in slot_matches:
				if cur_cpv == self.mycpv:
					continue
				others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
					settings=self.settings,
					vartree=self.vartree, treetype="vartree", pipe=self._pipe))

		dest_root = self._eroot
		dest_root_len = len(dest_root) - 1

		# Config-protect memory; entries that become stale (their file is
		# unmerged below) are collected and removed before returning.
		cfgfiledict = grabdict(conf_mem_file)
		stale_confmem = []

		unmerge_orphans = "unmerge-orphans" in self.settings.features
		calc_prelink = "prelink-checksums" in self.settings.features

		if pkgfiles:
			self.updateprotect()
			# Reverse-sorted order makes files come before their parent
			# directories, so directories can be removed last.
			mykeys = list(pkgfiles)
			mykeys.sort()
			mykeys.reverse()

			#process symlinks second-to-last, directories last.
			mydirs = set()
			# Errors that are tolerated when unlinking files / removing
			# directories; anything else is re-raised.
			ignored_unlink_errnos = (
				errno.EBUSY, errno.ENOENT,
				errno.ENOTDIR, errno.EISDIR)
			ignored_rmdir_errnos = (
				errno.EEXIST, errno.ENOTEMPTY,
				errno.EBUSY, errno.ENOENT,
				errno.ENOTDIR, errno.EISDIR,
				errno.EPERM)
			modprotect = os.path.join(self._eroot, "lib/modules/")

			# Remove a single file or symlink, neutralizing BSD file flags
			# and suid/sgid permissions first.
			def unlink(file_name, lstatobj):
				if bsd_chflags:
					if lstatobj.st_flags != 0:
						bsd_chflags.lchflags(file_name, 0)
					parent_name = os.path.dirname(file_name)
					# Use normal stat/chflags for the parent since we want to
					# follow any symlinks to the real parent directory.
					pflags = os.stat(parent_name).st_flags
					if pflags != 0:
						bsd_chflags.chflags(parent_name, 0)
				try:
					if not stat.S_ISLNK(lstatobj.st_mode):
						# Remove permissions to ensure that any hardlinks to
						# suid/sgid files are rendered harmless.
						os.chmod(file_name, 0)
					os.unlink(file_name)
				except OSError as ose:
					# If the chmod or unlink fails, you are in trouble.
					# With Prefix this can be because the file is owned
					# by someone else (a screwup by root?), on a normal
					# system maybe filesystem corruption.  In any case,
					# if we backtrace and die here, we leave the system
					# in a totally undefined state, hence we just bleed
					# like hell and continue to hopefully finish all our
					# administrative and pkg_postinst stuff.
					self._eerror("postrm", 
						["Could not chmod or unlink '%s': %s" % \
						(file_name, ose)])
				finally:
					if bsd_chflags and pflags != 0:
						# Restore the parent flags we saved before unlinking
						bsd_chflags.chflags(parent_name, pflags)

			# Emit one line of unmerge status output.
			def show_unmerge(zing, desc, file_type, file_name):
					showMessage("%s %s %s %s\n" % \
						(zing, desc.ljust(8), file_type, file_name))

			# Short status tags shown next to each path in the output.
			unmerge_desc = {}
			unmerge_desc["cfgpro"] = _("cfgpro")
			unmerge_desc["replaced"] = _("replaced")
			unmerge_desc["!dir"] = _("!dir")
			unmerge_desc["!empty"] = _("!empty")
			unmerge_desc["!fif"] = _("!fif")
			unmerge_desc["!found"] = _("!found")
			unmerge_desc["!md5"] = _("!md5")
			unmerge_desc["!mtime"] = _("!mtime")
			unmerge_desc["!obj"] = _("!obj")
			unmerge_desc["!sym"] = _("!sym")

			real_root = self.settings['ROOT']
			real_root_len = len(real_root) - 1
			eroot_split_len = len(self.settings["EROOT"].split(os.sep)) - 1

			for i, objkey in enumerate(mykeys):

				obj = normalize_path(objkey)
				if os is _os_merge:
					try:
						_unicode_encode(obj,
							encoding=_encodings['merge'], errors='strict')
					except UnicodeEncodeError:
						# The package appears to have been merged with a 
						# different value of sys.getfilesystemencoding(),
						# so fall back to utf_8 if appropriate.
						try:
							_unicode_encode(obj,
								encoding=_encodings['fs'], errors='strict')
						except UnicodeEncodeError:
							pass
						else:
							os = portage.os
							perf_md5 = portage.checksum.perform_md5

				file_data = pkgfiles[objkey]
				file_type = file_data[0]
				statobj = None
				try:
					statobj = os.stat(obj)
				except OSError:
					pass
				lstatobj = None
				try:
					lstatobj = os.lstat(obj)
				except (OSError, AttributeError):
					pass
				islink = lstatobj is not None and stat.S_ISLNK(lstatobj.st_mode)
				if lstatobj is None:
						show_unmerge("---", unmerge_desc["!found"], file_type, obj)
						continue
				# don't use EROOT, CONTENTS entries already contain EPREFIX
				if obj.startswith(real_root):
					relative_path = obj[real_root_len:]
					is_owned = False
					for dblnk in others_in_slot:
						if dblnk.isowner(relative_path):
							is_owned = True
							break
					if is_owned:
						# A new instance of this package claims the file, so
						# don't unmerge it.
						show_unmerge("---", unmerge_desc["replaced"], file_type, obj)
						continue
					elif relative_path in cfgfiledict:
						# File is going away; its config-protect memory
						# entry is now stale.
						stale_confmem.append(relative_path)
				# next line includes a tweak to protect modules from being unmerged,
				# but we don't protect modules from being overwritten if they are
				# upgraded. We effectively only want one half of the config protection
				# functionality for /lib/modules. For portage-ng both capabilities
				# should be able to be independently specified.
				# TODO: For rebuilds, re-parent previous modules to the new
				# installed instance (so they are not orphans). For normal
				# uninstall (not rebuild/reinstall), remove the modules along
				# with all other files (leave no orphans).
				if obj.startswith(modprotect):
					show_unmerge("---", unmerge_desc["cfgpro"], file_type, obj)
					continue

				# Don't unlink symlinks to directories here since that can
				# remove /lib and /usr/lib symlinks.
				if unmerge_orphans and \
					lstatobj and not stat.S_ISDIR(lstatobj.st_mode) and \
					not (islink and statobj and stat.S_ISDIR(statobj.st_mode)) and \
					not self.isprotected(obj):
					try:
						unlink(obj, lstatobj)
					except EnvironmentError as e:
						if e.errno not in ignored_unlink_errnos:
							raise
						del e
					show_unmerge("<<<", "", file_type, obj)
					continue

				# Skip entries whose recorded mtime no longer matches the
				# filesystem (file was modified after merge); dir/fif/dev
				# entries have no meaningful recorded mtime.
				lmtime = str(lstatobj[stat.ST_MTIME])
				if (pkgfiles[objkey][0] not in ("dir", "fif", "dev")) and (lmtime != pkgfiles[objkey][1]):
					show_unmerge("---", unmerge_desc["!mtime"], file_type, obj)
					continue

				if pkgfiles[objkey][0] == "dir":
					if lstatobj is None or not stat.S_ISDIR(lstatobj.st_mode):
						show_unmerge("---", unmerge_desc["!dir"], file_type, obj)
						continue
					# Directories are deferred and removed last (see below).
					mydirs.add(obj)
				elif pkgfiles[objkey][0] == "sym":
					if not islink:
						show_unmerge("---", unmerge_desc["!sym"], file_type, obj)
						continue
					# Go ahead and unlink symlinks to directories here when
					# they're actually recorded as symlinks in the contents.
					# Normally, symlinks such as /lib -> lib64 are not recorded
					# as symlinks in the contents of a package.  If a package
					# installs something into ${D}/lib/, it is recorded in the
					# contents as a directory even if it happens to correspond
					# to a symlink when it's merged to the live filesystem.
					try:
						unlink(obj, lstatobj)
						show_unmerge("<<<", "", file_type, obj)
					except (OSError, IOError) as e:
						if e.errno not in ignored_unlink_errnos:
							raise
						del e
						show_unmerge("!!!", "", file_type, obj)
				elif pkgfiles[objkey][0] == "obj":
					if statobj is None or not stat.S_ISREG(statobj.st_mode):
						show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
						continue
					mymd5 = None
					try:
						mymd5 = perf_md5(obj, calc_prelink=calc_prelink)
					except FileNotFound as e:
						# the file has disappeared between now and our stat call
						show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
						continue

					# string.lower is needed because db entries used to be in upper-case.  The
					# string.lower allows for backwards compatibility.
					if mymd5 != pkgfiles[objkey][2].lower():
						show_unmerge("---", unmerge_desc["!md5"], file_type, obj)
						continue
					try:
						unlink(obj, lstatobj)
					except (OSError, IOError) as e:
						if e.errno not in ignored_unlink_errnos:
							raise
						del e
					show_unmerge("<<<", "", file_type, obj)
				elif pkgfiles[objkey][0] == "fif":
					if not stat.S_ISFIFO(lstatobj[stat.ST_MODE]):
						show_unmerge("---", unmerge_desc["!fif"], file_type, obj)
						continue
					show_unmerge("---", "", file_type, obj)
				elif pkgfiles[objkey][0] == "dev":
					# Device nodes are reported but never removed here.
					show_unmerge("---", "", file_type, obj)

			# Remove directories deepest-first so children go before parents.
			mydirs = sorted(mydirs)
			mydirs.reverse()

			for obj in mydirs:
				try:
					if bsd_chflags:
						lstatobj = os.lstat(obj)
						if lstatobj.st_flags != 0:
							bsd_chflags.lchflags(obj, 0)
						parent_name = os.path.dirname(obj)
						# Use normal stat/chflags for the parent since we want to
						# follow any symlinks to the real parent directory.
						pflags = os.stat(parent_name).st_flags
						if pflags != 0:
							bsd_chflags.chflags(parent_name, 0)
					try:
						os.rmdir(obj)
					finally:
						if bsd_chflags and pflags != 0:
							# Restore the parent flags we saved before unlinking
							bsd_chflags.chflags(parent_name, pflags)
					show_unmerge("<<<", "", "dir", obj)
				except EnvironmentError as e:
					if e.errno not in ignored_rmdir_errnos:
						raise
					if e.errno != errno.ENOENT:
						show_unmerge("---", unmerge_desc["!empty"], "dir", obj)
					del e

		# Remove stale entries from config memory.
		if stale_confmem:
			for filename in stale_confmem:
				del cfgfiledict[filename]
			writedict(cfgfiledict, conf_mem_file)

		#remove self from vartree database so that our own virtual gets zapped if we're the last node
		self.vartree.zap(self.mycpv)
2101
2102         def isowner(self, filename, destroot=None):
2103                 """ 
2104                 Check if a file belongs to this package. This may
2105                 result in a stat call for the parent directory of
2106                 every installed file, since the inode numbers are
2107                 used to work around the problem of ambiguous paths
2108                 caused by symlinked directories. The results of
2109                 stat calls are cached to optimize multiple calls
2110                 to this method.
2111
2112                 @param filename:
2113                 @type filename:
2114                 @param destroot:
2115                 @type destroot:
2116                 @rtype: Boolean
2117                 @returns:
2118                 1. True if this package owns the file.
2119                 2. False if this package does not own the file.
2120                 """
2121
2122                 if destroot is not None and destroot != self._eroot:
2123                         warnings.warn("The second parameter of the " + \
2124                                 "portage.dbapi.vartree.dblink.isowner()" + \
2125                                 " is now unused. Instead " + \
2126                                 "self.settings['EROOT'] will be used.",
2127                                 DeprecationWarning, stacklevel=2)
2128
2129                 return bool(self._match_contents(filename))
2130
	def _match_contents(self, filename, destroot=None):
		"""
		The matching contents entry is returned, which is useful
		since the path may differ from the one given by the caller,
		due to symlinks.

		@param filename: path to look up in this package's contents
		@param destroot: deprecated and ignored;
			self.settings['ROOT'] is used instead
		@rtype: String
		@return: the contents entry corresponding to the given path, or False
			if the file is not owned by this package.
		"""

		filename = _unicode_decode(filename,
			encoding=_encodings['content'], errors='strict')

		if destroot is not None and destroot != self._eroot:
			warnings.warn("The second parameter of the " + \
				"portage.dbapi.vartree.dblink._match_contents()" + \
				" is now unused. Instead " + \
				"self.settings['ROOT'] will be used.",
				DeprecationWarning, stacklevel=2)

		# don't use EROOT here, image already contains EPREFIX
		destroot = self.settings['ROOT']

		# The given filename argument might have a different encoding than
		# the filenames contained in the contents, so use separate wrapped os
		# modules for each. The basename is more likely to contain non-ascii
		# characters than the directory path, so use os_filename_arg for all
		# operations involving the basename of the filename arg.
		os_filename_arg = _os_merge
		os = _os_merge

		try:
			_unicode_encode(filename,
				encoding=_encodings['merge'], errors='strict')
		except UnicodeEncodeError:
			# The package appears to have been merged with a
			# different value of sys.getfilesystemencoding(),
			# so fall back to utf_8 if appropriate.
			try:
				_unicode_encode(filename,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeEncodeError:
				pass
			else:
				os_filename_arg = portage.os

		destfile = normalize_path(
			os_filename_arg.path.join(destroot,
			filename.lstrip(os_filename_arg.path.sep)))

		# Fast path: exact match against the contents dictionary.
		pkgfiles = self.getcontents()
		if pkgfiles and destfile in pkgfiles:
			return destfile
		if pkgfiles:
			basename = os_filename_arg.path.basename(destfile)
			# Lazily build (and cache) the set of all basenames in the
			# contents, used as a cheap pre-filter below.
			if self._contents_basenames is None:

				try:
					for x in pkgfiles:
						_unicode_encode(x,
							encoding=_encodings['merge'],
							errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['fs'],
								errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

				self._contents_basenames = set(
					os.path.basename(x) for x in pkgfiles)
			if basename not in self._contents_basenames:
				# This is a shortcut that, in most cases, allows us to
				# eliminate this package as an owner without the need
				# to examine inode numbers of parent directories.
				return False

			# Use stat rather than lstat since we want to follow
			# any symlinks to the real parent directory.
			parent_path = os_filename_arg.path.dirname(destfile)
			try:
				parent_stat = os_filename_arg.stat(parent_path)
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
				return False
			# Lazily build (and cache) a map from parent-directory inode
			# to the contents paths that live in that directory, so that
			# symlinked directory paths resolve to the same entry.
			if self._contents_inodes is None:

				if os is _os_merge:
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['merge'],
								errors='strict')
					except UnicodeEncodeError:
						# The package appears to have been merged with a 
						# different value of sys.getfilesystemencoding(),
						# so fall back to utf_8 if appropriate.
						try:
							for x in pkgfiles:
								_unicode_encode(x,
									encoding=_encodings['fs'],
									errors='strict')
						except UnicodeEncodeError:
							pass
						else:
							os = portage.os

				self._contents_inodes = {}
				parent_paths = set()
				for x in pkgfiles:
					p_path = os.path.dirname(x)
					if p_path in parent_paths:
						continue
					parent_paths.add(p_path)
					try:
						s = os.stat(p_path)
					except OSError:
						pass
					else:
						inode_key = (s.st_dev, s.st_ino)
						# Use lists of paths in case multiple
						# paths reference the same inode.
						p_path_list = self._contents_inodes.get(inode_key)
						if p_path_list is None:
							p_path_list = []
							self._contents_inodes[inode_key] = p_path_list
						if p_path not in p_path_list:
							p_path_list.append(p_path)

			# Match by (device, inode) of the parent directory, so a path
			# reached through a symlinked directory still resolves to the
			# recorded contents entry.
			p_path_list = self._contents_inodes.get(
				(parent_stat.st_dev, parent_stat.st_ino))
			if p_path_list:
				for p_path in p_path_list:
					x = os_filename_arg.path.join(p_path, basename)
					if x in pkgfiles:
						return x

		return False
2279
2280         def _linkmap_rebuild(self, **kwargs):
2281                 """
2282                 Rebuild the self._linkmap if it's not broken due to missing
2283                 scanelf binary. Also, return early if preserve-libs is disabled
2284                 and the preserve-libs registry is empty.
2285                 """
2286                 if self._linkmap_broken or \
2287                         self.vartree.dbapi._linkmap is None or \
2288                         self.vartree.dbapi._plib_registry is None or \
2289                         ("preserve-libs" not in self.settings.features and \
2290                         not self.vartree.dbapi._plib_registry.hasEntries()):
2291                         return
2292                 try:
2293                         self.vartree.dbapi._linkmap.rebuild(**kwargs)
2294                 except CommandNotFound as e:
2295                         self._linkmap_broken = True
2296                         self._display_merge(_("!!! Disabling preserve-libs " \
2297                                 "due to error: Command Not Found: %s\n") % (e,),
2298                                 level=logging.ERROR, noiselevel=-1)
2299
	def _find_libs_to_preserve(self, unmerge=False):
		"""
		Get the set of relative paths for libraries to be preserved.
		When unmerge is False, file paths to preserve are selected from
		self._installed_instance (the package being replaced). Otherwise,
		paths are selected from self (the package being unmerged).

		@param unmerge: if True, select candidate paths from self
			instead of self._installed_instance
		@type unmerge: bool
		@rtype: set or None
		@return: set of preserved library paths (relative to ROOT), or
			None when preserve-libs does not apply
		"""
		# Bail out unless preserve-libs is enabled and all required
		# state (linkmap, registry, replaced instance) is available.
		if self._linkmap_broken or \
			self.vartree.dbapi._linkmap is None or \
			self.vartree.dbapi._plib_registry is None or \
			(not unmerge and self._installed_instance is None) or \
			"preserve-libs" not in self.settings.features:
			return None

		os = _os_merge
		linkmap = self.vartree.dbapi._linkmap
		if unmerge:
			installed_instance = self
		else:
			installed_instance = self._installed_instance
		old_contents = installed_instance.getcontents()
		root = self.settings['ROOT']
		root_len = len(root) - 1
		lib_graph = digraph()
		path_node_map = {}

		def path_to_node(path):
			# Map a path to its graph node; alternate paths that compare
			# equal as _LibGraphNode (same underlying object) are merged
			# into a single node via node.alt_paths.
			node = path_node_map.get(path)
			if node is None:
				node = LinkageMap._LibGraphNode(path, root)
				alt_path_node = lib_graph.get(node)
				if alt_path_node is not None:
					node = alt_path_node
				node.alt_paths.add(path)
				path_node_map[path] = node
			return node

		consumer_map = {}
		provider_nodes = set()
		# Create provider nodes and add them to the graph.
		for f_abs in old_contents:

			if os is _os_merge:
				try:
					_unicode_encode(f_abs,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						_unicode_encode(f_abs,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

			f = f_abs[root_len:]
			if not unmerge and self.isowner(f):
				# We have an identically named replacement file,
				# so we don't try to preserve the old copy.
				continue
			try:
				consumers = linkmap.findConsumers(f)
			except KeyError:
				# Not a known shared object; nothing to preserve.
				continue
			if not consumers:
				continue
			provider_node = path_to_node(f)
			lib_graph.add(provider_node, None)
			provider_nodes.add(provider_node)
			consumer_map[provider_node] = consumers

		# Create consumer nodes and add them to the graph.
		# Note that consumers can also be providers.
		for provider_node, consumers in consumer_map.items():
			for c in consumers:
				consumer_node = path_to_node(c)
				if installed_instance.isowner(c) and \
					consumer_node not in provider_nodes:
					# This is not a provider, so it will be uninstalled.
					continue
				lib_graph.add(provider_node, consumer_node)

		# Locate nodes which should be preserved. They consist of all
		# providers that are reachable from consumers that are not
		# providers themselves.
		preserve_nodes = set()
		for consumer_node in lib_graph.root_nodes():
			if consumer_node in provider_nodes:
				continue
			# Preserve all providers that are reachable from this consumer.
			node_stack = lib_graph.child_nodes(consumer_node)
			while node_stack:
				provider_node = node_stack.pop()
				if provider_node in preserve_nodes:
					continue
				preserve_nodes.add(provider_node)
				node_stack.extend(lib_graph.child_nodes(provider_node))

		preserve_paths = set()
		for preserve_node in preserve_nodes:
			# Make sure that at least one of the paths is not a symlink.
			# This prevents symlinks from being erroneously preserved by
			# themselves when the old instance installed symlinks that
			# the new instance does not install.
			have_lib = False
			for f in preserve_node.alt_paths:
				f_abs = os.path.join(root, f.lstrip(os.sep))
				try:
					if stat.S_ISREG(os.lstat(f_abs).st_mode):
						have_lib = True
						break
				except OSError:
					continue

			if have_lib:
				preserve_paths.update(preserve_node.alt_paths)

		return preserve_paths
2421
	def _add_preserve_libs_to_contents(self, preserve_paths):
		"""
		Preserve libs returned from _find_libs_to_preserve().

		Copies the CONTENTS entries for the given paths (plus any needed
		parent directories) from the old installed instance into this
		package's pending CONTENTS file, so this package assumes
		ownership of the preserved files. Paths without a CONTENTS
		record in the old instance are removed from preserve_paths and
		reported as errors.

		@param preserve_paths: paths (relative to ROOT) to preserve;
			mutated in place when entries must be dropped
		@type preserve_paths: set
		"""

		if not preserve_paths:
			return

		os = _os_merge
		showMessage = self._display_merge
		root = self.settings['ROOT']

		# Copy contents entries from the old package to the new one.
		new_contents = self.getcontents().copy()
		old_contents = self._installed_instance.getcontents()
		# Iterate over a sorted copy so removal from preserve_paths
		# below is safe and output order is deterministic.
		for f in sorted(preserve_paths):
			f = _unicode_decode(f,
				encoding=_encodings['content'], errors='strict')
			f_abs = os.path.join(root, f.lstrip(os.sep))
			contents_entry = old_contents.get(f_abs)
			if contents_entry is None:
				# This will probably never happen, but it might if one of the
				# paths returned from findConsumers() refers to one of the libs
				# that should be preserved yet the path is not listed in the
				# contents. Such a path might belong to some other package, so
				# it shouldn't be preserved here.
				showMessage(_("!!! File '%s' will not be preserved "
					"due to missing contents entry\n") % (f_abs,),
					level=logging.ERROR, noiselevel=-1)
				preserve_paths.remove(f)
				continue
			new_contents[f_abs] = contents_entry
			obj_type = contents_entry[0]
			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
				noiselevel=-1)
			# Add parent directories to contents if necessary.
			parent_dir = os.path.dirname(f_abs)
			while len(parent_dir) > len(root):
				new_contents[parent_dir] = ["dir"]
				prev = parent_dir
				parent_dir = os.path.dirname(parent_dir)
				if prev == parent_dir:
					break
		# Atomically write the merged CONTENTS into the temporary db
		# directory and invalidate the cached contents.
		outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
		write_contents(new_contents, root, outfile)
		outfile.close()
		self._clear_contents_cache()
2469
2470         def _find_unused_preserved_libs(self):
2471                 """
2472                 Find preserved libraries that don't have any consumers left.
2473                 """
2474
2475                 if self._linkmap_broken or \
2476                         self.vartree.dbapi._linkmap is None or \
2477                         self.vartree.dbapi._plib_registry is None or \
2478                         not self.vartree.dbapi._plib_registry.hasEntries():
2479                         return {}
2480
2481                 # Since preserved libraries can be consumers of other preserved
2482                 # libraries, use a graph to track consumer relationships.
2483                 plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
2484                 lib_graph = digraph()
2485                 preserved_nodes = set()
2486                 preserved_paths = set()
2487                 path_cpv_map = {}
2488                 path_node_map = {}
2489                 root = self.settings['ROOT']
2490
2491                 def path_to_node(path):
2492                         node = path_node_map.get(path)
2493                         if node is None:
2494                                 node = LinkageMap._LibGraphNode(path, root)
2495                                 alt_path_node = lib_graph.get(node)
2496                                 if alt_path_node is not None:
2497                                         node = alt_path_node
2498                                 node.alt_paths.add(path)
2499                                 path_node_map[path] = node
2500                         return node
2501
2502                 linkmap = self.vartree.dbapi._linkmap
2503                 for cpv, plibs in plib_dict.items():
2504                         for f in plibs:
2505                                 path_cpv_map[f] = cpv
2506                                 preserved_node = path_to_node(f)
2507                                 if not preserved_node.file_exists():
2508                                         continue
2509                                 lib_graph.add(preserved_node, None)
2510                                 preserved_paths.add(f)
2511                                 preserved_nodes.add(preserved_node)
2512                                 for c in self.vartree.dbapi._linkmap.findConsumers(f):
2513                                         consumer_node = path_to_node(c)
2514                                         if not consumer_node.file_exists():
2515                                                 continue
2516                                         # Note that consumers may also be providers.
2517                                         lib_graph.add(preserved_node, consumer_node)
2518
2519                 # Eliminate consumers having providers with the same soname as an
2520                 # installed library that is not preserved. This eliminates
2521                 # libraries that are erroneously preserved due to a move from one
2522                 # directory to another.
2523                 provider_cache = {}
2524                 for preserved_node in preserved_nodes:
2525                         soname = linkmap.getSoname(preserved_node)
2526                         for consumer_node in lib_graph.parent_nodes(preserved_node):
2527                                 if consumer_node in preserved_nodes:
2528                                         continue
2529                                 providers = provider_cache.get(consumer_node)
2530                                 if providers is None:
2531                                         providers = linkmap.findProviders(consumer_node)
2532                                         provider_cache[consumer_node] = providers
2533                                 providers = providers.get(soname)
2534                                 if providers is None:
2535                                         continue
2536                                 for provider in providers:
2537                                         if provider in preserved_paths:
2538                                                 continue
2539                                         provider_node = path_to_node(provider)
2540                                         if not provider_node.file_exists():
2541                                                 continue
2542                                         if provider_node in preserved_nodes:
2543                                                 continue
2544                                         # An alternative provider seems to be
2545                                         # installed, so drop this edge.
2546                                         lib_graph.remove_edge(preserved_node, consumer_node)
2547                                         break
2548
2549                 cpv_lib_map = {}
2550                 while not lib_graph.empty():
2551                         root_nodes = preserved_nodes.intersection(lib_graph.root_nodes())
2552                         if not root_nodes:
2553                                 break
2554                         lib_graph.difference_update(root_nodes)
2555                         unlink_list = set()
2556                         for node in root_nodes:
2557                                 unlink_list.update(node.alt_paths)
2558                         unlink_list = sorted(unlink_list)
2559                         for obj in unlink_list:
2560                                 cpv = path_cpv_map.get(obj)
2561                                 if cpv is None:
2562                                         # This means that a symlink is in the preserved libs
2563                                         # registry, but the actual lib it points to is not.
2564                                         self._display_merge(_("!!! symlink to lib is preserved, "
2565                                                 "but not the lib itself:\n!!! '%s'\n") % (obj,),
2566                                                 level=logging.ERROR, noiselevel=-1)
2567                                         continue
2568                                 removed = cpv_lib_map.get(cpv)
2569                                 if removed is None:
2570                                         removed = set()
2571                                         cpv_lib_map[cpv] = removed
2572                                 removed.add(obj)
2573
2574                 return cpv_lib_map
2575
2576         def _remove_preserved_libs(self, cpv_lib_map):
2577                 """
2578                 Remove files returned from _find_unused_preserved_libs().
2579                 """
2580
2581                 os = _os_merge
2582
2583                 files_to_remove = set()
2584                 for files in cpv_lib_map.values():
2585                         files_to_remove.update(files)
2586                 files_to_remove = sorted(files_to_remove)
2587                 showMessage = self._display_merge
2588                 root = self.settings['ROOT']
2589
2590                 parent_dirs = set()
2591                 for obj in files_to_remove:
2592                         obj = os.path.join(root, obj.lstrip(os.sep))
2593                         parent_dirs.add(os.path.dirname(obj))
2594                         if os.path.islink(obj):
2595                                 obj_type = _("sym")
2596                         else:
2597                                 obj_type = _("obj")
2598                         try:
2599                                 os.unlink(obj)
2600                         except OSError as e:
2601                                 if e.errno != errno.ENOENT:
2602                                         raise
2603                                 del e
2604                         else:
2605                                 showMessage(_("<<< !needed  %s %s\n") % (obj_type, obj),
2606                                         noiselevel=-1)
2607
2608                 # Remove empty parent directories if possible.
2609                 while parent_dirs:
2610                         x = parent_dirs.pop()
2611                         while True:
2612                                 try:
2613                                         os.rmdir(x)
2614                                 except OSError:
2615                                         break
2616                                 prev = x
2617                                 x = os.path.dirname(x)
2618                                 if x == prev:
2619                                         break
2620
2621                 self.vartree.dbapi._plib_registry.pruneNonExisting()
2622
	def _collision_protect(self, srcroot, destroot, mypkglist, mycontents):
			"""
			Check the files in mycontents for collisions with files that
			already exist on disk but are not owned by any package in
			mypkglist and are not config-protected.

			Collisions with registered preserved libraries are collected
			separately: the current package will assume ownership and the
			libraries will be unregistered, so those paths are excluded
			from the normal collision list.

			@param srcroot: image directory of the package being merged
				(unused here; collision checks run against ROOT)
			@param destroot: nominal destination root; NOTE: this
				parameter is shadowed below by self.settings['ROOT']
			@param mypkglist: dblink instances whose ownership excuses a
				collision
			@param mycontents: iterable of paths to check
			@rtype: tuple
			@return: (collisions, plib_collisions) where collisions is a
				list of colliding paths and plib_collisions maps
				cpv -> set of colliding preserved-lib paths
			"""
			os = _os_merge

			collision_ignore = set([normalize_path(myignore) for myignore in \
				portage.util.shlex_split(
				self.settings.get("COLLISION_IGNORE", ""))])

			# For collisions with preserved libraries, the current package
			# will assume ownership and the libraries will be unregistered.
			if self.vartree.dbapi._plib_registry is None:
				# preserve-libs is entirely disabled
				plib_cpv_map = None
				plib_paths = None
				plib_inodes = {}
			else:
				plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
				plib_cpv_map = {}
				plib_paths = set()
				for cpv, paths in plib_dict.items():
					plib_paths.update(paths)
					for f in paths:
						plib_cpv_map[f] = cpv
				# Preserved libs are matched by inode so that hardlinks
				# and alternate paths to the same object are recognized.
				plib_inodes = self._lstat_inode_map(plib_paths)

			plib_collisions = {}

			showMessage = self._display_merge
			stopmerge = False
			collisions = []
			# The destroot parameter is ignored in favor of ROOT.
			destroot = self.settings['ROOT']
			showMessage(_(" %s checking %d files for package collisions\n") % \
				(colorize("GOOD", "*"), len(mycontents)))
			for i, f in enumerate(mycontents):
				# Periodic progress output for large packages.
				if i % 1000 == 0 and i != 0:
					showMessage(_("%d files checked ...\n") % i)

				dest_path = normalize_path(
					os.path.join(destroot, f.lstrip(os.path.sep)))
				try:
					dest_lstat = os.lstat(dest_path)
				except EnvironmentError as e:
					if e.errno == errno.ENOENT:
						# Nothing exists there yet: no collision.
						del e
						continue
					elif e.errno == errno.ENOTDIR:
						del e
						# A non-directory is in a location where this package
						# expects to have a directory.
						dest_lstat = None
						parent_path = dest_path
						while len(parent_path) > len(destroot):
							parent_path = os.path.dirname(parent_path)
							try:
								dest_lstat = os.lstat(parent_path)
								break
							except EnvironmentError as e:
								if e.errno != errno.ENOTDIR:
									raise
								del e
						if not dest_lstat:
							raise AssertionError(
								"unable to find non-directory " + \
								"parent for '%s'" % dest_path)
						# Treat the blocking non-directory itself as the
						# colliding path.
						dest_path = parent_path
						f = os.path.sep + dest_path[len(destroot):]
						if f in collisions:
							continue
					else:
						raise
				if f[0] != "/":
					f="/"+f

				plibs = plib_inodes.get((dest_lstat.st_dev, dest_lstat.st_ino))
				if plibs:
					# Record preserved-lib collisions per owning cpv.
					for path in plibs:
						cpv = plib_cpv_map[path]
						paths = plib_collisions.get(cpv)
						if paths is None:
							paths = set()
							plib_collisions[cpv] = paths
						paths.add(path)
					# The current package will assume ownership and the
					# libraries will be unregistered, so exclude this
					# path from the normal collisions.
					continue

				isowned = False
				full_path = os.path.join(destroot, f.lstrip(os.path.sep))
				for ver in mypkglist:
					if ver.isowner(f):
						isowned = True
						break
				if not isowned and self.isprotected(full_path):
					# Config-protected files are merged safely elsewhere.
					isowned = True
				if not isowned:
					stopmerge = True
					if collision_ignore:
						# COLLISION_IGNORE matches exact paths or any
						# path beneath an ignored directory.
						if f in collision_ignore:
							stopmerge = False
						else:
							for myignore in collision_ignore:
								if f.startswith(myignore + os.path.sep):
									stopmerge = False
									break
					if stopmerge:
						collisions.append(f)
			return collisions, plib_collisions
2731
2732         def _lstat_inode_map(self, path_iter):
2733                 """
2734                 Use lstat to create a map of the form:
2735                   {(st_dev, st_ino) : set([path1, path2, ...])}
2736                 Multiple paths may reference the same inode due to hardlinks.
2737                 All lstat() calls are relative to self.myroot.
2738                 """
2739
2740                 os = _os_merge
2741
2742                 root = self.settings['ROOT']
2743                 inode_map = {}
2744                 for f in path_iter:
2745                         path = os.path.join(root, f.lstrip(os.sep))
2746                         try:
2747                                 st = os.lstat(path)
2748                         except OSError as e:
2749                                 if e.errno not in (errno.ENOENT, errno.ENOTDIR):
2750                                         raise
2751                                 del e
2752                                 continue
2753                         key = (st.st_dev, st.st_ino)
2754                         paths = inode_map.get(key)
2755                         if paths is None:
2756                                 paths = set()
2757                                 inode_map[key] = paths
2758                         paths.add(f)
2759                 return inode_map
2760
2761         def _security_check(self, installed_instances):
2762                 if not installed_instances:
2763                         return 0
2764
2765                 os = _os_merge
2766
2767                 showMessage = self._display_merge
2768
2769                 file_paths = set()
2770                 for dblnk in installed_instances:
2771                         file_paths.update(dblnk.getcontents())
2772                 inode_map = {}
2773                 real_paths = set()
2774                 for i, path in enumerate(file_paths):
2775
2776                         if os is _os_merge:
2777                                 try:
2778                                         _unicode_encode(path,
2779                                                 encoding=_encodings['merge'], errors='strict')
2780                                 except UnicodeEncodeError:
2781                                         # The package appears to have been merged with a 
2782                                         # different value of sys.getfilesystemencoding(),
2783                                         # so fall back to utf_8 if appropriate.
2784                                         try:
2785                                                 _unicode_encode(path,
2786                                                         encoding=_encodings['fs'], errors='strict')
2787                                         except UnicodeEncodeError:
2788                                                 pass
2789                                         else:
2790                                                 os = portage.os
2791
2792                         try:
2793                                 s = os.lstat(path)
2794                         except OSError as e:
2795                                 if e.errno not in (errno.ENOENT, errno.ENOTDIR):
2796                                         raise
2797                                 del e
2798                                 continue
2799                         if not stat.S_ISREG(s.st_mode):
2800                                 continue
2801                         path = os.path.realpath(path)
2802                         if path in real_paths:
2803                                 continue
2804                         real_paths.add(path)
2805                         if s.st_nlink > 1 and \
2806                                 s.st_mode & (stat.S_ISUID | stat.S_ISGID):
2807                                 k = (s.st_dev, s.st_ino)
2808                                 inode_map.setdefault(k, []).append((path, s))
2809                 suspicious_hardlinks = []
2810                 for path_list in inode_map.values():
2811                         path, s = path_list[0]
2812                         if len(path_list) == s.st_nlink:
2813                                 # All hardlinks seem to be owned by this package.
2814                                 continue
2815                         suspicious_hardlinks.append(path_list)
2816                 if not suspicious_hardlinks:
2817                         return 0
2818
2819                 msg = []
2820                 msg.append(_("suid/sgid file(s) "
2821                         "with suspicious hardlink(s):"))
2822                 msg.append("")
2823                 for path_list in suspicious_hardlinks:
2824                         for path, s in path_list:
2825                                 msg.append("\t%s" % path)
2826                 msg.append("")
2827                 msg.append(_("See the Gentoo Security Handbook " 
2828                         "guide for advice on how to proceed."))
2829
2830                 self._eerror("preinst", msg)
2831
2832                 return 1
2833
2834         def _eqawarn(self, phase, lines):
2835                 self._elog("eqawarn", phase, lines)
2836
2837         def _eerror(self, phase, lines):
2838                 self._elog("eerror", phase, lines)
2839
2840         def _elog(self, funcname, phase, lines):
2841                 func = getattr(portage.elog.messages, funcname)
2842                 if self._scheduler is None:
2843                         for l in lines:
2844                                 func(l, phase=phase, key=self.mycpv)
2845                 else:
2846                         background = self.settings.get("PORTAGE_BACKGROUND") == "1"
2847                         log_path = None
2848                         if self.settings.get("PORTAGE_BACKGROUND") != "subprocess":
2849                                 log_path = self.settings.get("PORTAGE_LOG_FILE")
2850                         out = portage.StringIO()
2851                         for line in lines:
2852                                 func(line, phase=phase, key=self.mycpv, out=out)
2853                         msg = out.getvalue()
2854                         self._scheduler.output(msg,
2855                                 background=background, log_path=log_path)
2856
2857         def _elog_process(self, phasefilter=None):
2858                 cpv = self.mycpv
2859                 if self._pipe is None:
2860                         elog_process(cpv, self.settings, phasefilter=phasefilter)
2861                 else:
2862                         logdir = os.path.join(self.settings["T"], "logging")
2863                         ebuild_logentries = collect_ebuild_messages(logdir)
2864                         py_logentries = collect_messages(key=cpv).get(cpv, {})
2865                         logentries = _merge_logentries(py_logentries, ebuild_logentries)
2866                         funcnames = {
2867                                 "INFO": "einfo",
2868                                 "LOG": "elog",
2869                                 "WARN": "ewarn",
2870                                 "QA": "eqawarn",
2871                                 "ERROR": "eerror"
2872                         }
2873                         str_buffer = []
2874                         for phase, messages in logentries.items():
2875                                 for key, lines in messages:
2876                                         funcname = funcnames[key]
2877                                         if isinstance(lines, basestring):
2878                                                 lines = [lines]
2879                                         for line in lines:
2880                                                 fields = (funcname, phase, cpv, line.rstrip('\n'))
2881                                                 str_buffer.append(' '.join(fields))
2882                                                 str_buffer.append('\n')
2883                         if str_buffer:
2884                                 os.write(self._pipe, _unicode_encode(''.join(str_buffer)))
2885
2886         def treewalk(self, srcroot, destroot, inforoot, myebuild, cleanup=0,
2887                 mydbapi=None, prev_mtimes=None, counter=None):
2888                 """
2889                 
2890                 This function does the following:
2891                 
2892                 calls self._preserve_libs if FEATURES=preserve-libs
2893                 calls self._collision_protect if FEATURES=collision-protect
2894                 calls doebuild(mydo=pkg_preinst)
2895                 Merges the package to the livefs
2896                 unmerges old version (if required)
2897                 calls doebuild(mydo=pkg_postinst)
2898                 calls env_update
2899                 
2900                 @param srcroot: Typically this is ${D}
2901                 @type srcroot: String (Path)
2902                 @param destroot: ignored, self.settings['ROOT'] is used instead
2903                 @type destroot: String (Path)
2904                 @param inforoot: root of the vardb entry ?
2905                 @type inforoot: String (Path)
2906                 @param myebuild: path to the ebuild that we are processing
2907                 @type myebuild: String (Path)
2908                 @param mydbapi: dbapi which is handed to doebuild.
2909                 @type mydbapi: portdbapi instance
2910                 @param prev_mtimes: { Filename:mtime } mapping for env_update
2911                 @type prev_mtimes: Dictionary
2912                 @rtype: Boolean
2913                 @returns:
2914                 1. 0 on success
2915                 2. 1 on failure
2916                 
2917                 secondhand is a list of symlinks that have been skipped due to their target
2918                 not existing; we will merge these symlinks at a later time.
2919                 """
2920
2921                 os = _os_merge
2922
2923                 srcroot = _unicode_decode(srcroot,
2924                         encoding=_encodings['content'], errors='strict')
2925                 destroot = self.settings['ROOT']
2926                 inforoot = _unicode_decode(inforoot,
2927                         encoding=_encodings['content'], errors='strict')
2928                 myebuild = _unicode_decode(myebuild,
2929                         encoding=_encodings['content'], errors='strict')
2930
2931                 showMessage = self._display_merge
2932                 scheduler = self._scheduler
2933
2934                 srcroot = normalize_path(srcroot).rstrip(os.path.sep) + os.path.sep
2935
2936                 if not os.path.isdir(srcroot):
2937                         showMessage(_("!!! Directory Not Found: D='%s'\n") % srcroot,
2938                                 level=logging.ERROR, noiselevel=-1)
2939                         return 1
2940
2941                 slot = ''
2942                 for var_name in ('CHOST', 'SLOT'):
2943                         if var_name == 'CHOST' and self.cat == 'virtual':
2944                                 try:
2945                                         os.unlink(os.path.join(inforoot, var_name))
2946                                 except OSError:
2947                                         pass
2948                                 continue
2949
2950                         try:
2951                                 val = codecs.open(_unicode_encode(
2952                                         os.path.join(inforoot, var_name),
2953                                         encoding=_encodings['fs'], errors='strict'),
2954                                         mode='r', encoding=_encodings['repo.content'],
2955                                         errors='replace').readline().strip()
2956                         except EnvironmentError as e:
2957                                 if e.errno != errno.ENOENT:
2958                                         raise
2959                                 del e
2960                                 val = ''
2961
2962                         if var_name == 'SLOT':
2963                                 slot = val
2964
2965                                 if not slot.strip():
2966                                         slot = self.settings.get(var_name, '')
2967                                         if not slot.strip():
2968                                                 showMessage(_("!!! SLOT is undefined\n"),
2969                                                         level=logging.ERROR, noiselevel=-1)
2970                                                 return 1
2971                                         write_atomic(os.path.join(inforoot, var_name), slot + '\n')
2972
2973                         if val != self.settings.get(var_name, ''):
2974                                 self._eqawarn('preinst',
2975                                         [_("QA Notice: Expected %(var_name)s='%(expected_value)s', got '%(actual_value)s'\n") % \
2976                                         {"var_name":var_name, "expected_value":self.settings.get(var_name, ''), "actual_value":val}])
2977
2978                 def eerror(lines):
2979                         self._eerror("preinst", lines)
2980
2981                 if not os.path.exists(self.dbcatdir):
2982                         ensure_dirs(self.dbcatdir)
2983
2984                 otherversions = []
2985                 for v in self.vartree.dbapi.cp_list(self.mysplit[0]):
2986                         otherversions.append(v.split("/")[1])
2987
2988                 cp = self.mysplit[0]
2989                 slot_atom = "%s:%s" % (cp, slot)
2990
2991                 # filter any old-style virtual matches
2992                 slot_matches = [cpv for cpv in self.vartree.dbapi.match(slot_atom) \
2993                         if cpv_getkey(cpv) == cp]
2994
2995                 if self.mycpv not in slot_matches and \
2996                         self.vartree.dbapi.cpv_exists(self.mycpv):
2997                         # handle multislot or unapplied slotmove
2998                         slot_matches.append(self.mycpv)
2999
3000                 others_in_slot = []
3001                 from portage import config
3002                 for cur_cpv in slot_matches:
3003                         # Clone the config in case one of these has to be unmerged since
3004                         # we need it to have private ${T} etc... for things like elog.
3005                         others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
3006                                 settings=config(clone=self.settings),
3007                                 vartree=self.vartree, treetype="vartree",
3008                                 scheduler=self._scheduler, pipe=self._pipe))
3009
3010                 retval = self._security_check(others_in_slot)
3011                 if retval:
3012                         return retval
3013
3014                 self.settings["REPLACING_VERSIONS"] = " ".join( 
3015                         [portage.versions.cpv_getversion(other.mycpv) for other in others_in_slot] )
3016                 self.settings.backup_changes("REPLACING_VERSIONS")
3017
3018                 if slot_matches:
3019                         # Used by self.isprotected().
3020                         max_dblnk = None
3021                         max_counter = -1
3022                         for dblnk in others_in_slot:
3023                                 cur_counter = self.vartree.dbapi.cpv_counter(dblnk.mycpv)
3024                                 if cur_counter > max_counter:
3025                                         max_counter = cur_counter
3026                                         max_dblnk = dblnk
3027                         self._installed_instance = max_dblnk
3028
3029                 # We check for unicode encoding issues after src_install. However,
3030                 # the check must be repeated here for binary packages (it's
3031                 # inexpensive since we call os.walk() here anyway).
3032                 unicode_errors = []
3033
3034                 while True:
3035
3036                         unicode_error = False
3037
3038                         myfilelist = []
3039                         mylinklist = []
3040                         paths_with_newlines = []
3041                         srcroot_len = len(srcroot)
3042                         def onerror(e):
3043                                 raise
3044                         for parent, dirs, files in os.walk(srcroot, onerror=onerror):
3045                                 try:
3046                                         parent = _unicode_decode(parent,
3047                                                 encoding=_encodings['merge'], errors='strict')
3048                                 except UnicodeDecodeError:
3049                                         new_parent = _unicode_decode(parent,
3050                                                 encoding=_encodings['merge'], errors='replace')
3051                                         new_parent = _unicode_encode(new_parent,
3052                                                 encoding=_encodings['merge'], errors='backslashreplace')
3053                                         new_parent = _unicode_decode(new_parent,
3054                                                 encoding=_encodings['merge'], errors='replace')
3055                                         os.rename(parent, new_parent)
3056                                         unicode_error = True
3057                                         unicode_errors.append(new_parent[srcroot_len:])
3058                                         break
3059
3060                                 for fname in files:
3061                                         try:
3062                                                 fname = _unicode_decode(fname,
3063                                                         encoding=_encodings['merge'], errors='strict')
3064                                         except UnicodeDecodeError:
3065                                                 fpath = portage._os.path.join(
3066                                                         parent.encode(_encodings['merge']), fname)
3067                                                 new_fname = _unicode_decode(fname,
3068                                                         encoding=_encodings['merge'], errors='replace')
3069                                                 new_fname = _unicode_encode(new_fname,
3070                                                         encoding=_encodings['merge'], errors='backslashreplace')
3071                                                 new_fname = _unicode_decode(new_fname,
3072                                                         encoding=_encodings['merge'], errors='replace')
3073                                                 new_fpath = os.path.join(parent, new_fname)
3074                                                 os.rename(fpath, new_fpath)
3075                                                 unicode_error = True
3076                                                 unicode_errors.append(new_fpath[srcroot_len:])
3077                                                 fname = new_fname
3078                                                 fpath = new_fpath
3079                                         else:
3080                                                 fpath = os.path.join(parent, fname)
3081
3082                                         relative_path = fpath[srcroot_len:]
3083
3084                                         if "\n" in relative_path:
3085                                                 paths_with_newlines.append(relative_path)
3086
3087                                         file_mode = os.lstat(fpath).st_mode
3088                                         if stat.S_ISREG(file_mode):
3089                                                 myfilelist.append(relative_path)
3090                                         elif stat.S_ISLNK(file_mode):
3091                                                 # Note: os.walk puts symlinks to directories in the "dirs"
3092                                                 # list and it does not traverse them since that could lead
3093                                                 # to an infinite recursion loop.
3094                                                 mylinklist.append(relative_path)
3095
3096                                 if unicode_error:
3097                                         break
3098
3099                         if not unicode_error:
3100                                 break
3101
3102                 if unicode_errors:
3103                         eerror(portage._merge_unicode_error(unicode_errors))
3104
3105                 if paths_with_newlines:
3106                         msg = []
3107                         msg.append(_("This package installs one or more files containing a newline (\\n) character:"))
3108                         msg.append("")
3109                         paths_with_newlines.sort()
3110                         for f in paths_with_newlines:
3111                                 msg.append("\t/%s" % (f.replace("\n", "\\n")))
3112                         msg.append("")
3113                         msg.append(_("package %s NOT merged") % self.mycpv)
3114                         msg.append("")
3115                         eerror(msg)
3116                         return 1
3117
3118                 # If there are no files to merge, and an installed package in the same
3119                 # slot has files, it probably means that something went wrong.
3120                 if self.settings.get("PORTAGE_PACKAGE_EMPTY_ABORT") == "1" and \
3121                         not myfilelist and not mylinklist and others_in_slot:
3122                         installed_files = None
3123                         for other_dblink in others_in_slot:
3124                                 installed_files = other_dblink.getcontents()
3125                                 if not installed_files:
3126                                         continue
3127                                 from textwrap import wrap
3128                                 wrap_width = 72
3129                                 msg = []
3130                                 d = {
3131                                         "new_cpv":self.mycpv,
3132                                         "old_cpv":other_dblink.mycpv
3133                                 }
3134                                 msg.extend(wrap(_("The '%(new_cpv)s' package will not install "
3135                                         "any files, but the currently installed '%(old_cpv)s'"
3136                                         " package has the following files: ") % d, wrap_width))
3137                                 msg.append("")
3138                                 msg.extend(sorted(installed_files))
3139                                 msg.append("")
3140                                 msg.append(_("package %s NOT merged") % self.mycpv)
3141                                 msg.append("")
3142                                 msg.extend(wrap(
3143                                         _("Manually run `emerge --unmerge =%s` if you "
3144                                         "really want to remove the above files. Set "
3145                                         "PORTAGE_PACKAGE_EMPTY_ABORT=\"0\" in "
3146                                         "/etc/make.conf if you do not want to "
3147                                         "abort in cases like this.") % other_dblink.mycpv,
3148                                         wrap_width))
3149                                 eerror(msg)
3150                         if installed_files:
3151                                 return 1
3152
3153                 # check for package collisions
3154                 blockers = self._blockers
3155                 if blockers is None:
3156                         blockers = []
3157                 collisions, plib_collisions = \
3158                         self._collision_protect(srcroot, destroot,
3159                         others_in_slot + blockers, myfilelist + mylinklist)
3160
3161                 # Make sure the ebuild environment is initialized and that ${T}/elog
3162                 # exists for logging of collision-protect eerror messages.
3163                 if myebuild is None:
3164                         myebuild = os.path.join(inforoot, self.pkg + ".ebuild")
3165                 doebuild_environment(myebuild, "preinst",
3166                         settings=self.settings, db=mydbapi)
3167                 prepare_build_dirs(settings=self.settings, cleanup=cleanup)
3168
3169                 if collisions:
3170                         collision_protect = "collision-protect" in self.settings.features
3171                         protect_owned = "protect-owned" in self.settings.features
3172                         msg = _("This package will overwrite one or more files that"
3173                         " may belong to other packages (see list below).")
3174                         if not (collision_protect or protect_owned):
3175                                 msg += _(" Add either \"collision-protect\" or" 
3176                                 " \"protect-owned\" to FEATURES in"
3177                                 " make.conf if you would like the merge to abort"
3178                                 " in cases like this. See the make.conf man page for"
3179                                 " more information about these features.")
3180                         if self.settings.get("PORTAGE_QUIET") != "1":
3181                                 msg += _(" You can use a command such as"
3182                                 " `portageq owners / <filename>` to identify the"
3183                                 " installed package that owns a file. If portageq"
3184                                 " reports that only one package owns a file then do NOT"
3185                                 " file a bug report. A bug report is only useful if it"
3186                                 " identifies at least two or more packages that are known"
3187                                 " to install the same file(s)."
3188                                 " If a collision occurs and you"
3189                                 " can not explain where the file came from then you"
3190                                 " should simply ignore the collision since there is not"
3191                                 " enough information to determine if a real problem"
3192                                 " exists. Please do NOT file a bug report at"
3193                                 " http://bugs.gentoo.org unless you report exactly which"
3194                                 " two packages install the same file(s). Once again,"
3195                                 " please do NOT file a bug report unless you have"
3196                                 " completely understood the above message.")
3197
3198                         self.settings["EBUILD_PHASE"] = "preinst"
3199                         from textwrap import wrap
3200                         msg = wrap(msg, 70)
3201                         if collision_protect:
3202                                 msg.append("")
3203                                 msg.append(_("package %s NOT merged") % self.settings.mycpv)
3204                         msg.append("")
3205                         msg.append(_("Detected file collision(s):"))
3206                         msg.append("")
3207
3208                         for f in collisions:
3209                                 msg.append("\t%s" % \
3210                                         os.path.join(destroot, f.lstrip(os.path.sep)))
3211
3212                         eerror(msg)
3213
3214                         owners = None
3215                         if collision_protect or protect_owned:
3216                                 msg = []
3217                                 msg.append("")
3218                                 msg.append(_("Searching all installed"
3219                                         " packages for file collisions..."))
3220                                 msg.append("")
3221                                 msg.append(_("Press Ctrl-C to Stop"))
3222                                 msg.append("")
3223                                 eerror(msg)
3224
3225                                 if len(collisions) > 20:
3226                                         # get_owners is slow for large numbers of files, so
3227                                         # don't look them all up.
3228                                         collisions = collisions[:20]
3229                                 self.lockdb()
3230                                 try:
3231                                         owners = self.vartree.dbapi._owners.get_owners(collisions)
3232                                         self.vartree.dbapi.flush_cache()
3233                                 finally:
3234                                         self.unlockdb()
3235
3236                                 for pkg, owned_files in owners.items():
3237                                         cpv = pkg.mycpv
3238                                         msg = []
3239                                         msg.append("%s" % cpv)
3240                                         for f in sorted(owned_files):
3241                                                 msg.append("\t%s" % os.path.join(destroot,
3242                                                         f.lstrip(os.path.sep)))
3243                                         msg.append("")
3244                                         eerror(msg)
3245
3246                                 if not owners:
3247                                         eerror([_("None of the installed"
3248                                                 " packages claim the file(s)."), ""])
3249
3250                         # The explanation about the collision and how to solve
3251                         # it may not be visible via a scrollback buffer, especially
3252                         # if the number of file collisions is large. Therefore,
3253                         # show a summary at the end.
3254                         if collision_protect:
3255                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3256                                         self.settings.mycpv
3257                         elif protect_owned and owners:
3258                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3259                                         self.settings.mycpv
3260                         else:
3261                                 msg = _("Package '%s' merged despite file collisions.") % \
3262                                         self.settings.mycpv
3263                         msg += _(" If necessary, refer to your elog "
3264                                 "messages for the whole content of the above message.")
3265                         eerror(wrap(msg, 70))
3266
3267                         if collision_protect or (protect_owned and owners):
3268                                 return 1
3269
3270                 # The merge process may move files out of the image directory,
3271                 # which causes invalidation of the .installed flag.
3272                 try:
3273                         os.unlink(os.path.join(
3274                                 os.path.dirname(normalize_path(srcroot)), ".installed"))
3275                 except OSError as e:
3276                         if e.errno != errno.ENOENT:
3277                                 raise
3278                         del e
3279
3280                 self.dbdir = self.dbtmpdir
3281                 self.delete()
3282                 ensure_dirs(self.dbtmpdir)
3283
3284                 # run preinst script
3285                 if scheduler is None:
3286                         showMessage(_(">>> Merging %(cpv)s to %(destroot)s\n") % {"cpv":self.mycpv, "destroot":destroot})
3287                         a = _spawn_phase("preinst", self.settings)
3288                 else:
3289                         a = scheduler.dblinkEbuildPhase(
3290                                 self, mydbapi, myebuild, "preinst")
3291
3292                 # XXX: Decide how to handle failures here.
3293                 if a != os.EX_OK:
3294                         showMessage(_("!!! FAILED preinst: ")+str(a)+"\n",
3295                                 level=logging.ERROR, noiselevel=-1)
3296                         return a
3297
3298                 # copy "info" files (like SLOT, CFLAGS, etc.) into the database
3299                 for x in os.listdir(inforoot):
3300                         self.copyfile(inforoot+"/"+x)
3301
3302                 # write local package counter for recording
3303                 if counter is None:
3304                         counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv)
3305                 codecs.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'),
3306                         encoding=_encodings['fs'], errors='strict'),
3307                         'w', encoding=_encodings['repo.content'], errors='backslashreplace'
3308                         ).write(str(counter))
3309
3310                 self.updateprotect()
3311
3312                 #if we have a file containing previously-merged config file md5sums, grab it.
3313                 conf_mem_file = os.path.join(self._eroot, CONFIG_MEMORY_FILE)
3314                 conf_mem_lock = lockfile(conf_mem_file)
3315                 try:
3316                         cfgfiledict = grabdict(conf_mem_file)
3317                         if "NOCONFMEM" in self.settings:
3318                                 cfgfiledict["IGNORE"]=1
3319                         else:
3320                                 cfgfiledict["IGNORE"]=0
3321
3322                         # Always behave like --noconfmem is enabled for downgrades
3323                         # so that people who don't know about this option are less
3324                         # likely to get confused when doing upgrade/downgrade cycles.
3325                         pv_split = catpkgsplit(self.mycpv)[1:]
3326                         for other in others_in_slot:
3327                                 if pkgcmp(pv_split, catpkgsplit(other.mycpv)[1:]) < 0:
3328                                         cfgfiledict["IGNORE"] = 1
3329                                         break
3330
3331                         rval = self._merge_contents(srcroot, destroot, cfgfiledict,
3332                                 conf_mem_file)
3333                         if rval != os.EX_OK:
3334                                 return rval
3335                 finally:
3336                         unlockfile(conf_mem_lock)
3337
3338                 # These caches are populated during collision-protect and the data
3339                 # they contain is now invalid. It's very important to invalidate
3340                 # the contents_inodes cache so that FEATURES=unmerge-orphans
3341                 # doesn't unmerge anything that belongs to this package that has
3342                 # just been merged.
3343                 for dblnk in others_in_slot:
3344                         dblnk._clear_contents_cache()
3345                 self._clear_contents_cache()
3346
3347                 linkmap = self.vartree.dbapi._linkmap
3348                 plib_registry = self.vartree.dbapi._plib_registry
3349                 # We initialize preserve_paths to an empty set rather
3350                 # than None here because it plays an important role
3351                 # in prune_plib_registry logic by serving to indicate
3352                 # that we have a replacement for a package that's
3353                 # being unmerged.
3354
3355                 preserve_paths = set()
3356                 needed = None
3357                 if not (linkmap is None or plib_registry is None):
3358                         plib_registry.lock()
3359                         try:
3360                                 plib_registry.load()
3361                                 needed = os.path.join(inforoot, linkmap._needed_aux_key)
3362                                 self._linkmap_rebuild(include_file=needed)
3363
3364                                 # Preserve old libs if they are still in use
3365                                 # TODO: Handle cases where the previous instance
3366                                 # has already been uninstalled but it still has some
3367                                 # preserved libraries in the registry that we may
3368                                 # want to preserve here.
3369                                 preserve_paths = self._find_libs_to_preserve()
3370                         finally:
3371                                 plib_registry.unlock()
3372
3373                         if preserve_paths:
3374                                 self._add_preserve_libs_to_contents(preserve_paths)
3375
3376                 # If portage is reinstalling itself, remove the old
3377                 # version now since we want to use the temporary
3378                 # PORTAGE_BIN_PATH that will be removed when we return.
3379                 reinstall_self = False
3380                 if self.myroot == "/" and \
3381                         match_from_list(PORTAGE_PACKAGE_ATOM, [self.mycpv]):
3382                         reinstall_self = True
3383
3384                 if scheduler is None:
3385                         def emerge_log(msg):
3386                                 pass
3387                 else:
3388                         emerge_log = scheduler.dblinkEmergeLog
3389
3390                 autoclean = self.settings.get("AUTOCLEAN", "yes") == "yes"
3391
3392                 if autoclean:
3393                         emerge_log(_(" >>> AUTOCLEAN: %s") % (slot_atom,))
3394
3395                 others_in_slot.append(self)  # self has just been merged
3396                 for dblnk in list(others_in_slot):
3397                         if dblnk is self:
3398                                 continue
3399                         if not (autoclean or dblnk.mycpv == self.mycpv or reinstall_self):
3400                                 continue
3401                         showMessage(_(">>> Safely unmerging already-installed instance...\n"))
3402                         emerge_log(_(" === Unmerging... (%s)") % (dblnk.mycpv,))
3403                         others_in_slot.remove(dblnk) # dblnk will unmerge itself now
3404                         dblnk._linkmap_broken = self._linkmap_broken
3405                         dblnk.settings["REPLACED_BY_VERSION"] = portage.versions.cpv_getversion(self.mycpv)
3406                         dblnk.settings.backup_changes("REPLACED_BY_VERSION")
3407                         unmerge_rval = dblnk.unmerge(ldpath_mtimes=prev_mtimes,
3408                                 others_in_slot=others_in_slot, needed=needed,
3409                                 preserve_paths=preserve_paths)
3410                         dblnk.settings.pop("REPLACED_BY_VERSION", None)
3411
3412                         if unmerge_rval == os.EX_OK:
3413                                 emerge_log(_(" >>> unmerge success: %s") % (dblnk.mycpv,))
3414                         else:
3415                                 emerge_log(_(" !!! unmerge FAILURE: %s") % (dblnk.mycpv,))
3416
3417                         self.lockdb()
3418                         try:
3419                                 # TODO: Check status and abort if necessary.
3420                                 dblnk.delete()
3421                         finally:
3422                                 self.unlockdb()
3423                         showMessage(_(">>> Original instance of package unmerged safely.\n"))
3424
3425                 if len(others_in_slot) > 1:
3426                         showMessage(colorize("WARN", _("WARNING:"))
3427                                 + _(" AUTOCLEAN is disabled.  This can cause serious"
3428                                 " problems due to overlapping packages.\n"),
3429                                 level=logging.WARN, noiselevel=-1)
3430
3431                 # We hold both directory locks.
3432                 self.dbdir = self.dbpkgdir
3433                 self.lockdb()
3434                 try:
3435                         self.delete()
3436                         _movefile(self.dbtmpdir, self.dbpkgdir, mysettings=self.settings)
3437                 finally:
3438                         self.unlockdb()
3439
3440                 # Check for file collisions with blocking packages
3441                 # and remove any colliding files from their CONTENTS
3442                 # since they now belong to this package.
3443                 self._clear_contents_cache()
3444                 contents = self.getcontents()
3445                 destroot_len = len(destroot) - 1
3446                 self.lockdb()
3447                 try:
3448                         for blocker in blockers:
3449                                 self.vartree.dbapi.removeFromContents(blocker, iter(contents),
3450                                         relative_paths=False)
3451                 finally:
3452                         self.lockdb()
3453
3454                 plib_registry = self.vartree.dbapi._plib_registry
3455                 if plib_registry:
3456                         plib_registry.lock()
3457                         try:
3458                                 plib_registry.load()
3459
3460                                 if preserve_paths:
3461                                         # keep track of the libs we preserved
3462                                         plib_registry.register(self.mycpv, slot, counter,
3463                                                 sorted(preserve_paths))
3464
3465                                 # Unregister any preserved libs that this package has overwritten
3466                                 # and update the contents of the packages that owned them.
3467                                 plib_dict = plib_registry.getPreservedLibs()
3468                                 for cpv, paths in plib_collisions.items():
3469                                         if cpv not in plib_dict:
3470                                                 continue
3471                                         has_vdb_entry = False
3472                                         if cpv != self.mycpv:
3473                                                 # If we've replaced another instance with the
3474                                                 # same cpv then the vdb entry no longer belongs
3475                                                 # to it, so we'll have to get the slot and counter
3476                                                 # from plib_registry._data instead.
3477                                                 try:
3478                                                         slot, counter = self.vartree.dbapi.aux_get(
3479                                                                 cpv, ["SLOT", "COUNTER"])
3480                                                         has_vdb_entry = True
3481                                                 except KeyError:
3482                                                         pass
3483
3484                                         if not has_vdb_entry:
3485                                                 # It's possible for previously unmerged packages
3486                                                 # to have preserved libs in the registry, so try
3487                                                 # to retrieve the slot and counter from there.
3488                                                 has_registry_entry = False
3489                                                 for plib_cps, (plib_cpv, plib_counter, plib_paths) in \
3490                                                         plib_registry._data.items():
3491                                                         if plib_cpv != cpv:
3492                                                                 continue
3493                                                         try:
3494                                                                 cp, slot = plib_cps.split(":", 1)
3495                                                         except ValueError:
3496                                                                 continue
3497                                                         counter = plib_counter
3498                                                         has_registry_entry = True
3499                                                         break
3500
3501                                                 if not has_registry_entry:
3502                                                         continue
3503
3504                                         remaining = [f for f in plib_dict[cpv] if f not in paths]
3505                                         plib_registry.register(cpv, slot, counter, remaining)
3506                                         if has_vdb_entry:
3507                                                 self.vartree.dbapi.removeFromContents(cpv, paths)
3508
3509                                 plib_registry.store()
3510                         finally:
3511                                 plib_registry.unlock()
3512
3513                 self.vartree.dbapi._add(self)
3514                 contents = self.getcontents()
3515
3516                 #do postinst script
3517                 self.settings["PORTAGE_UPDATE_ENV"] = \
3518                         os.path.join(self.dbpkgdir, "environment.bz2")
3519                 self.settings.backup_changes("PORTAGE_UPDATE_ENV")
3520                 try:
3521                         if scheduler is None:
3522                                 a = _spawn_phase("postinst", self.settings)
3523                                 if a == os.EX_OK:
3524                                         showMessage(_(">>> %s merged.\n") % self.mycpv)
3525                         else:
3526                                 a = scheduler.dblinkEbuildPhase(
3527                                         self, mydbapi, myebuild, "postinst")
3528                 finally:
3529                         self.settings.pop("PORTAGE_UPDATE_ENV", None)
3530
3531                 if a != os.EX_OK:
3532                         # It's stupid to bail out here, so keep going regardless of
3533                         # phase return code.
3534                         showMessage(_("!!! FAILED postinst: ")+str(a)+"\n",
3535                                 level=logging.ERROR, noiselevel=-1)
3536
3537                 downgrade = False
3538                 for v in otherversions:
3539                         if pkgcmp(catpkgsplit(self.pkg)[1:], catpkgsplit(v)[1:]) < 0:
3540                                 downgrade = True
3541
3542                 # Lock the config memory file to prevent symlink creation
3543                 # in merge_contents from overlapping with env-update.
3544                 conf_mem_file = os.path.join(self._eroot, CONFIG_MEMORY_FILE)
3545                 conf_mem_lock = lockfile(conf_mem_file)
3546                 try:
3547                         #update environment settings, library paths. DO NOT change symlinks.
3548                         env_update(makelinks=(not downgrade),
3549                                 target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes,
3550                                 contents=contents, env=self.settings.environ(),
3551                                 writemsg_level=self._display_merge)
3552                 finally:
3553                         unlockfile(conf_mem_lock)
3554
3555                 # For gcc upgrades, preserved libs have to be removed after the
3556                 # library path has been updated.
3557                 self._prune_plib_registry()
3558
3559                 return os.EX_OK
3560
3561         def _new_backup_path(self, p):
3562                 """
3563                 The works for any type path, such as a regular file, symlink,
3564                 or directory. The parent directory is assumed to exist.
3565                 The returned filename is of the form p + '.backup.' + x, where
3566                 x guarantees that the returned path does not exist yet.
3567                 """
3568                 os = _os_merge
3569
3570                 x = -1
3571                 while True:
3572                         x += 1
3573                         backup_p = p + '.backup.' + str(x).rjust(4, '0')
3574                         try:
3575                                 os.lstat(backup_p)
3576                         except OSError:
3577                                 break
3578
3579                 return backup_p
3580
3581         def _merge_contents(self, srcroot, destroot, cfgfiledict, conf_mem_file):
3582
3583                 cfgfiledict_orig = cfgfiledict.copy()
3584
3585                 # open CONTENTS file (possibly overwriting old one) for recording
3586                 outfile = codecs.open(_unicode_encode(
3587                         os.path.join(self.dbtmpdir, 'CONTENTS'),
3588                         encoding=_encodings['fs'], errors='strict'),
3589                         mode='w', encoding=_encodings['repo.content'],
3590                         errors='backslashreplace')
3591
3592                 # Don't bump mtimes on merge since some application require
3593                 # preservation of timestamps.  This means that the unmerge phase must
3594                 # check to see if file belongs to an installed instance in the same
3595                 # slot.
3596                 mymtime = None
3597
3598                 # set umask to 0 for merging; back up umask, save old one in prevmask (since this is a global change)
3599                 prevmask = os.umask(0)
3600                 secondhand = []
3601
3602                 # we do a first merge; this will recurse through all files in our srcroot but also build up a
3603                 # "second hand" of symlinks to merge later
3604                 if self.mergeme(srcroot, destroot, outfile, secondhand, "", cfgfiledict, mymtime):
3605                         return 1
3606
3607                 # now, it's time for dealing our second hand; we'll loop until we can't merge anymore.  The rest are
3608                 # broken symlinks.  We'll merge them too.
3609                 lastlen = 0
3610                 while len(secondhand) and len(secondhand)!=lastlen:
3611                         # clear the thirdhand.  Anything from our second hand that
3612                         # couldn't get merged will be added to thirdhand.
3613
3614                         thirdhand = []
3615                         if self.mergeme(srcroot, destroot, outfile, thirdhand,
3616                                 secondhand, cfgfiledict, mymtime):
3617                                 return 1
3618
3619                         #swap hands
3620                         lastlen = len(secondhand)
3621
3622                         # our thirdhand now becomes our secondhand.  It's ok to throw
3623                         # away secondhand since thirdhand contains all the stuff that
3624                         # couldn't be merged.
3625                         secondhand = thirdhand
3626
3627                 if len(secondhand):
3628                         # force merge of remaining symlinks (broken or circular; oh well)
3629                         if self.mergeme(srcroot, destroot, outfile, None,
3630                                 secondhand, cfgfiledict, mymtime):
3631                                 return 1
3632
3633                 #restore umask
3634                 os.umask(prevmask)
3635
3636                 #if we opened it, close it
3637                 outfile.flush()
3638                 outfile.close()
3639
3640                 # write out our collection of md5sums
3641                 if cfgfiledict != cfgfiledict_orig:
3642                         cfgfiledict.pop("IGNORE", None)
3643                         ensure_dirs(os.path.dirname(conf_mem_file),
3644                                 gid=portage_gid, mode=0o2750, mask=0o2)
3645                         writedict(cfgfiledict, conf_mem_file)
3646
3647                 return os.EX_OK
3648
3649         def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime):
3650                 """
3651                 
3652                 This function handles actual merging of the package contents to the livefs.
3653                 It also handles config protection.
3654                 
3655                 @param srcroot: Where are we copying files from (usually ${D})
3656                 @type srcroot: String (Path)
3657                 @param destroot: Typically ${ROOT}
3658                 @type destroot: String (Path)
3659                 @param outfile: File to log operations to
3660                 @type outfile: File Object
3661                 @param secondhand: A set of items to merge in pass two (usually
3662                 or symlinks that point to non-existing files that may get merged later)
3663                 @type secondhand: List
3664                 @param stufftomerge: Either a directory to merge, or a list of items.
3665                 @type stufftomerge: String or List
3666                 @param cfgfiledict: { File:mtime } mapping for config_protected files
3667                 @type cfgfiledict: Dictionary
3668                 @param thismtime: The current time (typically long(time.time())
3669                 @type thismtime: Long
3670                 @rtype: None or Boolean
3671                 @returns:
3672                 1. True on failure
3673                 2. None otherwise
3674                 
3675                 """
3676
3677                 showMessage = self._display_merge
3678                 writemsg = self._display_merge
3679
3680                 os = _os_merge
3681                 sep = os.sep
3682                 join = os.path.join
3683                 srcroot = normalize_path(srcroot).rstrip(sep) + sep
3684                 destroot = normalize_path(destroot).rstrip(sep) + sep
3685                 calc_prelink = "prelink-checksums" in self.settings.features
3686
3687                 # this is supposed to merge a list of files.  There will be 2 forms of argument passing.
3688                 if isinstance(stufftomerge, basestring):
3689                         #A directory is specified.  Figure out protection paths, listdir() it and process it.
3690                         mergelist = os.listdir(join(srcroot, stufftomerge))
3691                         offset = stufftomerge
3692                 else:
3693                         mergelist = stufftomerge
3694                         offset = ""
3695
3696                 for i, x in enumerate(mergelist):
3697
3698                         mysrc = join(srcroot, offset, x)
3699                         mydest = join(destroot, offset, x)
3700                         # myrealdest is mydest without the $ROOT prefix (makes a difference if ROOT!="/")
3701                         myrealdest = join(sep, offset, x)
3702                         # stat file once, test using S_* macros many times (faster that way)
3703                         mystat = os.lstat(mysrc)
3704                         mymode = mystat[stat.ST_MODE]
3705                         # handy variables; mydest is the target object on the live filesystems;
3706                         # mysrc is the source object in the temporary install dir
3707                         try:
3708                                 mydstat = os.lstat(mydest)
3709                                 mydmode = mydstat.st_mode
3710                         except OSError as e:
3711                                 if e.errno != errno.ENOENT:
3712                                         raise
3713                                 del e
3714                                 #dest file doesn't exist
3715                                 mydstat = None
3716                                 mydmode = None
3717
3718                         if stat.S_ISLNK(mymode):
3719                                 # we are merging a symbolic link
3720                                 myabsto = abssymlink(mysrc)
3721                                 if myabsto.startswith(srcroot):
3722                                         myabsto = myabsto[len(srcroot):]
3723                                 myabsto = myabsto.lstrip(sep)
3724                                 myto = os.readlink(mysrc)
3725                                 if self.settings and self.settings["D"]:
3726                                         if myto.startswith(self.settings["D"]):
3727                                                 myto = myto[len(self.settings["D"]):]
3728                                 # myrealto contains the path of the real file to which this symlink points.
3729                                 # we can simply test for existence of this file to see if the target has been merged yet
3730                                 myrealto = normalize_path(os.path.join(destroot, myabsto))
3731                                 if mydmode!=None:
3732                                         #destination exists
3733                                         if not stat.S_ISLNK(mydmode):
3734                                                 if stat.S_ISDIR(mydmode):
3735                                                         # directory in the way: we can't merge a symlink over a directory
3736                                                         # we won't merge this, continue with next file...
3737                                                         continue
3738
3739                                                 if os.path.exists(mysrc) and stat.S_ISDIR(os.stat(mysrc)[stat.ST_MODE]):
3740                                                         # Kill file blocking installation of symlink to dir #71787
3741                                                         pass
3742                                                 elif self.isprotected(mydest):
3743                                                         # Use md5 of the target in ${D} if it exists...
3744                                                         try:
3745                                                                 newmd5 = perform_md5(join(srcroot, myabsto))
3746                                                         except FileNotFound:
3747                                                                 # Maybe the target is merged already.
3748                                                                 try:
3749                                                                         newmd5 = perform_md5(myrealto)
3750                                                                 except FileNotFound:
3751                                                                         newmd5 = None
3752                                                         mydest = new_protect_filename(mydest, newmd5=newmd5)
3753
3754                                 # if secondhand is None it means we're operating in "force" mode and should not create a second hand.
3755                                 if (secondhand != None) and (not os.path.exists(myrealto)):
3756                                         # either the target directory doesn't exist yet or the target file doesn't exist -- or
3757                                         # the target is a broken symlink.  We will add this file to our "second hand" and merge
3758                                         # it later.
3759                                         secondhand.append(mysrc[len(srcroot):])
3760                                         continue
3761                                 # unlinking no longer necessary; "movefile" will overwrite symlinks atomically and correctly
3762                                 mymtime = movefile(mysrc, mydest, newmtime=thismtime,
3763                                         sstat=mystat, mysettings=self.settings,
3764                                         encoding=_encodings['merge'])
3765                                 if mymtime != None:
3766                                         showMessage(">>> %s -> %s\n" % (mydest, myto))
3767                                         outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
3768                                 else:
3769                                         showMessage(_("!!! Failed to move file.\n"),
3770                                                 level=logging.ERROR, noiselevel=-1)
3771                                         showMessage("!!! %s -> %s\n" % (mydest, myto),
3772                                                 level=logging.ERROR, noiselevel=-1)
3773                                         return 1
3774                         elif stat.S_ISDIR(mymode):
3775                                 # we are merging a directory
3776                                 if mydmode != None:
3777                                         # destination exists
3778
3779                                         if bsd_chflags:
3780                                                 # Save then clear flags on dest.
3781                                                 dflags = mydstat.st_flags
3782                                                 if dflags != 0:
3783                                                         bsd_chflags.lchflags(mydest, 0)
3784
3785                                         if not os.access(mydest, os.W_OK):
3786                                                 pkgstuff = pkgsplit(self.pkg)
3787                                                 writemsg(_("\n!!! Cannot write to '%s'.\n") % mydest, noiselevel=-1)
3788                                                 writemsg(_("!!! Please check permissions and directories for broken symlinks.\n"))
3789                                                 writemsg(_("!!! You may start the merge process again by using ebuild:\n"))
3790                                                 writemsg("!!! ebuild "+self.settings["PORTDIR"]+"/"+self.cat+"/"+pkgstuff[0]+"/"+self.pkg+".ebuild merge\n")
3791                                                 writemsg(_("!!! And finish by running this: env-update\n\n"))
3792                                                 return 1
3793
3794                                         if stat.S_ISDIR(mydmode) or \
3795                                                 (stat.S_ISLNK(mydmode) and os.path.isdir(mydest)):
3796                                                 # a symlink to an existing directory will work for us; keep it:
3797                                                 showMessage("--- %s/\n" % mydest)
3798                                                 if bsd_chflags:
3799                                                         bsd_chflags.lchflags(mydest, dflags)
3800                                         else:
3801                                                 # a non-directory and non-symlink-to-directory.  Won't work for us.  Move out of the way.
3802                                                 backup_dest = self._new_backup_path(mydest)
3803                                                 msg = []
3804                                                 msg.append("")
3805                                                 msg.append(_("Installation of a directory is blocked by a file:"))
3806                                                 msg.append("  '%s'" % mydest)
3807                                                 msg.append(_("This file will be renamed to a different name:"))
3808                                                 msg.append("  '%s'" % backup_dest)
3809                                                 msg.append("")
3810                                                 self._eerror("preinst", msg)
3811                                                 if movefile(mydest, backup_dest,
3812                                                         mysettings=self.settings,
3813                                                         encoding=_encodings['merge']) is None:
3814                                                         return 1
3815                                                 showMessage(_("bak %s %s.backup\n") % (mydest, mydest),
3816                                                         level=logging.ERROR, noiselevel=-1)
3817                                                 #now create our directory
3818                                                 try:
3819                                                         if self.settings.selinux_enabled():
3820                                                                 _selinux_merge.mkdir(mydest, mysrc)
3821                                                         else:
3822                                                                 os.mkdir(mydest)
3823                                                 except OSError as e:
3824                                                         # Error handling should be equivalent to
3825                                                         # portage.util.ensure_dirs() for cases
3826                                                         # like bug #187518.
3827                                                         if e.errno in (errno.EEXIST,):
3828                                                                 pass
3829                                                         elif os.path.isdir(mydest):
3830                                                                 pass
3831                                                         else:
3832                                                                 raise
3833                                                         del e
3834
3835                                                 if bsd_chflags:
3836                                                         bsd_chflags.lchflags(mydest, dflags)
3837                                                 os.chmod(mydest, mystat[0])
3838                                                 os.chown(mydest, mystat[4], mystat[5])
3839                                                 showMessage(">>> %s/\n" % mydest)
3840                                 else:
3841                                         try:
3842                                                 #destination doesn't exist
3843                                                 if self.settings.selinux_enabled():
3844                                                         _selinux_merge.mkdir(mydest, mysrc)
3845                                                 else:
3846                                                         os.mkdir(mydest)
3847                                         except OSError as e:
3848                                                 # Error handling should be equivalent to
3849                                                 # portage.util.ensure_dirs() for cases
3850                                                 # like bug #187518.
3851                                                 if e.errno in (errno.EEXIST,):
3852                                                         pass
3853                                                 elif os.path.isdir(mydest):
3854                                                         pass
3855                                                 else:
3856                                                         raise
3857                                                 del e
3858                                         os.chmod(mydest, mystat[0])
3859                                         os.chown(mydest, mystat[4], mystat[5])
3860                                         showMessage(">>> %s/\n" % mydest)
3861                                 outfile.write("dir "+myrealdest+"\n")
3862                                 # recurse and merge this directory
3863                                 if self.mergeme(srcroot, destroot, outfile, secondhand,
3864                                         join(offset, x), cfgfiledict, thismtime):
3865                                         return 1
3866                         elif stat.S_ISREG(mymode):
3867                                 # we are merging a regular file
3868                                 mymd5 = perform_md5(mysrc, calc_prelink=calc_prelink)
3869                                 # calculate config file protection stuff
3870                                 mydestdir = os.path.dirname(mydest)
3871                                 moveme = 1
3872                                 zing = "!!!"
3873                                 mymtime = None
3874                                 protected = self.isprotected(mydest)
3875                                 if mydmode != None:
3876                                         # destination file exists
3877                                         
3878                                         if stat.S_ISDIR(mydmode):
3879                                                 # install of destination is blocked by an existing directory with the same name
3880                                                 newdest = self._new_backup_path(mydest)
3881                                                 msg = []
3882                                                 msg.append("")
3883                                                 msg.append(_("Installation of a regular file is blocked by a directory:"))
3884                                                 msg.append("  '%s'" % mydest)
3885                                                 msg.append(_("This file will be merged with a different name:"))
3886                                                 msg.append("  '%s'" % newdest)
3887                                                 msg.append("")
3888                                                 self._eerror("preinst", msg)
3889                                                 mydest = newdest
3890
3891                                         elif stat.S_ISREG(mydmode) or (stat.S_ISLNK(mydmode) and os.path.exists(mydest) and stat.S_ISREG(os.stat(mydest)[stat.ST_MODE])):
3892                                                 # install of destination is blocked by an existing regular file,
3893                                                 # or by a symlink to an existing regular file;
3894                                                 # now, config file management may come into play.
3895                                                 # we only need to tweak mydest if cfg file management is in play.
3896                                                 if protected:
3897                                                         # we have a protection path; enable config file management.
3898                                                         cfgprot = 0
3899                                                         destmd5 = perform_md5(mydest, calc_prelink=calc_prelink)
3900                                                         if mymd5 == destmd5:
3901                                                                 #file already in place; simply update mtimes of destination
3902                                                                 moveme = 1
3903                                                         else:
3904                                                                 if mymd5 == cfgfiledict.get(myrealdest, [None])[0]:
3905                                                                         """ An identical update has previously been
3906                                                                         merged.  Skip it unless the user has chosen
3907                                                                         --noconfmem."""
3908                                                                         moveme = cfgfiledict["IGNORE"]
3909                                                                         cfgprot = cfgfiledict["IGNORE"]
3910                                                                         if not moveme:
3911                                                                                 zing = "---"
3912                                                                                 mymtime = mystat[stat.ST_MTIME]
3913                                                                 else:
3914                                                                         moveme = 1
3915                                                                         cfgprot = 1
3916                                                         if moveme:
3917                                                                 # Merging a new file, so update confmem.
3918                                                                 cfgfiledict[myrealdest] = [mymd5]
3919                                                         elif destmd5 == cfgfiledict.get(myrealdest, [None])[0]:
3920                                                                 """A previously remembered update has been
3921                                                                 accepted, so it is removed from confmem."""
3922                                                                 del cfgfiledict[myrealdest]
3923
3924                                                         if cfgprot:
3925                                                                 mydest = new_protect_filename(mydest, newmd5=mymd5)
3926
3927                                 # whether config protection or not, we merge the new file the
3928                                 # same way.  Unless moveme=0 (blocking directory)
3929                                 if moveme:
3930                                         # Create hardlinks only for source files that already exist
3931                                         # as hardlinks (having identical st_dev and st_ino).
3932                                         hardlink_key = (mystat.st_dev, mystat.st_ino)
3933
3934                                         hardlink_candidates = self._md5_merge_map.get(hardlink_key)
3935                                         if hardlink_candidates is None:
3936                                                 hardlink_candidates = []
3937                                                 self._md5_merge_map[hardlink_key] = hardlink_candidates
3938
3939                                         mymtime = movefile(mysrc, mydest, newmtime=thismtime,
3940                                                 sstat=mystat, mysettings=self.settings,
3941                                                 hardlink_candidates=hardlink_candidates,
3942                                                 encoding=_encodings['merge'])
3943                                         if mymtime is None:
3944                                                 return 1
3945                                         if hardlink_candidates is not None:
3946                                                 hardlink_candidates.append(mydest)
3947                                         zing = ">>>"
3948
3949                                 if mymtime != None:
3950                                         outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
3951                                 showMessage("%s %s\n" % (zing,mydest))
3952                         else:
3953                                 # we are merging a fifo or device node
3954                                 zing = "!!!"
3955                                 if mydmode is None:
3956                                         # destination doesn't exist
3957                                         if movefile(mysrc, mydest, newmtime=thismtime,
3958                                                 sstat=mystat, mysettings=self.settings,
3959                                                 encoding=_encodings['merge']) is not None:
3960                                                 zing = ">>>"
3961                                         else:
3962                                                 return 1
3963                                 if stat.S_ISFIFO(mymode):
3964                                         outfile.write("fif %s\n" % myrealdest)
3965                                 else:
3966                                         outfile.write("dev %s\n" % myrealdest)
3967                                 showMessage(zing + " " + mydest + "\n")
3968
	def merge(self, mergeroot, inforoot, myroot=None, myebuild=None, cleanup=0,
		mydbapi=None, prev_mtimes=None, counter=None):
		"""
		Merge this package instance into the live filesystem via
		treewalk(), then run the success/die ebuild hooks and
		(conditionally) the clean phase.

		@param myroot: ignored, self._eroot is used instead
		@param counter: passed through to treewalk()
		@return: return value of treewalk() (os.EX_OK on success)
		"""
		myroot = None
		retval = -1
		parallel_install = "parallel-install" in self.settings.features
		# With parallel-install enabled, vardb locking is left to
		# finer-grained code paths instead of being held for the whole merge.
		if not parallel_install:
			self.lockdb()
		self.vartree.dbapi._bump_mtime(self.mycpv)
		try:
			retval = self.treewalk(mergeroot, myroot, inforoot, myebuild,
				cleanup=cleanup, mydbapi=mydbapi, prev_mtimes=prev_mtimes,
				counter=counter)

			# If PORTAGE_BUILDDIR doesn't exist, then it probably means
			# fail-clean is enabled, and the success/die hooks have
			# already been called by _emerge.EbuildPhase (via
			# self._scheduler.dblinkEbuildPhase) prior to cleaning.
			if os.path.isdir(self.settings['PORTAGE_BUILDDIR']):

				if retval == os.EX_OK:
					phase = 'success_hooks'
				else:
					phase = 'die_hooks'

				# Without a scheduler, spawn the hook phase synchronously
				# and wait for it; otherwise delegate to the scheduler.
				if self._scheduler is None:
					ebuild_phase = MiscFunctionsProcess(
						background=False,
						commands=[phase],
						scheduler=PollScheduler().sched_iface,
						settings=self.settings)
					ebuild_phase.start()
					ebuild_phase.wait()
				else:
					self._scheduler.dblinkEbuildPhase(
						self, mydbapi, myebuild, phase)

				self._elog_process()

				# Run the clean phase unless FEATURES=noclean; on failure,
				# clean only when FEATURES=fail-clean is set.
				if 'noclean' not in self.settings.features and \
					(retval == os.EX_OK or \
					'fail-clean' in self.settings.features):
					if myebuild is None:
						myebuild = os.path.join(inforoot, self.pkg + ".ebuild")

					doebuild_environment(myebuild, "clean",
						settings=self.settings, db=mydbapi)
					if self._scheduler is None:
						_spawn_phase("clean", self.settings)
					else:
						self._scheduler.dblinkEbuildPhase(
							self, mydbapi, myebuild, "clean")

		finally:
			# REPLACING_VERSIONS is only meaningful during the merge itself.
			self.settings.pop('REPLACING_VERSIONS', None)
			if self.vartree.dbapi._linkmap is None:
				# preserve-libs is entirely disabled
				pass
			else:
				self.vartree.dbapi._linkmap._clear_cache()
			self.vartree.dbapi._bump_mtime(self.mycpv)
			if not parallel_install:
				self.unlockdb()
		return retval
4035
4036         def getstring(self,name):
4037                 "returns contents of a file with whitespace converted to spaces"
4038                 if not os.path.exists(self.dbdir+"/"+name):
4039                         return ""
4040                 mydata = codecs.open(
4041                         _unicode_encode(os.path.join(self.dbdir, name),
4042                         encoding=_encodings['fs'], errors='strict'),
4043                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4044                         ).read().split()
4045                 return " ".join(mydata)
4046
4047         def copyfile(self,fname):
4048                 shutil.copyfile(fname,self.dbdir+"/"+os.path.basename(fname))
4049
4050         def getfile(self,fname):
4051                 if not os.path.exists(self.dbdir+"/"+fname):
4052                         return ""
4053                 return codecs.open(_unicode_encode(os.path.join(self.dbdir, fname),
4054                         encoding=_encodings['fs'], errors='strict'), 
4055                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4056                         ).read()
4057
4058         def setfile(self,fname,data):
4059                 kwargs = {}
4060                 if fname == 'environment.bz2' or not isinstance(data, basestring):
4061                         kwargs['mode'] = 'wb'
4062                 else:
4063                         kwargs['mode'] = 'w'
4064                         kwargs['encoding'] = _encodings['repo.content']
4065                 write_atomic(os.path.join(self.dbdir, fname), data, **kwargs)
4066
4067         def getelements(self,ename):
4068                 if not os.path.exists(self.dbdir+"/"+ename):
4069                         return []
4070                 mylines = codecs.open(_unicode_encode(
4071                         os.path.join(self.dbdir, ename),
4072                         encoding=_encodings['fs'], errors='strict'),
4073                         mode='r', encoding=_encodings['repo.content'], errors='replace'
4074                         ).readlines()
4075                 myreturn = []
4076                 for x in mylines:
4077                         for y in x[:-1].split():
4078                                 myreturn.append(y)
4079                 return myreturn
4080
4081         def setelements(self,mylist,ename):
4082                 myelement = codecs.open(_unicode_encode(
4083                         os.path.join(self.dbdir, ename),
4084                         encoding=_encodings['fs'], errors='strict'),
4085                         mode='w', encoding=_encodings['repo.content'],
4086                         errors='backslashreplace')
4087                 for x in mylist:
4088                         myelement.write(x+"\n")
4089                 myelement.close()
4090
4091         def isregular(self):
4092                 "Is this a regular package (does it have a CATEGORY file?  A dblink can be virtual *and* regular)"
4093                 return os.path.exists(os.path.join(self.dbdir, "CATEGORY"))
4094
def merge(mycat, mypkg, pkgloc, infloc,
	myroot=None, settings=None, myebuild=None,
	mytree=None, mydbapi=None, vartree=None, prev_mtimes=None, blockers=None,
	scheduler=None):
	"""
	Spawn a MergeProcess for the given package and wait for it to
	complete, returning its exit status.

	@param myroot: ignored, settings['EROOT'] is used instead
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	eroot = settings['EROOT']
	# Bail out early if the target root is not writable.
	if not os.access(eroot, os.W_OK):
		writemsg(_("Permission denied: access('%s', W_OK)\n") % eroot,
			noiselevel=-1)
		return errno.EACCES
	merge_task = MergeProcess(
		dblink=dblink, mycat=mycat, mypkg=mypkg, settings=settings,
		treetype=mytree, vartree=vartree,
		scheduler=(scheduler or PollScheduler().sched_iface),
		background=(settings.get('PORTAGE_BACKGROUND') == '1'),
		blockers=blockers, pkgloc=pkgloc,
		infloc=infloc, myebuild=myebuild, mydbapi=mydbapi,
		prev_mtimes=prev_mtimes, logfile=settings.get('PORTAGE_LOG_FILE'))
	merge_task.start()
	return merge_task.wait()
4120
def unmerge(cat, pkg, myroot=None, settings=None,
	mytrimworld=None, vartree=None,
	ldpath_mtimes=None, scheduler=None):
	"""
	Unmerge the installed package instance identified by cat/pkg and,
	on success, delete its vardb entry.

	@param myroot: ignored, settings['EROOT'] is used instead
	@param mytrimworld: ignored
	@return: os.EX_OK on success (or when the package does not exist),
		otherwise the return value of dblink.unmerge()
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	mylink = dblink(cat, pkg, settings=settings, treetype="vartree",
		vartree=vartree, scheduler=scheduler)
	vartree = mylink.vartree
	parallel_install = "parallel-install" in settings.features
	# With parallel-install, vardb locking is left to finer-grained
	# code paths instead of being held for the whole unmerge.
	if not parallel_install:
		mylink.lockdb()
	try:
		if mylink.exists():
			retval = mylink.unmerge(ldpath_mtimes=ldpath_mtimes)
			if retval == os.EX_OK:
				# NOTE(review): when parallel-install is disabled, the db
				# lock is already held here, so this nested lockdb() call
				# presumably relies on the lock being reference counted —
				# confirm before refactoring.
				mylink.lockdb()
				try:
					mylink.delete()
				finally:
					mylink.unlockdb()
			return retval
		return os.EX_OK
	finally:
		if vartree.dbapi._linkmap is None:
			# preserve-libs is entirely disabled
			pass
		else:
			vartree.dbapi._linkmap._clear_cache()
		if not parallel_install:
			mylink.unlockdb()
4156
def write_contents(contents, root, f):
	"""
	Serialize a CONTENTS mapping to any file like object, one entry per
	line, with filenames made relative to root (which is expected to end
	with a path separator).  The file will be left open.
	"""
	prefix_len = len(root) - 1
	for filename in sorted(contents):
		entry = contents[filename]
		etype = entry[0]
		relative = filename[prefix_len:]
		if etype == "obj":
			etype, mtime, md5sum = entry
			line = "%s %s %s %s\n" % (etype, relative, md5sum, mtime)
		elif etype == "sym":
			etype, mtime, link = entry
			line = "%s %s -> %s %s\n" % (etype, relative, link, mtime)
		else:
			# dir, dev and fif entries carry no extra fields
			line = "%s %s\n" % (etype, relative)
		f.write(line)
4177
def tar_contents(contents, root, tar, protect=None, onProgress=None):
	"""
	Add every path listed in a CONTENTS mapping to the given tarfile
	object, with archive names made relative to root.

	@param protect: optional callable; when protect(path) is true for a
		regular file, an empty placeholder is archived instead of the
		real file contents
	@param onProgress: optional callable invoked as onProgress(maxval,
		curval) to report progress
	"""
	# Rebind the local name "os" to the merge-encoding-aware module;
	# may be switched to portage.os below if the filesystem encoding
	# turns out to be the better fit for these paths.
	os = _os_merge

	try:
		for x in contents:
			_unicode_encode(x,
				encoding=_encodings['merge'],
				errors='strict')
	except UnicodeEncodeError:
		# The package appears to have been merged with a
		# different value of sys.getfilesystemencoding(),
		# so fall back to utf_8 if appropriate.
		try:
			for x in contents:
				_unicode_encode(x,
					encoding=_encodings['fs'],
					errors='strict')
		except UnicodeEncodeError:
			pass
		else:
			os = portage.os

	root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
	# NOTE(review): id_strings appears to be unused in this function —
	# confirm whether it is leftover from an earlier revision.
	id_strings = {}
	maxval = len(contents)
	curval = 0
	if onProgress:
		onProgress(maxval, 0)
	paths = list(contents)
	paths.sort()
	for path in paths:
		curval += 1
		try:
			lst = os.lstat(path)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# Path vanished from the live filesystem; skip it but still
			# report progress for this entry.
			if onProgress:
				onProgress(maxval, curval)
			continue
		contents_type = contents[path][0]
		if path.startswith(root):
			arcname = path[len(root):]
		else:
			raise ValueError("invalid root argument: '%s'" % root)
		live_path = path
		if 'dir' == contents_type and \
			not stat.S_ISDIR(lst.st_mode) and \
			os.path.isdir(live_path):
			# Even though this was a directory in the original ${D}, it exists
			# as a symlink to a directory in the live filesystem.  It must be
			# recorded as a real directory in the tar file to ensure that tar
			# can properly extract it's children.
			live_path = os.path.realpath(live_path)
		tarinfo = tar.gettarinfo(live_path, arcname)

		if stat.S_ISREG(lst.st_mode):
			if protect and protect(path):
				# Create an empty file as a place holder in order to avoid
				# potential collision-protect issues.
				f = tempfile.TemporaryFile()
				f.write(_unicode_encode(
					"# empty file because --include-config=n " + \
					"when `quickpkg` was used\n"))
				f.flush()
				f.seek(0)
				tarinfo.size = os.fstat(f.fileno()).st_size
				tar.addfile(tarinfo, f)
				f.close()
			else:
				f = open(_unicode_encode(path,
					encoding=object.__getattribute__(os, '_encoding'),
					errors='strict'), 'rb')
				try:
					tar.addfile(tarinfo, f)
				finally:
					f.close()
		else:
			# Non-regular entries (dirs, symlinks, devices, fifos) are
			# archived from the metadata gathered by gettarinfo() alone.
			tar.addfile(tarinfo)
		if onProgress:
			onProgress(maxval, curval)