aff4d86c25bfeb431f63d70944f61bffaf0ca086
[portage.git] / pym / portage / dbapi / vartree.py
1 # Copyright 1998-2010 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 __all__ = [
5         "vardbapi", "vartree", "dblink"] + \
6         ["write_contents", "tar_contents"]
7
8 import portage
9 portage.proxy.lazyimport.lazyimport(globals(),
10         'portage.checksum:_perform_md5_merge@perform_md5',
11         'portage.data:portage_gid,portage_uid,secpass',
12         'portage.dbapi.dep_expand:dep_expand',
13         'portage.dep:dep_getkey,isjustname,match_from_list,' + \
14                 'use_reduce,_slot_re',
15         'portage.elog:elog_process',
16         'portage.locks:lockdir,unlockdir',
17         'portage.output:bold,colorize',
18         'portage.package.ebuild.doebuild:doebuild_environment,' + \
19                 '_spawn_phase',
20         'portage.package.ebuild.prepare_build_dirs:prepare_build_dirs',
21         'portage.update:fixdbentries',
22         'portage.util:apply_secpass_permissions,ConfigProtect,ensure_dirs,' + \
23                 'writemsg,writemsg_level,write_atomic,atomic_ofstream,writedict,' + \
24                 'grabdict,normalize_path,new_protect_filename',
25         'portage.util.digraph:digraph',
26         'portage.util.env_update:env_update',
27         'portage.util.listdir:dircache,listdir',
28         'portage.util._dyn_libs.PreservedLibsRegistry:PreservedLibsRegistry',
29         'portage.util._dyn_libs.LinkageMapELF:LinkageMapELF@LinkageMap',
30         'portage.versions:best,catpkgsplit,catsplit,cpv_getkey,pkgcmp,' + \
31                 '_pkgsplit@pkgsplit',
32 )
33
34 from portage.const import CACHE_PATH, CONFIG_MEMORY_FILE, \
35         PORTAGE_PACKAGE_ATOM, PRIVATE_PATH, VDB_PATH
36 from portage.const import _ENABLE_DYN_LINK_MAP, _ENABLE_PRESERVE_LIBS
37 from portage.dbapi import dbapi
38 from portage.exception import CommandNotFound, \
39         InvalidData, InvalidPackageName, \
40         FileNotFound, PermissionDenied, UnsupportedAPIException
41 from portage.localization import _
42 from portage.util.movefile import movefile
43
44 from portage import abssymlink, _movefile, bsd_chflags
45
46 # This is a special version of the os module, wrapped for unicode support.
47 from portage import os
48 from portage import _encodings
49 from portage import _os_merge
50 from portage import _selinux_merge
51 from portage import _unicode_decode
52 from portage import _unicode_encode
53
54 from _emerge.AsynchronousLock import AsynchronousLock
55 from _emerge.EbuildBuildDir import EbuildBuildDir
56 from _emerge.PollScheduler import PollScheduler
57 from _emerge.MiscFunctionsProcess import MiscFunctionsProcess
58
59 import codecs
60 import gc
61 import re, shutil, stat, errno, subprocess
62 import logging
63 import os as _os
64 import stat
65 import sys
66 import tempfile
67 import time
68 import warnings
69
70 try:
71         import cPickle as pickle
72 except ImportError:
73         import pickle
74
75 if sys.hexversion >= 0x3000000:
76         basestring = str
77         long = int
78
class vardbapi(dbapi):
    """
    dbapi subclass that reads and writes the installed-package database
    (vdb) rooted at settings['EROOT'] + VDB_PATH.
    """

    # Directory names inside the vdb that never correspond to packages.
    # Folded into a single anchored regex that also skips hidden
    # directories and partially-merged -MERGING-* entries.
    _excluded_dirs = ["CVS", "lost+found"]
    _excluded_dirs = [re.escape(x) for x in _excluded_dirs]
    _excluded_dirs = re.compile(r'^(\..*|-MERGING-.*|' + \
        "|".join(_excluded_dirs) + r')$')

    # Bump these when the corresponding on-disk cache format changes
    # incompatibly; stale caches are then discarded and rebuilt.
    _aux_cache_version        = "1"
    _owners_cache_version     = "1"

    # Number of uncached packages to trigger cache update, since
    # it's wasteful to update it for every vdb change.
    _aux_cache_threshold = 5

    # Metadata keys matching this pattern are cached in addition to the
    # explicit _aux_cache_keys set built in __init__.
    _aux_cache_keys_re = re.compile(r'^NEEDED\..*$')
    # Metadata files whose embedded newlines must be preserved when read
    # (everything else is whitespace-normalized in _aux_get).
    _aux_multi_line_re = re.compile(r'^(CONTENTS|NEEDED\..*)$')
95
    def __init__(self, _unused_param=None, categories=None, settings=None, vartree=None):
        """
        The categories parameter is unused since the dbapi class
        now has a categories property that is generated from the
        available packages.
        """

        # Used by emerge to check whether any packages
        # have been added or removed.
        self._pkgs_changed = False

        #cache for category directory mtimes
        self.mtdircache = {}

        #cache for dependency checks
        self.matchcache = {}

        #cache for cp_list results
        self.cpcache = {}

        self.blockers = None
        if settings is None:
            settings = portage.settings
        self.settings = settings
        self.root = settings['ROOT']

        # The first positional parameter used to carry the root path;
        # warn if a caller still passes a value that conflicts with
        # settings['ROOT'].
        if _unused_param is not None and _unused_param != self.root:
            warnings.warn("The first parameter of the " + \
                "portage.dbapi.vartree.vardbapi" + \
                " constructor is now unused. Use " + \
                "settings['ROOT'] instead.",
                DeprecationWarning, stacklevel=2)

        self._eroot = settings['EROOT']
        if vartree is None:
            vartree = portage.db[self.root]["vartree"]
        self.vartree = vartree
        # Metadata keys that aux_get() caches persistently; keys matching
        # _aux_cache_keys_re are cached as well.
        self._aux_cache_keys = set(
            ["BUILD_TIME", "CHOST", "COUNTER", "DEPEND", "DESCRIPTION",
            "EAPI", "HOMEPAGE", "IUSE", "KEYWORDS",
            "LICENSE", "PDEPEND", "PROPERTIES", "PROVIDE", "RDEPEND",
            "repository", "RESTRICT" , "SLOT", "USE", "DEFINED_PHASES",
            "REQUIRED_USE"])
        # Lazily populated by the _aux_cache property.
        self._aux_cache_obj = None
        self._aux_cache_filename = os.path.join(self._eroot,
            CACHE_PATH, "vdb_metadata.pickle")
        self._counter_path = os.path.join(self._eroot,
            CACHE_PATH, "counter")

        # Registry of preserved libraries, when the feature is enabled
        # and the registry file is accessible to this user.
        self._plib_registry = None
        if _ENABLE_PRESERVE_LIBS:
            try:
                self._plib_registry = PreservedLibsRegistry(self._eroot,
                    os.path.join(self._eroot, PRIVATE_PATH,
                    "preserved_libs_registry"))
            except PermissionDenied:
                # apparently this user isn't allowed to access PRIVATE_PATH
                pass

        self._linkmap = None
        if _ENABLE_DYN_LINK_MAP:
            self._linkmap = LinkageMap(self)
        self._owners = self._owners_db(self)
159
160         def getpath(self, mykey, filename=None):
161                 # This is an optimized hotspot, so don't use unicode-wrapped
162                 # os module and don't use os.path.join().
163                 rValue = self._eroot + VDB_PATH + _os.sep + mykey
164                 if filename is not None:
165                         # If filename is always relative, we can do just
166                         # rValue += _os.sep + filename
167                         rValue = _os.path.join(rValue, filename)
168                 return rValue
169
170         def _bump_mtime(self, cpv):
171                 """
172                 This is called before an after any modifications, so that consumers
173                 can use directory mtimes to validate caches. See bug #290428.
174                 """
175                 base = self._eroot + VDB_PATH
176                 cat = catsplit(cpv)[0]
177                 catdir = base + _os.sep + cat
178                 t = time.time()
179                 t = (t, t)
180                 try:
181                         for x in (catdir, base):
182                                 os.utime(x, t)
183                 except OSError:
184                         ensure_dirs(catdir)
185
186         def cpv_exists(self, mykey):
187                 "Tells us whether an actual ebuild exists on disk (no masking)"
188                 return os.path.exists(self.getpath(mykey))
189
190         def cpv_counter(self, mycpv):
191                 "This method will grab the COUNTER. Returns a counter value."
192                 try:
193                         return long(self.aux_get(mycpv, ["COUNTER"])[0])
194                 except (KeyError, ValueError):
195                         pass
196                 writemsg_level(_("portage: COUNTER for %s was corrupted; " \
197                         "resetting to value of 0\n") % (mycpv,),
198                         level=logging.ERROR, noiselevel=-1)
199                 return 0
200
201         def cpv_inject(self, mycpv):
202                 "injects a real package into our on-disk database; assumes mycpv is valid and doesn't already exist"
203                 ensure_dirs(self.getpath(mycpv))
204                 counter = self.counter_tick(mycpv=mycpv)
205                 # write local package counter so that emerge clean does the right thing
206                 write_atomic(self.getpath(mycpv, filename="COUNTER"), str(counter))
207
208         def isInjected(self, mycpv):
209                 if self.cpv_exists(mycpv):
210                         if os.path.exists(self.getpath(mycpv, filename="INJECTED")):
211                                 return True
212                         if not os.path.exists(self.getpath(mycpv, filename="CONTENTS")):
213                                 return True
214                 return False
215
    def move_ent(self, mylist, repo_match=None):
        """
        Apply a package "move" update entry to the installed-package db,
        renaming matching installed packages from one cat/pkg to another.

        @param mylist: update command list; mylist[1] is the old cat/pkg,
            mylist[2] the new one
        @param repo_match: optional callable taking a repository name;
            packages whose repository it rejects are skipped
        @return: number of packages moved
        @raises InvalidPackageName: if either name is not a plain cat/pkg
        """
        origcp = mylist[1]
        newcp = mylist[2]

        # sanity check
        for atom in (origcp, newcp):
            if not isjustname(atom):
                raise InvalidPackageName(str(atom))
        origmatches = self.match(origcp, use_cache=0)
        moves = 0
        if not origmatches:
            return moves
        for mycpv in origmatches:
            mycpv_cp = cpv_getkey(mycpv)
            if mycpv_cp != origcp:
                # Ignore PROVIDE virtual match.
                continue
            if repo_match is not None \
                and not repo_match(self.aux_get(mycpv, ['repository'])[0]):
                continue
            # Rename only the cat/pkg part, keeping the version suffix.
            mynewcpv = mycpv.replace(mycpv_cp, str(newcp), 1)
            mynewcat = catsplit(newcp)[0]
            origpath = self.getpath(mycpv)
            if not os.path.exists(origpath):
                continue
            moves += 1
            if not os.path.exists(self.getpath(mynewcat)):
                #create the directory
                ensure_dirs(self.getpath(mynewcat))
            newpath = self.getpath(mynewcpv)
            if os.path.exists(newpath):
                #dest already exists; keep this puppy where it is.
                continue
            _movefile(origpath, newpath, mysettings=self.settings)
            # Invalidate caches for both the old and the new location.
            self._clear_pkg_cache(self._dblink(mycpv))
            self._clear_pkg_cache(self._dblink(mynewcpv))

            # We need to rename the ebuild now.
            old_pf = catsplit(mycpv)[1]
            new_pf = catsplit(mynewcpv)[1]
            if new_pf != old_pf:
                try:
                    os.rename(os.path.join(newpath, old_pf + ".ebuild"),
                        os.path.join(newpath, new_pf + ".ebuild"))
                except EnvironmentError as e:
                    # A missing ebuild is tolerated; anything else is fatal.
                    if e.errno != errno.ENOENT:
                        raise
                    del e
            # Refresh the metadata files that embed the package name.
            write_atomic(os.path.join(newpath, "PF"), new_pf+"\n")
            write_atomic(os.path.join(newpath, "CATEGORY"), mynewcat+"\n")
            fixdbentries([mylist], newpath)
        return moves
268
    def cp_list(self, mycp, use_cache=1):
        """
        Return the sorted list of installed cpvs for a cat/pkg name.

        Results are cached per cat/pkg and validated against the mtime of
        the category directory.

        @param mycp: "category/package" name; a leading '*' on the
            category (legacy injected-package marker) is stripped
        @param use_cache: when false, bypass the cache and drop any
            existing cache entry for mycp
        """
        mysplit=catsplit(mycp)
        if mysplit[0] == '*':
            mysplit[0] = mysplit[0][1:]
        # Category directory mtime is the cache validity token.
        try:
            mystat = os.stat(self.getpath(mysplit[0])).st_mtime
        except OSError:
            mystat = 0
        if use_cache and mycp in self.cpcache:
            cpc = self.cpcache[mycp]
            if cpc[0] == mystat:
                # Cache hit; return a copy so callers can't mutate it.
                return cpc[1][:]
        cat_dir = self.getpath(mysplit[0])
        try:
            dir_list = os.listdir(cat_dir)
        except EnvironmentError as e:
            if e.errno == PermissionDenied.errno:
                raise PermissionDenied(cat_dir)
            del e
            dir_list = []

        returnme = []
        for x in dir_list:
            if self._excluded_dirs.match(x) is not None:
                continue
            ps = pkgsplit(x)
            if not ps:
                # Not a parseable pkg-version directory; report it.
                self.invalidentry(os.path.join(self.getpath(mysplit[0]), x))
                continue
            if len(mysplit) > 1:
                if ps[0] == mysplit[1]:
                    returnme.append(mysplit[0]+"/"+x)
        self._cpv_sort_ascending(returnme)
        if use_cache:
            self.cpcache[mycp] = [mystat, returnme[:]]
        elif mycp in self.cpcache:
            del self.cpcache[mycp]
        return returnme
307
    def cpv_all(self, use_cache=1):
        """
        Return a list of all installed cpvs found under the vdb.

        Set use_cache=0 to bypass the portage.cachedir() cache in cases
        when the accuracy of mtime staleness checks should not be trusted
        (generally this is only necessary in critical sections that
        involve merge or unmerge of packages).
        """
        returnme = []
        basepath = os.path.join(self._eroot, VDB_PATH) + os.path.sep

        if use_cache:
            from portage import listdir
        else:
            # Uncached fallback: read the directory directly, keeping
            # only subdirectories, mirroring listdir(dirsonly=1).
            def listdir(p, **kwargs):
                try:
                    return [x for x in os.listdir(p) \
                        if os.path.isdir(os.path.join(p, x))]
                except EnvironmentError as e:
                    if e.errno == PermissionDenied.errno:
                        raise PermissionDenied(p)
                    del e
                    return []

        for x in listdir(basepath, EmptyOnError=1, ignorecvs=1, dirsonly=1):
            if self._excluded_dirs.match(x) is not None:
                continue
            if not self._category_re.match(x):
                continue
            for y in listdir(basepath + x, EmptyOnError=1, dirsonly=1):
                if self._excluded_dirs.match(y) is not None:
                    continue
                subpath = x + "/" + y
                # -MERGING- should never be a cpv, nor should files.
                try:
                    if catpkgsplit(subpath) is None:
                        self.invalidentry(self.getpath(subpath))
                        continue
                except InvalidData:
                    self.invalidentry(self.getpath(subpath))
                    continue
                returnme.append(subpath)

        return returnme
351
352         def cp_all(self, use_cache=1):
353                 mylist = self.cpv_all(use_cache=use_cache)
354                 d={}
355                 for y in mylist:
356                         if y[0] == '*':
357                                 y = y[1:]
358                         try:
359                                 mysplit = catpkgsplit(y)
360                         except InvalidData:
361                                 self.invalidentry(self.getpath(y))
362                                 continue
363                         if not mysplit:
364                                 self.invalidentry(self.getpath(y))
365                                 continue
366                         d[mysplit[0]+"/"+mysplit[1]] = None
367                 return list(d)
368
    def checkblockers(self, origdep):
        # Intentional no-op: this method does nothing here.
        pass
371
372         def _clear_cache(self):
373                 self.mtdircache.clear()
374                 self.matchcache.clear()
375                 self.cpcache.clear()
376                 self._aux_cache_obj = None
377
378         def _add(self, pkg_dblink):
379                 self._pkgs_changed = True
380                 self._clear_pkg_cache(pkg_dblink)
381
382         def _remove(self, pkg_dblink):
383                 self._pkgs_changed = True
384                 self._clear_pkg_cache(pkg_dblink)
385
386         def _clear_pkg_cache(self, pkg_dblink):
387                 # Due to 1 second mtime granularity in <python-2.5, mtime checks
388                 # are not always sufficient to invalidate vardbapi caches. Therefore,
389                 # the caches need to be actively invalidated here.
390                 self.mtdircache.pop(pkg_dblink.cat, None)
391                 self.matchcache.pop(pkg_dblink.cat, None)
392                 self.cpcache.pop(pkg_dblink.mysplit[0], None)
393                 dircache.pop(pkg_dblink.dbcatdir, None)
394
395         def match(self, origdep, use_cache=1):
396                 "caching match function"
397                 mydep = dep_expand(
398                         origdep, mydb=self, use_cache=use_cache, settings=self.settings)
399                 mykey = dep_getkey(mydep)
400                 mycat = catsplit(mykey)[0]
401                 if not use_cache:
402                         if mycat in self.matchcache:
403                                 del self.mtdircache[mycat]
404                                 del self.matchcache[mycat]
405                         return list(self._iter_match(mydep,
406                                 self.cp_list(mydep.cp, use_cache=use_cache)))
407                 try:
408                         curmtime = os.stat(os.path.join(self._eroot, VDB_PATH, mycat)).st_mtime
409                 except (IOError, OSError):
410                         curmtime=0
411
412                 if mycat not in self.matchcache or \
413                         self.mtdircache[mycat] != curmtime:
414                         # clear cache entry
415                         self.mtdircache[mycat] = curmtime
416                         self.matchcache[mycat] = {}
417                 if mydep not in self.matchcache[mycat]:
418                         mymatch = list(self._iter_match(mydep,
419                                 self.cp_list(mydep.cp, use_cache=use_cache)))
420                         self.matchcache[mycat][mydep] = mymatch
421                 return self.matchcache[mycat][mydep][:]
422
423         def findname(self, mycpv):
424                 return self.getpath(str(mycpv), filename=catsplit(mycpv)[1]+".ebuild")
425
426         def flush_cache(self):
427                 """If the current user has permission and the internal aux_get cache has
428                 been updated, save it to disk and mark it unmodified.  This is called
429                 by emerge after it has loaded the full vdb for use in dependency
430                 calculations.  Currently, the cache is only written if the user has
431                 superuser privileges (since that's required to obtain a lock), but all
432                 users have read access and benefit from faster metadata lookups (as
433                 long as at least part of the cache is still valid)."""
434                 if self._aux_cache is not None and \
435                         len(self._aux_cache["modified"]) >= self._aux_cache_threshold and \
436                         secpass >= 2:
437                         self._owners.populate() # index any unindexed contents
438                         valid_nodes = set(self.cpv_all())
439                         for cpv in list(self._aux_cache["packages"]):
440                                 if cpv not in valid_nodes:
441                                         del self._aux_cache["packages"][cpv]
442                         del self._aux_cache["modified"]
443                         try:
444                                 f = atomic_ofstream(self._aux_cache_filename, 'wb')
445                                 pickle.dump(self._aux_cache, f, protocol=2)
446                                 f.close()
447                                 apply_secpass_permissions(
448                                         self._aux_cache_filename, gid=portage_gid, mode=0o644)
449                         except (IOError, OSError) as e:
450                                 pass
451                         self._aux_cache["modified"] = set()
452
453         @property
454         def _aux_cache(self):
455                 if self._aux_cache_obj is None:
456                         self._aux_cache_init()
457                 return self._aux_cache_obj
458
    def _aux_cache_init(self):
        """Load the pickled aux metadata cache from disk into
        self._aux_cache_obj, falling back to a fresh empty cache if the
        file is missing, unreadable, or has an unexpected structure."""
        aux_cache = None
        open_kwargs = {}
        if sys.hexversion >= 0x3000000:
            # Buffered io triggers extreme performance issues in
            # Unpickler.load() (problem observed with python-3.0.1).
            # Unfortunately, performance is still poor relative to
            # python-2.x, but buffering makes it much worse.
            open_kwargs["buffering"] = 0
        try:
            f = open(_unicode_encode(self._aux_cache_filename,
                encoding=_encodings['fs'], errors='strict'),
                mode='rb', **open_kwargs)
            mypickle = pickle.Unpickler(f)
            try:
                # Disable global lookups as a hardening measure against
                # untrustworthy pickle content.
                mypickle.find_global = None
            except AttributeError:
                # TODO: If py3k, override Unpickler.find_class().
                pass
            aux_cache = mypickle.load()
            f.close()
            del f
        except (IOError, OSError, EOFError, ValueError, pickle.UnpicklingError) as e:
            # Any load failure just means the cache will be rebuilt;
            # only unpickling errors are worth reporting.
            if isinstance(e, pickle.UnpicklingError):
                writemsg(_("!!! Error loading '%s': %s\n") % \
                    (self._aux_cache_filename, str(e)), noiselevel=-1)
            del e

        # Discard the loaded object unless it has the expected shape
        # and a matching version.
        if not aux_cache or \
            not isinstance(aux_cache, dict) or \
            aux_cache.get("version") != self._aux_cache_version or \
            not aux_cache.get("packages"):
            aux_cache = {"version": self._aux_cache_version}
            aux_cache["packages"] = {}

        # Validate the nested owners cache the same way; any structural
        # mismatch causes it to be rebuilt empty.
        owners = aux_cache.get("owners")
        if owners is not None:
            if not isinstance(owners, dict):
                owners = None
            elif "version" not in owners:
                owners = None
            elif owners["version"] != self._owners_cache_version:
                owners = None
            elif "base_names" not in owners:
                owners = None
            elif not isinstance(owners["base_names"], dict):
                owners = None

        if owners is None:
            owners = {
                "base_names" : {},
                "version"    : self._owners_cache_version
            }
            aux_cache["owners"] = owners

        # Runtime-only set of cpvs whose cache entries changed; consumed
        # by flush_cache() and never stored on disk.
        aux_cache["modified"] = set()
        self._aux_cache_obj = aux_cache
516
    def aux_get(self, mycpv, wants):
        """This automatically caches selected keys that are frequently needed
        by emerge for dependency calculations.  The cached metadata is
        considered valid if the mtime of the package directory has not changed
        since the data was cached.  The cache is stored in a pickled dict
        object with the following format:

        {version:"1", "packages":{cpv1:(mtime,{k1,v1, k2,v2, ...}), cpv2...}}

        If an error occurs while loading the cache pickle or the version is
        unrecognized, the cache will simply be recreated from scratch (it is
        completely disposable).

        @param mycpv: installed package cpv
        @param wants: iterable of metadata key names
        @return: list of values in the same order as wants
        @raises KeyError: if the package directory does not exist
        """
        # Only keys in _aux_cache_keys or matching _aux_cache_keys_re are
        # cacheable; a request with none of those bypasses the cache.
        cache_these_wants = self._aux_cache_keys.intersection(wants)
        for x in wants:
            if self._aux_cache_keys_re.match(x) is not None:
                cache_these_wants.add(x)

        if not cache_these_wants:
            return self._aux_get(mycpv, wants)

        cache_these = set(self._aux_cache_keys)
        cache_these.update(cache_these_wants)

        mydir = self.getpath(mycpv)
        mydir_stat = None
        try:
            mydir_stat = os.stat(mydir)
        except OSError as e:
            if e.errno != errno.ENOENT:
                raise
            # Missing package directory maps to KeyError per dbapi
            # convention.
            raise KeyError(mycpv)
        mydir_mtime = mydir_stat[stat.ST_MTIME]
        pkg_data = self._aux_cache["packages"].get(mycpv)
        pull_me = cache_these.union(wants)
        mydata = {"_mtime_" : mydir_mtime}
        cache_valid = False
        cache_incomplete = False
        cache_mtime = None
        metadata = None
        # Discard structurally invalid cache entries (wrong type/shape).
        if pkg_data is not None:
            if not isinstance(pkg_data, tuple) or len(pkg_data) != 2:
                pkg_data = None
            else:
                cache_mtime, metadata = pkg_data
                if not isinstance(cache_mtime, (long, int)) or \
                    not isinstance(metadata, dict):
                    pkg_data = None

        if pkg_data:
            cache_mtime, metadata = pkg_data
            # The entry is valid only if the package directory has not
            # been modified since it was recorded.
            cache_valid = cache_mtime == mydir_mtime
        if cache_valid:
            # Migrate old metadata to unicode.
            for k, v in metadata.items():
                metadata[k] = _unicode_decode(v,
                    encoding=_encodings['repo.content'], errors='replace')

            mydata.update(metadata)
            # Whatever the cache satisfied no longer needs to be read
            # from disk.
            pull_me.difference_update(mydata)

        if pull_me:
            # pull any needed data and cache it
            aux_keys = list(pull_me)
            for k, v in zip(aux_keys,
                self._aux_get(mycpv, aux_keys, st=mydir_stat)):
                mydata[k] = v
            # Rewrite the cache entry if it was invalid or incomplete.
            if not cache_valid or cache_these.difference(metadata):
                cache_data = {}
                if cache_valid and metadata:
                    cache_data.update(metadata)
                for aux_key in cache_these:
                    cache_data[aux_key] = mydata[aux_key]
                self._aux_cache["packages"][mycpv] = (mydir_mtime, cache_data)
                self._aux_cache["modified"].add(mycpv)

        if _slot_re.match(mydata['SLOT']) is None:
            # Empty or invalid slot triggers InvalidAtom exceptions when
            # generating slot atoms for packages, so translate it to '0' here.
            mydata['SLOT'] = _unicode_decode('0')

        return [mydata[x] for x in wants]
599
    def _aux_get(self, mycpv, wants, st=None):
        """Read metadata values directly from the package's vdb directory.

        @param mycpv: installed package cpv
        @param wants: iterable of metadata key names; the pseudo-key
            "_mtime_" yields the directory mtime instead of a file read
        @param st: optional pre-fetched os.stat result for the directory
        @return: list of values in the same order as wants
        @raises KeyError: if the package directory is missing or not a
            directory
        @raises PermissionDenied: if the directory is not accessible
        """
        mydir = self.getpath(mycpv)
        if st is None:
            try:
                st = os.stat(mydir)
            except OSError as e:
                if e.errno == errno.ENOENT:
                    raise KeyError(mycpv)
                elif e.errno == PermissionDenied.errno:
                    raise PermissionDenied(mydir)
                else:
                    raise
        if not stat.S_ISDIR(st.st_mode):
            raise KeyError(mycpv)
        results = []
        for x in wants:
            if x == "_mtime_":
                results.append(st[stat.ST_MTIME])
                continue
            try:
                myf = codecs.open(
                    _unicode_encode(os.path.join(mydir, x),
                    encoding=_encodings['fs'], errors='strict'),
                    mode='r', encoding=_encodings['repo.content'],
                    errors='replace')
                try:
                    myd = myf.read()
                finally:
                    myf.close()
                # Preserve \n for metadata that is known to
                # contain multiple lines.
                if self._aux_multi_line_re.match(x) is None:
                    myd = " ".join(myd.split())
            except IOError:
                # A missing/unreadable metadata file yields an empty value.
                myd = _unicode_decode('')
            if x == "EAPI" and not myd:
                # An empty or missing EAPI file is reported as EAPI '0'.
                results.append(_unicode_decode('0'))
            else:
                results.append(myd)
        return results
640
641         def aux_update(self, cpv, values):
642                 mylink = self._dblink(cpv)
643                 if not mylink.exists():
644                         raise KeyError(cpv)
645                 self._bump_mtime(cpv)
646                 self._clear_pkg_cache(mylink)
647                 for k, v in values.items():
648                         if v:
649                                 mylink.setfile(k, v)
650                         else:
651                                 try:
652                                         os.unlink(os.path.join(self.getpath(cpv), k))
653                                 except EnvironmentError:
654                                         pass
655                 self._bump_mtime(cpv)
656
657         def counter_tick(self, myroot=None, mycpv=None):
658                 """
659                 @param myroot: ignored, self._eroot is used instead
660                 """
661                 return self.counter_tick_core(incrementing=1, mycpv=mycpv)
662
	def get_counter_tick_core(self, myroot=None, mycpv=None):
		"""
		Use this method to retrieve the counter instead
		of having to trust the value of a global counter
		file that can lead to invalid COUNTER
		generation. When cache is valid, the package COUNTER
		files are not read and we rely on the timestamp of
		the package directory to validate cache. The stat
		calls should only take a short time, so performance
		is sufficient without having to rely on a potentially
		corrupt global counter file.

		The global counter file located at
		$CACHE_PATH/counter serves to record the
		counter of the last installed package and
		it also corresponds to the total number of
		installation actions that have occurred in
		the history of this package database.

		@param myroot: ignored, self._eroot is used instead
		@return: the next counter value (one greater than the largest
			known counter)
		"""
		myroot = None
		cp_list = self.cp_list
		# Find the maximum COUNTER among installed packages, skipping
		# packages whose COUNTER file is missing or unparseable.
		max_counter = 0
		for cp in self.cp_all():
			for cpv in cp_list(cp):
				try:
					counter = int(self.aux_get(cpv, ["COUNTER"])[0])
				except (KeyError, OverflowError, ValueError):
					continue
				if counter > max_counter:
					max_counter = counter

		# new_vdb is set when there are no installed packages at all,
		# in which case a missing global counter file is expected and
		# no warning is printed.
		new_vdb = False
		counter = -1
		try:
			cfile = codecs.open(
				_unicode_encode(self._counter_path,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			new_vdb = not bool(self.cpv_all())
			if not new_vdb:
				writemsg(_("!!! Unable to read COUNTER file: '%s'\n") % \
					self._counter_path, noiselevel=-1)
				writemsg("!!! %s\n" % str(e), noiselevel=-1)
			del e
		else:
			try:
				try:
					# NOTE(review): `long` is presumably aliased to int
					# under python3 by portage's compat code — confirm.
					counter = long(cfile.readline().strip())
				finally:
					cfile.close()
			except (OverflowError, ValueError) as e:
				writemsg(_("!!! COUNTER file is corrupt: '%s'\n") % \
					self._counter_path, noiselevel=-1)
				writemsg("!!! %s\n" % str(e), noiselevel=-1)
				del e

		# We must ensure that we return a counter
		# value that is at least as large as the
		# highest one from the installed packages,
		# since having a corrupt value that is too low
		# can trigger incorrect AUTOCLEAN behavior due
		# to newly installed packages having lower
		# COUNTERs than the previous version in the
		# same slot.
		if counter > max_counter:
			max_counter = counter

		if counter < 0 and not new_vdb:
			writemsg(_("!!! Initializing COUNTER to " \
				"value of %d\n") % max_counter, noiselevel=-1)

		return max_counter + 1
739
740         def counter_tick_core(self, myroot=None, incrementing=1, mycpv=None):
741                 """
742                 This method will grab the next COUNTER value and record it back
743                 to the global file.  Returns new counter value.
744
745                 @param myroot: ignored, self._eroot is used instead
746                 """
747                 myroot = None
748                 counter = self.get_counter_tick_core(mycpv=mycpv) - 1
749                 if incrementing:
750                         #increment counter
751                         counter += 1
752                         # use same permissions as config._init_dirs()
753                         ensure_dirs(os.path.dirname(self._counter_path),
754                                 gid=portage_gid, mode=0o2750, mask=0o2)
755                         # update new global counter file
756                         write_atomic(self._counter_path, str(counter))
757                 return counter
758
759         def _dblink(self, cpv):
760                 category, pf = catsplit(cpv)
761                 return dblink(category, pf, settings=self.settings,
762                         vartree=self.vartree, treetype="vartree")
763
764         def removeFromContents(self, pkg, paths, relative_paths=True):
765                 """
766                 @param pkg: cpv for an installed package
767                 @type pkg: string
768                 @param paths: paths of files to remove from contents
769                 @type paths: iterable
770                 """
771                 if not hasattr(pkg, "getcontents"):
772                         pkg = self._dblink(pkg)
773                 root = self._eroot
774                 root_len = len(root) - 1
775                 new_contents = pkg.getcontents().copy()
776                 removed = 0
777
778                 for filename in paths:
779                         filename = _unicode_decode(filename,
780                                 encoding=_encodings['content'], errors='strict')
781                         filename = normalize_path(filename)
782                         if relative_paths:
783                                 relative_filename = filename
784                         else:
785                                 relative_filename = filename[root_len:]
786                         contents_key = pkg._match_contents(relative_filename)
787                         if contents_key:
788                                 del new_contents[contents_key]
789                                 removed += 1
790
791                 if removed:
792                         self._bump_mtime(pkg.mycpv)
793                         f = atomic_ofstream(os.path.join(pkg.dbdir, "CONTENTS"))
794                         write_contents(new_contents, root, f)
795                         f.close()
796                         self._bump_mtime(pkg.mycpv)
797                         pkg._clear_contents_cache()
798
	class _owners_cache(object):
		"""
		This class maintains an hash table that serves to index package
		contents by mapping the basename of file to a list of possible
		packages that own it. This is used to optimize owner lookups
		by narrowing the search down to a smaller number of packages.
		"""
		# Fall back to the deprecated md5 module for pythons that
		# lack hashlib.
		try:
			from hashlib import md5 as _new_hash
		except ImportError:
			from md5 import new as _new_hash

		# Number of digest bits kept for basename bucket keys.
		_hash_bits = 16
		# Hex digits needed to encode _hash_bits bits (4 bits each).
		_hex_chars = int(_hash_bits / 4)

		def __init__(self, vardb):
			# vardb: the vardbapi instance whose contents are indexed.
			self._vardb = vardb

		def add(self, cpv):
			"""
			Index the CONTENTS of the given installed package and mark
			the aux cache as modified so the index is written back out.
			"""
			root_len = len(self._vardb._eroot)
			contents = self._vardb._dblink(cpv).getcontents()
			pkg_hash = self._hash_pkg(cpv)
			if not contents:
				# Empty path is a code used to represent empty contents.
				self._add_path("", pkg_hash)
			for x in contents:
				# Paths are stored relative to the root.
				self._add_path(x[root_len:], pkg_hash)
			self._vardb._aux_cache["modified"].add(cpv)

		def _add_path(self, path, pkg_hash):
			"""
			Empty path is a code that represents empty contents.
			"""
			if path:
				name = os.path.basename(path.rstrip(os.path.sep))
				if not name:
					return
			else:
				name = path
			name_hash = self._hash_str(name)
			base_names = self._vardb._aux_cache["owners"]["base_names"]
			pkgs = base_names.get(name_hash)
			if pkgs is None:
				pkgs = {}
				base_names[name_hash] = pkgs
			# The dict is used as a set; values are unused.
			pkgs[pkg_hash] = None

		def _hash_str(self, s):
			"""Return a truncated integer digest of the given string."""
			h = self._new_hash()
			# Always use a constant utf_8 encoding here, since
			# the "default" encoding can change.
			h.update(_unicode_encode(s,
				encoding=_encodings['repo.content'],
				errors='backslashreplace'))
			h = h.hexdigest()
			# Keep only the last _hex_chars digits of the digest.
			h = h[-self._hex_chars:]
			h = int(h, 16)
			return h

		def _hash_pkg(self, cpv):
			"""
			Return a (cpv, COUNTER, mtime) tuple identifying a specific
			installed instance of a package; COUNTER and mtime allow
			stale cache entries to be detected.
			"""
			counter, mtime = self._vardb.aux_get(
				cpv, ["COUNTER", "_mtime_"])
			try:
				counter = int(counter)
			except ValueError:
				# Fall back to 0 for a corrupt COUNTER value.
				counter = 0
			return (cpv, counter, mtime)
866
	class _owners_db(object):
		"""
		Provides owner lookups (path -> installed package) on top of
		the basename index maintained by _owners_cache.
		"""

		def __init__(self, vardb):
			# vardb: the vardbapi instance to search.
			self._vardb = vardb

		def populate(self):
			"""Bring the owners cache up to date with the vardb."""
			self._populate()

		def _populate(self, scheduler=None):
			"""
			Validate the cached basename index against the installed
			package set: index any uncached packages and drop entries
			whose (cpv, COUNTER, mtime) hash is stale. Returns the
			_owners_cache instance.
			"""
			owners_cache = vardbapi._owners_cache(self._vardb)
			cached_hashes = set()
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			# Take inventory of all cached package hashes.
			for name, hash_values in list(base_names.items()):
				if not isinstance(hash_values, dict):
					# Discard malformed cache entries.
					del base_names[name]
					continue
				cached_hashes.update(hash_values)

			# Create sets of valid package hashes and uncached packages.
			uncached_pkgs = set()
			hash_pkg = owners_cache._hash_pkg
			valid_pkg_hashes = set()
			for cpv in self._vardb.cpv_all():
				hash_value = hash_pkg(cpv)
				valid_pkg_hashes.add(hash_value)
				if hash_value not in cached_hashes:
					uncached_pkgs.add(cpv)

			# Cache any missing packages.
			for cpv in uncached_pkgs:

				if scheduler is not None:
					scheduler.scheduleYield()

				owners_cache.add(cpv)

			# Delete any stale cache.
			stale_hashes = cached_hashes.difference(valid_pkg_hashes)
			if stale_hashes:
				for base_name_hash, bucket in list(base_names.items()):
					for hash_value in stale_hashes.intersection(bucket):
						del bucket[hash_value]
					if not bucket:
						# Drop buckets that became empty.
						del base_names[base_name_hash]

			return owners_cache

		def get_owners(self, path_iter, scheduler=None):
			"""
			@return the owners as a dblink -> set(files) mapping.
			"""
			owners = {}
			for owner, f in self.iter_owners(path_iter, scheduler=scheduler):
				owned_files = owners.get(owner)
				if owned_files is None:
					owned_files = set()
					owners[owner] = owned_files
				owned_files.add(f)
			return owners

		def getFileOwnerMap(self, path_iter):
			"""
			@return the inverse of get_owners(): a file -> set(dblink)
			mapping.
			"""
			owners = self.get_owners(path_iter)
			file_owners = {}
			for pkg_dblink, files in owners.items():
				for f in files:
					owner_set = file_owners.get(f)
					if owner_set is None:
						owner_set = set()
						file_owners[f] = owner_set
					owner_set.add(pkg_dblink)
			return file_owners

		def iter_owners(self, path_iter, scheduler=None):
			"""
			Iterate over tuples of (dblink, path). In order to avoid
			consuming too many resources for too much time, resources
			are only allocated for the duration of a given iter_owners()
			call. Therefore, to maximize reuse of resources when searching
			for multiple files, it's best to search for them all in a single
			call.
			"""

			if not isinstance(path_iter, list):
				path_iter = list(path_iter)

			# For large queries, skip the basename cache and stream
			# over all installed packages instead.
			if len(path_iter) > 10:
				for x in self._iter_owners_low_mem(path_iter,
					scheduler=scheduler):
					yield x
				return

			owners_cache = self._populate(scheduler=scheduler)

			vardb = self._vardb
			root = vardb._eroot
			hash_pkg = owners_cache._hash_pkg
			hash_str = owners_cache._hash_str
			base_names = self._vardb._aux_cache["owners"]["base_names"]

			dblink_cache = {}

			def dblink(cpv):
				# Memoized dblink factory. Raises StopIteration as a
				# signal (caught by the except clause below) to fall
				# back to the low-memory search when too many instances
				# accumulate.
				x = dblink_cache.get(cpv)
				if x is None:
					if len(dblink_cache) > 20:
						# Ensure that we don't run out of memory.
						raise StopIteration()
					x = self._vardb._dblink(cpv)
					dblink_cache[cpv] = x
				return x

			while path_iter:

				path = path_iter.pop()
				# A path without a leading os.sep is treated as a bare
				# basename query.
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))

				if not name:
					continue

				name_hash = hash_str(name)
				pkgs = base_names.get(name_hash)
				owners = []
				if pkgs is not None:
					try:
						for hash_value in pkgs:
							# Skip malformed cache entries.
							if not isinstance(hash_value, tuple) or \
								len(hash_value) != 3:
								continue
							cpv, counter, mtime = hash_value
							if not isinstance(cpv, basestring):
								continue
							try:
								current_hash = hash_pkg(cpv)
							except KeyError:
								# Package no longer installed.
								continue

							# Ignore stale cache entries.
							if current_hash != hash_value:
								continue

							if is_basename:
								for p in dblink(cpv).getcontents():
									if os.path.basename(p) == name:
										owners.append((cpv, p[len(root):]))
							else:
								if dblink(cpv).isowner(path):
									owners.append((cpv, path))

							if scheduler is not None:
								scheduler.scheduleYield()

					except StopIteration:
						# Signal from dblink() above: redo the current
						# and remaining queries via the low-memory
						# search, then stop.
						path_iter.append(path)
						del owners[:]
						dblink_cache.clear()
						gc.collect()
						for x in self._iter_owners_low_mem(path_iter,
							scheduler=scheduler):
							yield x
						return
					else:
						for cpv, p in owners:
							yield (dblink(cpv), p)

		def _iter_owners_low_mem(self, path_list, scheduler=None):
			"""
			This implementation will make a short-lived dblink instance
			(and parse CONTENTS) for every single installed package.
			This is slower but uses less memory than the method which
			uses the basename cache.
			"""

			if not path_list:
				return

			path_info_list = []
			for path in path_list:
				# See iter_owners(): a path without a leading os.sep
				# is a basename query.
				is_basename = os.sep != path[:1]
				if is_basename:
					name = path
				else:
					name = os.path.basename(path.rstrip(os.path.sep))
				path_info_list.append((path, name, is_basename))

			root = self._vardb._eroot
			for cpv in self._vardb.cpv_all():

				if scheduler is not None:
					scheduler.scheduleYield()

				dblnk =  self._vardb._dblink(cpv)

				for path, name, is_basename in path_info_list:
					if is_basename:
						for p in dblnk.getcontents():
							if os.path.basename(p) == name:
								yield dblnk, p[len(root):]
					else:
						if dblnk.isowner(path):
							yield dblnk, path
1072
class vartree(object):
	"this tree will scan a var/db/pkg database located at root (passed to init)"
	def __init__(self, root=None, virtual=None, categories=None,
		settings=None):
		"""
		@param root: deprecated; settings['ROOT'] is used instead
		@param virtual: unused, kept for backward compatibility
		@param categories: unused, kept for backward compatibility
		@param settings: typically portage.settings (the default)
		"""
		if settings is None:
			settings = portage.settings
		self.root = settings['ROOT']

		if root is not None and root != self.root:
			warnings.warn("The 'root' parameter of the " + \
				"portage.dbapi.vartree.vartree" + \
				" constructor is now unused. Use " + \
				"settings['ROOT'] instead.",
				DeprecationWarning, stacklevel=2)

		self.settings = settings
		self.dbapi = vardbapi(settings=settings, vartree=self)
		self.populated = 1

	def getpath(self, mykey, filename=None):
		return self.dbapi.getpath(mykey, filename=filename)

	def zap(self, mycpv):
		# No-op kept for interface compatibility.
		return

	def inject(self, mycpv):
		# No-op kept for interface compatibility.
		return

	def get_provide(self, mycpv):
		"""
		Return the list of cat/pkg names provided by the installed
		package, computed from its PROVIDE metadata with USE
		conditionals evaluated. On a parse failure a warning is
		printed and [] is returned.
		"""
		myprovides = []
		mylines = None
		try:
			mylines, myuse = self.dbapi.aux_get(mycpv, ["PROVIDE", "USE"])
			if mylines:
				myuse = myuse.split()
				mylines = use_reduce(mylines, uselist=myuse, flat=True)
				for myprovide in mylines:
					mys = catpkgsplit(myprovide)
					if not mys:
						mys = myprovide.split("/")
					myprovides += [mys[0] + "/" + mys[1]]
			return myprovides
		except SystemExit:
			# Never swallow SystemExit (the exception binding was
			# unused, so it has been removed).
			raise
		except Exception as e:
			mydir = self.dbapi.getpath(mycpv)
			writemsg(_("\nParse Error reading PROVIDE and USE in '%s'\n") % mydir,
				noiselevel=-1)
			if mylines:
				writemsg(_("Possibly Invalid: '%s'\n") % str(mylines),
					noiselevel=-1)
			writemsg(_("Exception: %s\n\n") % str(e), noiselevel=-1)
			return []

	def get_all_provides(self):
		"""Return a {provide_key: [cpv, ...]} map over all installed packages."""
		myprovides = {}
		for node in self.getallcpv():
			for mykey in self.get_provide(node):
				if mykey in myprovides:
					myprovides[mykey] += [node]
				else:
					myprovides[mykey] = [node]
		return myprovides

	def dep_bestmatch(self, mydep, use_cache=1):
		"compatibility method -- all matches, not just visible ones"
		mymatch = best(self.dbapi.match(
			dep_expand(mydep, mydb=self.dbapi, settings=self.settings),
			use_cache=use_cache))
		if mymatch is None:
			return ""
		else:
			return mymatch

	def dep_match(self, mydep, use_cache=1):
		"compatibility method -- we want to see all matches, not just visible ones"
		mymatch = self.dbapi.match(mydep, use_cache=use_cache)
		if mymatch is None:
			return []
		else:
			return mymatch

	def exists_specific(self, cpv):
		return self.dbapi.cpv_exists(cpv)

	def getallcpv(self):
		"""temporary function, probably to be renamed --- Gets a list of all
		category/package-versions installed on the system."""
		return self.dbapi.cpv_all()

	def getallnodes(self):
		"""new behavior: these are all *unmasked* nodes.  There may or may not be available
		masked package for nodes in this nodes list."""
		return self.dbapi.cp_all()

	def getebuildpath(self, fullpackage):
		cat, package = catsplit(fullpackage)
		return self.getpath(fullpackage, filename=package+".ebuild")

	def getslot(self, mycatpkg):
		"Get a slot for a catpkg; assume it exists."
		try:
			return self.dbapi.aux_get(mycatpkg, ["SLOT"])[0]
		except KeyError:
			return ""

	def populate(self):
		self.populated = 1
1184
1185 class dblink(object):
1186         """
1187         This class provides an interface to the installed package database
1188         At present this is implemented as a text backend in /var/db/pkg.
1189         """
1190
	import re
	# Matches paths that are not in normal form: doubled slashes, a
	# non-absolute first component, a trailing slash, or "." / ".."
	# path components anywhere.
	_normalize_needed = re.compile(r'//|^[^/]|./$|(^|/)\.\.?(/|$)')

	# Parses one CONTENTS line into named groups: "dir"-style entries
	# (dev/dir/fif + path), "obj" entries (path, digest, mtime) and
	# "sym" entries (source -> target, mtime).
	_contents_re = re.compile(r'^(' + \
		r'(?P<dir>(dev|dir|fif) (.+))|' + \
		r'(?P<obj>(obj) (.+) (\S+) (\d+))|' + \
		r'(?P<sym>(sym) (.+) -> (.+) (\d+))' + \
		r')$'
	)

	# When looping over files for merge/unmerge, temporarily yield to the
	# scheduler each time this many files are processed.
	_file_merge_yield_interval = 20
1204
	def __init__(self, cat, pkg, myroot=None, settings=None, treetype=None,
		vartree=None, blockers=None, scheduler=None):
		"""
		Creates a DBlink object for a given CPV.
		The given CPV may not be present in the database already.
		
		@param cat: Category
		@type cat: String
		@param pkg: Package (PV)
		@type pkg: String
		@param myroot: ignored, settings['ROOT'] is used instead
		@type myroot: String (Path)
		@param settings: Typically portage.settings
		@type settings: portage.config
		@param treetype: one of ['porttree','bintree','vartree']
		@type treetype: String
		@param vartree: an instance of vartree corresponding to myroot.
		@type vartree: vartree
		@param blockers: optional blockers object
		@param scheduler: optional scheduler used to yield periodically
			during long operations
		@raise TypeError: if settings is None
		"""

		if settings is None:
			raise TypeError("settings argument is required")

		mysettings = settings
		myroot = settings['ROOT']
		self.cat = cat
		self.pkg = pkg
		self.mycpv = self.cat + "/" + self.pkg
		self.mysplit = list(catpkgsplit(self.mycpv)[1:])
		self.mysplit[0] = "%s/%s" % (self.cat, self.mysplit[0])
		self.treetype = treetype
		if vartree is None:
			# Fall back to the global vartree for this root.
			vartree = portage.db[myroot]["vartree"]
		self.vartree = vartree
		self._blockers = blockers
		self._scheduler = scheduler

		# WARNING: EROOT support is experimental and may be incomplete
		# for cases in which EPREFIX is non-empty.
		self._eroot = mysettings['EROOT']
		self.dbroot = normalize_path(os.path.join(self._eroot, VDB_PATH))
		self.dbcatdir = self.dbroot+"/"+cat
		self.dbpkgdir = self.dbcatdir+"/"+pkg
		# The -MERGING- prefix marks a package directory whose merge
		# is in progress.
		self.dbtmpdir = self.dbcatdir+"/-MERGING-"+pkg
		self.dbdir = self.dbpkgdir

		# Lock object for the vdb, held only while it is locked.
		self._lock_vdb = None

		self.settings = mysettings
		self._verbose = self.settings.get("PORTAGE_VERBOSE") == "1"

		self.myroot=myroot
		self._installed_instance = None
		# Lazily populated caches of CONTENTS data.
		self.contentscache = None
		self._contents_inodes = None
		self._contents_basenames = None
		self._linkmap_broken = False
		self._md5_merge_map = {}
		# (root, cpv) pair used for __hash__/__eq__.
		self._hash_key = (self.myroot, self.mycpv)
		# Lazily constructed ConfigProtect instance.
		self._protect_obj = None
1265
1266         def __hash__(self):
1267                 return hash(self._hash_key)
1268
1269         def __eq__(self, other):
1270                 return isinstance(other, dblink) and \
1271                         self._hash_key == other._hash_key
1272
1273         def _get_protect_obj(self):
1274
1275                 if self._protect_obj is None:
1276                         self._protect_obj = ConfigProtect(self._eroot,
1277                         portage.util.shlex_split(
1278                                 self.settings.get("CONFIG_PROTECT", "")),
1279                         portage.util.shlex_split(
1280                                 self.settings.get("CONFIG_PROTECT_MASK", "")))
1281
1282                 return self._protect_obj
1283
1284         def isprotected(self, obj):
1285                 return self._get_protect_obj().isprotected(obj)
1286
1287         def updateprotect(self):
1288                 self._get_protect_obj().updateprotect()
1289
1290         def lockdb(self):
1291                 if self._lock_vdb:
1292                         raise AssertionError("Lock already held.")
1293                 # At least the parent needs to exist for the lock file.
1294                 ensure_dirs(self.dbroot)
1295                 if self._scheduler is None:
1296                         self._lock_vdb = lockdir(self.dbroot)
1297                 else:
1298                         async_lock = AsynchronousLock(path=self.dbroot,
1299                                 scheduler=self._scheduler)
1300                         async_lock.start()
1301                         async_lock.wait()
1302                         self._lock_vdb = async_lock.lock_obj
1303
1304         def unlockdb(self):
1305                 if self._lock_vdb:
1306                         unlockdir(self._lock_vdb)
1307                         self._lock_vdb = None
1308
1309         def getpath(self):
1310                 "return path to location of db information (for >>> informational display)"
1311                 return self.dbdir
1312
1313         def exists(self):
1314                 "does the db entry exist?  boolean."
1315                 return os.path.exists(self.dbdir)
1316
1317         def delete(self):
1318                 """
1319                 Remove this entry from the database
1320                 """
1321                 if not os.path.exists(self.dbdir):
1322                         return
1323
1324                 # Check validity of self.dbdir before attempting to remove it.
1325                 if not self.dbdir.startswith(self.dbroot):
1326                         writemsg(_("portage.dblink.delete(): invalid dbdir: %s\n") % \
1327                                 self.dbdir, noiselevel=-1)
1328                         return
1329
1330                 shutil.rmtree(self.dbdir)
1331                 # If empty, remove parent category directory.
1332                 try:
1333                         os.rmdir(os.path.dirname(self.dbdir))
1334                 except OSError:
1335                         pass
1336                 self.vartree.dbapi._remove(self)
1337
1338         def clearcontents(self):
1339                 """
1340                 For a given db entry (self), erase the CONTENTS values.
1341                 """
1342                 if os.path.exists(self.dbdir+"/CONTENTS"):
1343                         os.unlink(self.dbdir+"/CONTENTS")
1344
1345         def _clear_contents_cache(self):
1346                 self.contentscache = None
1347                 self._contents_inodes = None
1348                 self._contents_basenames = None
1349
	def getcontents(self):
		"""
		Get the installed files of a given package (aka what that package installed)

		Parses this entry's CONTENTS file and returns a dict mapping each
		installed path to a type-specific data tuple:
		obj -> (type, mtime, md5sum), sym -> (type, mtime, dest),
		dir -> (type,). The result is cached in self.contentscache; parse
		errors are reported via writemsg() and the offending lines skipped.
		"""
		contents_file = os.path.join(self.dbdir, "CONTENTS")
		if self.contentscache is not None:
			return self.contentscache
		pkgfiles = {}
		try:
			myc = codecs.open(_unicode_encode(contents_file,
				encoding=_encodings['fs'], errors='strict'),
				mode='r', encoding=_encodings['repo.content'],
				errors='replace')
		except EnvironmentError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# A missing CONTENTS file is treated as an empty package
			# (and the empty result is cached).
			self.contentscache = pkgfiles
			return pkgfiles
		mylines = myc.readlines()
		myc.close()
		null_byte = "\0"
		# _normalize_needed and _contents_re are class attributes defined
		# outside this section; the group(base+N) arithmetic below relies
		# on the fixed ordering of groups inside _contents_re.
		normalize_needed = self._normalize_needed
		contents_re = self._contents_re
		obj_index = contents_re.groupindex['obj']
		dir_index = contents_re.groupindex['dir']
		sym_index = contents_re.groupindex['sym']
		# CONTENTS files already contain EPREFIX
		myroot = self.settings['ROOT']
		if myroot == os.path.sep:
			# ROOT is "/", so no prefix join is needed below.
			myroot = None
		pos = 0
		errors = []
		for pos, line in enumerate(mylines):
			if null_byte in line:
				# Null bytes are a common indication of corruption.
				errors.append((pos + 1, _("Null byte found in CONTENTS entry")))
				continue
			line = line.rstrip("\n")
			m = contents_re.match(line)
			if m is None:
				errors.append((pos + 1, _("Unrecognized CONTENTS entry")))
				continue

			if m.group(obj_index) is not None:
				base = obj_index
				#format: type, mtime, md5sum
				data = (m.group(base+1), m.group(base+4), m.group(base+3))
			elif m.group(dir_index) is not None:
				base = dir_index
				#format: type
				data = (m.group(base+1),)
			elif m.group(sym_index) is not None:
				base = sym_index
				#format: type, mtime, dest
				data = (m.group(base+1), m.group(base+4), m.group(base+3))
			else:
				# This won't happen as long the regular expression
				# is written to only match valid entries.
				raise AssertionError(_("required group not found " + \
					"in CONTENTS entry: '%s'") % line)

			# group(base+2) is the path portion of the matched entry.
			path = m.group(base+2)
			if normalize_needed.search(path) is not None:
				path = normalize_path(path)
				if not path.startswith(os.path.sep):
					path = os.path.sep + path

			if myroot is not None:
				# Re-anchor the path under the configured ROOT.
				path = os.path.join(myroot, path.lstrip(os.path.sep))

			pkgfiles[path] = data

		if errors:
			writemsg(_("!!! Parse error in '%s'\n") % contents_file, noiselevel=-1)
			for pos, e in errors:
				writemsg(_("!!!   line %d: %s\n") % (pos, e), noiselevel=-1)
		self.contentscache = pkgfiles
		return pkgfiles
1429
	def unmerge(self, pkgfiles=None, trimworld=None, cleanup=True,
		ldpath_mtimes=None, others_in_slot=None):
		"""
		Calls prerm
		Unmerges a given package (CPV)
		calls postrm
		calls cleanrm
		calls env_update

		@param pkgfiles: files to unmerge (generally self.getcontents() )
		@type pkgfiles: Dictionary
		@param trimworld: Deprecated and unused; passing a non-None value
			only triggers a DeprecationWarning
		@type trimworld: Boolean
		@param cleanup: cleanup to pass to doebuild (see doebuild)
		@type cleanup: Boolean
		@param ldpath_mtimes: mtimes to pass to env_update (see env_update)
		@type ldpath_mtimes: Dictionary
		@param others_in_slot: all dblink instances in this slot, excluding self
		@type others_in_slot: list
		@rtype: Integer
		@returns:
		1. os.EX_OK if everything went well.
		2. return code of the failed phase (for prerm, postrm, cleanrm)

		NOTE(review): in the code below, prerm/postrm failures are counted
		and logged but os.EX_OK is still returned at the end; the only
		non-OK return here comes from the _security_check() early return.

		Notes:
		The caller must ensure that lockdb() and unlockdb() are called
		before and after this method.
		"""

		if trimworld is not None:
			warnings.warn("The trimworld parameter of the " + \
				"portage.dbapi.vartree.dblink.unmerge()" + \
				" method is now unused.",
				DeprecationWarning, stacklevel=2)

		self.vartree.dbapi._bump_mtime(self.mycpv)
		showMessage = self._display_merge
		# Invalidate the vardbapi category cache before removal.
		if self.vartree.dbapi._categories is not None:
			self.vartree.dbapi._categories = None
		# When others_in_slot is supplied, the security check has already been
		# done for this slot, so it shouldn't be repeated until the next
		# replacement or unmerge operation.
		if others_in_slot is None:
			slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
			slot_matches = self.vartree.dbapi.match(
				"%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
			others_in_slot = []
			for cur_cpv in slot_matches:
				if cur_cpv == self.mycpv:
					continue
				others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
					settings=self.settings, vartree=self.vartree,
					treetype="vartree"))

			retval = self._security_check([self] + others_in_slot)
			if retval:
				return retval

		# Capture CONTENTS now; env_update() needs it after removal.
		contents = self.getcontents()
		# Now, don't assume that the name of the ebuild is the same as the
		# name of the dir; the package may have been moved.
		myebuildpath = None
		ebuild_phase = "prerm"
		log_path = None
		mystuff = os.listdir(self.dbdir)
		for x in mystuff:
			if x.endswith(".ebuild"):
				myebuildpath = os.path.join(self.dbdir, self.pkg + ".ebuild")
				if x[:-7] != self.pkg:
					# Clean up after vardbapi.move_ent() breakage in
					# portage versions before 2.1.2
					os.rename(os.path.join(self.dbdir, x), myebuildpath)
					write_atomic(os.path.join(self.dbdir, "PF"), self.pkg+"\n")
				break

		self.settings.setcpv(self.mycpv, mydb=self.vartree.dbapi)
		if myebuildpath:
			try:
				doebuild_environment(myebuildpath, "prerm",
					settings=self.settings, db=self.vartree.dbapi)
			except UnsupportedAPIException as e:
				# Sometimes this happens due to corruption of the EAPI file.
				writemsg(_("!!! FAILED prerm: %s\n") % \
					os.path.join(self.dbdir, "EAPI"), noiselevel=-1)
				writemsg("%s\n" % str(e), noiselevel=-1)
				# Disable the ebuild phases entirely for this unmerge.
				myebuildpath = None

		# Run the removal phases with the build directory locked, so the
		# finally block below can always clean up and release the lock.
		builddir_lock = None
		scheduler = self._scheduler
		retval = os.EX_OK
		failures = 0
		try:
			if myebuildpath:
				builddir_lock = EbuildBuildDir(
					scheduler=(scheduler or PollScheduler().sched_iface),
					settings=self.settings)
				builddir_lock.lock()

				prepare_build_dirs(settings=self.settings, cleanup=True)
				log_path = self.settings.get("PORTAGE_LOG_FILE")

				if scheduler is None:
					retval = _spawn_phase('prerm', self.settings)
				else:
					retval = scheduler.dblinkEbuildPhase(
						self, self.vartree.dbapi, myebuildpath, ebuild_phase)

				# XXX: Decide how to handle failures here.
				if retval != os.EX_OK:
					failures += 1
					writemsg(_("!!! FAILED prerm: %s\n") % retval, noiselevel=-1)

			self._unmerge_pkgfiles(pkgfiles, others_in_slot)
			self._clear_contents_cache()

			# Remove the registration of preserved libs for this pkg instance
			plib_registry = self.vartree.dbapi._plib_registry
			if plib_registry is None:
				# preserve-libs is entirely disabled
				pass
			else:
				plib_registry.unregister(self.mycpv, self.settings["SLOT"],
					self.vartree.dbapi.cpv_counter(self.mycpv))

			if myebuildpath:
				ebuild_phase = "postrm"
				if scheduler is None:
					retval = _spawn_phase(ebuild_phase, self.settings)
				else:
					retval = scheduler.dblinkEbuildPhase(
						self, self.vartree.dbapi, myebuildpath, ebuild_phase)

				# XXX: Decide how to handle failures here.
				if retval != os.EX_OK:
					failures += 1
					writemsg(_("!!! FAILED postrm: %s\n") % retval, noiselevel=-1)

			# Skip this if another package in the same slot has just been
			# merged on top of this package, since the other package has
			# already called LinkageMap.rebuild() and passed it's NEEDED file
			# in as an argument.
			if not others_in_slot:
				self._linkmap_rebuild(exclude_pkgs=(self.mycpv,))

			# remove preserved libraries that don't have any consumers left
			cpv_lib_map = self._find_unused_preserved_libs()
			if cpv_lib_map:
				self._remove_preserved_libs(cpv_lib_map)
				for cpv, removed in cpv_lib_map.items():
					if not self.vartree.dbapi.cpv_exists(cpv):
						for dblnk in others_in_slot:
							if dblnk.mycpv == cpv:
								# This one just got merged so it doesn't
								# register with cpv_exists() yet.
								self.vartree.dbapi.removeFromContents(
									dblnk, removed)
								break
						continue
					self.vartree.dbapi.removeFromContents(cpv, removed)
			else:
				# Prune any preserved libs that may have
				# been unmerged with this package.
				if plib_registry is None:
					# preserve-libs is entirely disabled
					pass
				else:
					plib_registry.pruneNonExisting()

		finally:
			self.vartree.dbapi._bump_mtime(self.mycpv)
			if builddir_lock:
				try:
					if myebuildpath:
						if retval != os.EX_OK:
							# Build a detailed eerror report explaining how
							# the user can recover from phase failures.
							msg_lines = []
							msg = _("The '%(ebuild_phase)s' "
							"phase of the '%(cpv)s' package "
							"has failed with exit value %(retval)s.") % \
							{"ebuild_phase":ebuild_phase, "cpv":self.mycpv,
							"retval":retval}
							from textwrap import wrap
							msg_lines.extend(wrap(msg, 72))
							msg_lines.append("")

							ebuild_name = os.path.basename(myebuildpath)
							ebuild_dir = os.path.dirname(myebuildpath)
							msg = _("The problem occurred while executing "
							"the ebuild file named '%(ebuild_name)s' "
							"located in the '%(ebuild_dir)s' directory. "
							"If necessary, manually remove "
							"the environment.bz2 file and/or the "
							"ebuild file located in that directory.") % \
							{"ebuild_name":ebuild_name, "ebuild_dir":ebuild_dir}
							msg_lines.extend(wrap(msg, 72))
							msg_lines.append("")

							msg = _("Removal "
							"of the environment.bz2 file is "
							"preferred since it may allow the "
							"removal phases to execute successfully. "
							"The ebuild will be "
							"sourced and the eclasses "
							"from the current portage tree will be used "
							"when necessary. Removal of "
							"the ebuild file will cause the "
							"pkg_prerm() and pkg_postrm() removal "
							"phases to be skipped entirely.")
							msg_lines.extend(wrap(msg, 72))

							self._eerror(ebuild_phase, msg_lines)

						# process logs created during pre/postrm
						elog_process(self.mycpv, self.settings)

					if retval == os.EX_OK:
						# myebuildpath might be None, so ensure
						# it has a sane value for the clean phase,
						# even though it won't really be sourced.
						myebuildpath = os.path.join(self.dbdir,
							self.pkg + ".ebuild")
						doebuild_environment(myebuildpath, "cleanrm",
							settings=self.settings, db=self.vartree.dbapi)
						if scheduler is None:
							_spawn_phase("cleanrm", self.settings)
						else:
							scheduler.dblinkEbuildPhase(
								self, self.vartree.dbapi,
								myebuildpath, "cleanrm")
				finally:
					if builddir_lock is not None:
						builddir_lock.unlock()

		# Discard the unmerge log when it is unwanted or empty.
		if log_path is not None:

			if not failures and 'unmerge-logs' not in self.settings.features:
				try:
					os.unlink(log_path)
				except OSError:
					pass

			try:
				st = os.stat(log_path)
			except OSError:
				pass
			else:
				if st.st_size == 0:
					try:
						os.unlink(log_path)
					except OSError:
						pass

		if log_path is not None and os.path.exists(log_path):
			# Restore this since it gets lost somewhere above and it
			# needs to be set for _display_merge() to be able to log.
			# Note that the log isn't necessarily supposed to exist
			# since if PORT_LOGDIR is unset then it's a temp file
			# so it gets cleaned above.
			self.settings["PORTAGE_LOG_FILE"] = log_path
		else:
			self.settings.pop("PORTAGE_LOG_FILE", None)

		env_update(target_root=self.settings['ROOT'],
			prev_mtimes=ldpath_mtimes,
			contents=contents, env=self.settings.environ(),
			writemsg_level=self._display_merge)
		return os.EX_OK
1696
1697         def _display_merge(self, msg, level=0, noiselevel=0):
1698                 if not self._verbose and noiselevel >= 0 and level < logging.WARN:
1699                         return
1700                 if self._scheduler is not None:
1701                         self._scheduler.dblinkDisplayMerge(self, msg,
1702                                 level=level, noiselevel=noiselevel)
1703                         return
1704                 writemsg_level(msg, level=level, noiselevel=noiselevel)
1705
1706         def _unmerge_pkgfiles(self, pkgfiles, others_in_slot):
1707                 """
1708                 
1709                 Unmerges the contents of a package from the liveFS
1710                 Removes the VDB entry for self
1711                 
1712                 @param pkgfiles: typically self.getcontents()
1713                 @type pkgfiles: Dictionary { filename: [ 'type', '?', 'md5sum' ] }
1714                 @param others_in_slot: all dblink instances in this slot, excluding self
1715                 @type others_in_slot: list
1716                 @rtype: None
1717                 """
1718
1719                 os = _os_merge
1720                 perf_md5 = perform_md5
1721                 showMessage = self._display_merge
1722                 scheduler = self._scheduler
1723
1724                 if not pkgfiles:
1725                         showMessage(_("No package files given... Grabbing a set.\n"))
1726                         pkgfiles = self.getcontents()
1727
1728                 if others_in_slot is None:
1729                         others_in_slot = []
1730                         slot = self.vartree.dbapi.aux_get(self.mycpv, ["SLOT"])[0]
1731                         slot_matches = self.vartree.dbapi.match(
1732                                 "%s:%s" % (portage.cpv_getkey(self.mycpv), slot))
1733                         for cur_cpv in slot_matches:
1734                                 if cur_cpv == self.mycpv:
1735                                         continue
1736                                 others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
1737                                         settings=self.settings,
1738                                         vartree=self.vartree, treetype="vartree"))
1739
1740                 dest_root = self._eroot
1741                 dest_root_len = len(dest_root) - 1
1742
1743                 conf_mem_file = os.path.join(dest_root, CONFIG_MEMORY_FILE)
1744                 cfgfiledict = grabdict(conf_mem_file)
1745                 stale_confmem = []
1746
1747                 unmerge_orphans = "unmerge-orphans" in self.settings.features
1748                 calc_prelink = "prelink-checksums" in self.settings.features
1749
1750                 if pkgfiles:
1751                         self.updateprotect()
1752                         mykeys = list(pkgfiles)
1753                         mykeys.sort()
1754                         mykeys.reverse()
1755
1756                         #process symlinks second-to-last, directories last.
1757                         mydirs = []
1758                         ignored_unlink_errnos = (
1759                                 errno.EBUSY, errno.ENOENT,
1760                                 errno.ENOTDIR, errno.EISDIR)
1761                         ignored_rmdir_errnos = (
1762                                 errno.EEXIST, errno.ENOTEMPTY,
1763                                 errno.EBUSY, errno.ENOENT,
1764                                 errno.ENOTDIR, errno.EISDIR)
1765                         modprotect = os.path.join(self._eroot, "lib/modules/")
1766
			def unlink(file_name, lstatobj):
				# Remove one file or symlink during unmerge.  On BSD-like
				# systems, immutable-style file flags would make chmod/unlink
				# fail, so flags are cleared on the file and its parent
				# directory first, and the parent's flags are restored after.
				if bsd_chflags:
					if lstatobj.st_flags != 0:
						# Clear flags on the file itself (lchflags so a
						# symlink is not followed).
						bsd_chflags.lchflags(file_name, 0)
					parent_name = os.path.dirname(file_name)
					# Use normal stat/chflags for the parent since we want to
					# follow any symlinks to the real parent directory.
					pflags = os.stat(parent_name).st_flags
					if pflags != 0:
						bsd_chflags.chflags(parent_name, 0)
				try:
					if not stat.S_ISLNK(lstatobj.st_mode):
						# Remove permissions to ensure that any hardlinks to
						# suid/sgid files are rendered harmless.
						os.chmod(file_name, 0)
					os.unlink(file_name)
				except OSError as ose:
					# If the chmod or unlink fails, you are in trouble.
					# With Prefix this can be because the file is owned
					# by someone else (a screwup by root?), on a normal
					# system maybe filesystem corruption.  In any case,
					# if we backtrace and die here, we leave the system
					# in a totally undefined state, hence we just bleed
					# like hell and continue to hopefully finish all our
					# administrative and pkg_postinst stuff.
					self._eerror("postrm",
						["Could not chmod or unlink '%s': %s" % \
						(file_name, ose)])
				finally:
					if bsd_chflags and pflags != 0:
						# Restore the parent flags we saved before unlinking
						bsd_chflags.chflags(parent_name, pflags)
1799
1800                         def show_unmerge(zing, desc, file_type, file_name):
1801                                         showMessage("%s %s %s %s\n" % \
1802                                                 (zing, desc.ljust(8), file_type, file_name))
1803
1804                         unmerge_desc = {}
1805                         unmerge_desc["cfgpro"] = _("cfgpro")
1806                         unmerge_desc["replaced"] = _("replaced")
1807                         unmerge_desc["!dir"] = _("!dir")
1808                         unmerge_desc["!empty"] = _("!empty")
1809                         unmerge_desc["!fif"] = _("!fif")
1810                         unmerge_desc["!found"] = _("!found")
1811                         unmerge_desc["!md5"] = _("!md5")
1812                         unmerge_desc["!mtime"] = _("!mtime")
1813                         unmerge_desc["!obj"] = _("!obj")
1814                         unmerge_desc["!sym"] = _("!sym")
1815
1816                         real_root = self.settings['ROOT']
1817                         real_root_len = len(real_root) - 1
1818
1819                         for i, objkey in enumerate(mykeys):
1820
1821                                 if scheduler is not None and \
1822                                         0 == i % self._file_merge_yield_interval:
1823                                         scheduler.scheduleYield()
1824
1825                                 obj = normalize_path(objkey)
1826                                 if os is _os_merge:
1827                                         try:
1828                                                 _unicode_encode(obj,
1829                                                         encoding=_encodings['merge'], errors='strict')
1830                                         except UnicodeEncodeError:
1831                                                 # The package appears to have been merged with a 
1832                                                 # different value of sys.getfilesystemencoding(),
1833                                                 # so fall back to utf_8 if appropriate.
1834                                                 try:
1835                                                         _unicode_encode(obj,
1836                                                                 encoding=_encodings['fs'], errors='strict')
1837                                                 except UnicodeEncodeError:
1838                                                         pass
1839                                                 else:
1840                                                         os = portage.os
1841                                                         perf_md5 = portage.checksum.perform_md5
1842
1843                                 file_data = pkgfiles[objkey]
1844                                 file_type = file_data[0]
1845                                 statobj = None
1846                                 try:
1847                                         statobj = os.stat(obj)
1848                                 except OSError:
1849                                         pass
1850                                 lstatobj = None
1851                                 try:
1852                                         lstatobj = os.lstat(obj)
1853                                 except (OSError, AttributeError):
1854                                         pass
1855                                 islink = lstatobj is not None and stat.S_ISLNK(lstatobj.st_mode)
1856                                 if lstatobj is None:
1857                                                 show_unmerge("---", unmerge_desc["!found"], file_type, obj)
1858                                                 continue
1859                                 # don't use EROOT, CONTENTS entries already contain EPREFIX
1860                                 if obj.startswith(real_root):
1861                                         relative_path = obj[real_root_len:]
1862                                         is_owned = False
1863                                         for dblnk in others_in_slot:
1864                                                 if dblnk.isowner(relative_path):
1865                                                         is_owned = True
1866                                                         break
1867                                         if is_owned:
1868                                                 # A new instance of this package claims the file, so
1869                                                 # don't unmerge it.
1870                                                 show_unmerge("---", unmerge_desc["replaced"], file_type, obj)
1871                                                 continue
1872                                         elif relative_path in cfgfiledict:
1873                                                 stale_confmem.append(relative_path)
1874                                 # next line includes a tweak to protect modules from being unmerged,
1875                                 # but we don't protect modules from being overwritten if they are
1876                                 # upgraded. We effectively only want one half of the config protection
1877                                 # functionality for /lib/modules. For portage-ng both capabilities
1878                                 # should be able to be independently specified.
1879                                 # TODO: For rebuilds, re-parent previous modules to the new
1880                                 # installed instance (so they are not orphans). For normal
1881                                 # uninstall (not rebuild/reinstall), remove the modules along
1882                                 # with all other files (leave no orphans).
1883                                 if obj.startswith(modprotect):
1884                                         show_unmerge("---", unmerge_desc["cfgpro"], file_type, obj)
1885                                         continue
1886
1887                                 # Don't unlink symlinks to directories here since that can
1888                                 # remove /lib and /usr/lib symlinks.
1889                                 if unmerge_orphans and \
1890                                         lstatobj and not stat.S_ISDIR(lstatobj.st_mode) and \
1891                                         not (islink and statobj and stat.S_ISDIR(statobj.st_mode)) and \
1892                                         not self.isprotected(obj):
1893                                         try:
1894                                                 unlink(obj, lstatobj)
1895                                         except EnvironmentError as e:
1896                                                 if e.errno not in ignored_unlink_errnos:
1897                                                         raise
1898                                                 del e
1899                                         show_unmerge("<<<", "", file_type, obj)
1900                                         continue
1901
1902                                 lmtime = str(lstatobj[stat.ST_MTIME])
1903                                 if (pkgfiles[objkey][0] not in ("dir", "fif", "dev")) and (lmtime != pkgfiles[objkey][1]):
1904                                         show_unmerge("---", unmerge_desc["!mtime"], file_type, obj)
1905                                         continue
1906
1907                                 if pkgfiles[objkey][0] == "dir":
1908                                         if lstatobj is None or not stat.S_ISDIR(lstatobj.st_mode):
1909                                                 show_unmerge("---", unmerge_desc["!dir"], file_type, obj)
1910                                                 continue
1911                                         mydirs.append(obj)
1912                                 elif pkgfiles[objkey][0] == "sym":
1913                                         if not islink:
1914                                                 show_unmerge("---", unmerge_desc["!sym"], file_type, obj)
1915                                                 continue
1916                                         # Go ahead and unlink symlinks to directories here when
1917                                         # they're actually recorded as symlinks in the contents.
1918                                         # Normally, symlinks such as /lib -> lib64 are not recorded
1919                                         # as symlinks in the contents of a package.  If a package
1920                                         # installs something into ${D}/lib/, it is recorded in the
1921                                         # contents as a directory even if it happens to correspond
1922                                         # to a symlink when it's merged to the live filesystem.
1923                                         try:
1924                                                 unlink(obj, lstatobj)
1925                                                 show_unmerge("<<<", "", file_type, obj)
1926                                         except (OSError, IOError) as e:
1927                                                 if e.errno not in ignored_unlink_errnos:
1928                                                         raise
1929                                                 del e
1930                                                 show_unmerge("!!!", "", file_type, obj)
1931                                 elif pkgfiles[objkey][0] == "obj":
1932                                         if statobj is None or not stat.S_ISREG(statobj.st_mode):
1933                                                 show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
1934                                                 continue
1935                                         mymd5 = None
1936                                         try:
1937                                                 mymd5 = perf_md5(obj, calc_prelink=calc_prelink)
1938                                         except FileNotFound as e:
1939                                                 # the file has disappeared between now and our stat call
1940                                                 show_unmerge("---", unmerge_desc["!obj"], file_type, obj)
1941                                                 continue
1942
1943                                         # string.lower is needed because db entries used to be in upper-case.  The
1944                                         # string.lower allows for backwards compatibility.
1945                                         if mymd5 != pkgfiles[objkey][2].lower():
1946                                                 show_unmerge("---", unmerge_desc["!md5"], file_type, obj)
1947                                                 continue
1948                                         try:
1949                                                 unlink(obj, lstatobj)
1950                                         except (OSError, IOError) as e:
1951                                                 if e.errno not in ignored_unlink_errnos:
1952                                                         raise
1953                                                 del e
1954                                         show_unmerge("<<<", "", file_type, obj)
1955                                 elif pkgfiles[objkey][0] == "fif":
1956                                         if not stat.S_ISFIFO(lstatobj[stat.ST_MODE]):
1957                                                 show_unmerge("---", unmerge_desc["!fif"], file_type, obj)
1958                                                 continue
1959                                         show_unmerge("---", "", file_type, obj)
1960                                 elif pkgfiles[objkey][0] == "dev":
1961                                         show_unmerge("---", "", file_type, obj)
1962
1963                         mydirs.sort()
1964                         mydirs.reverse()
1965
1966                         for obj in mydirs:
1967                                 try:
1968                                         if bsd_chflags:
1969                                                 lstatobj = os.lstat(obj)
1970                                                 if lstatobj.st_flags != 0:
1971                                                         bsd_chflags.lchflags(obj, 0)
1972                                                 parent_name = os.path.dirname(obj)
1973                                                 # Use normal stat/chflags for the parent since we want to
1974                                                 # follow any symlinks to the real parent directory.
1975                                                 pflags = os.stat(parent_name).st_flags
1976                                                 if pflags != 0:
1977                                                         bsd_chflags.chflags(parent_name, 0)
1978                                         try:
1979                                                 os.rmdir(obj)
1980                                         finally:
1981                                                 if bsd_chflags and pflags != 0:
1982                                                         # Restore the parent flags we saved before unlinking
1983                                                         bsd_chflags.chflags(parent_name, pflags)
1984                                         show_unmerge("<<<", "", "dir", obj)
1985                                 except EnvironmentError as e:
1986                                         if e.errno not in ignored_rmdir_errnos:
1987                                                 raise
1988                                         if e.errno != errno.ENOENT:
1989                                                 show_unmerge("---", unmerge_desc["!empty"], "dir", obj)
1990                                         del e
1991
1992                 # Remove stale entries from config memory.
1993                 if stale_confmem:
1994                         for filename in stale_confmem:
1995                                 del cfgfiledict[filename]
1996                         writedict(cfgfiledict, conf_mem_file)
1997
1998                 #remove self from vartree database so that our own virtual gets zapped if we're the last node
1999                 self.vartree.zap(self.mycpv)
2000
2001         def isowner(self, filename, destroot=None):
2002                 """ 
2003                 Check if a file belongs to this package. This may
2004                 result in a stat call for the parent directory of
2005                 every installed file, since the inode numbers are
2006                 used to work around the problem of ambiguous paths
2007                 caused by symlinked directories. The results of
2008                 stat calls are cached to optimize multiple calls
2009                 to this method.
2010
2011                 @param filename:
2012                 @type filename:
2013                 @param destroot:
2014                 @type destroot:
2015                 @rtype: Boolean
2016                 @returns:
2017                 1. True if this package owns the file.
2018                 2. False if this package does not own the file.
2019                 """
2020
2021                 if destroot is not None and destroot != self._eroot:
2022                         warnings.warn("The second parameter of the " + \
2023                                 "portage.dbapi.vartree.dblink.isowner()" + \
2024                                 " is now unused. Instead " + \
2025                                 "self.settings['EROOT'] will be used.",
2026                                 DeprecationWarning, stacklevel=2)
2027
2028                 return bool(self._match_contents(filename))
2029
	def _match_contents(self, filename, destroot=None):
		"""
		The matching contents entry is returned, which is useful
		since the path may differ from the one given by the caller,
		due to symlinks.

		@param filename: path of the file to look up
		@param destroot: deprecated and unused; self.settings['ROOT']
			is used instead
		@rtype: String
		@return: the contents entry corresponding to the given path, or False
			if the file is not owned by this package.
		"""

		# Decode the argument so comparisons against the CONTENTS keys
		# (which are unicode) are consistent.
		filename = _unicode_decode(filename,
			encoding=_encodings['content'], errors='strict')

		if destroot is not None and destroot != self._eroot:
			warnings.warn("The second parameter of the " + \
				"portage.dbapi.vartree.dblink._match_contents()" + \
				" is now unused. Instead " + \
				"self.settings['EROOT'] will be used.",
				DeprecationWarning, stacklevel=2)

		# don't use EROOT here, image already contains EPREFIX
		destroot = self.settings['ROOT']

		# The given filename argument might have a different encoding than the
		# the filenames contained in the contents, so use separate wrapped os
		# modules for each. The basename is more likely to contain non-ascii
		# characters than the directory path, so use os_filename_arg for all
		# operations involving the basename of the filename arg.
		os_filename_arg = _os_merge
		os = _os_merge

		try:
			_unicode_encode(filename,
				encoding=_encodings['merge'], errors='strict')
		except UnicodeEncodeError:
			# The package appears to have been merged with a
			# different value of sys.getfilesystemencoding(),
			# so fall back to utf_8 if appropriate.
			try:
				_unicode_encode(filename,
					encoding=_encodings['fs'], errors='strict')
			except UnicodeEncodeError:
				pass
			else:
				os_filename_arg = portage.os

		# Absolute, normalized candidate path under ROOT.
		destfile = normalize_path(
			os_filename_arg.path.join(destroot,
			filename.lstrip(os_filename_arg.path.sep)))

		pkgfiles = self.getcontents()
		# Fast path: the normalized path is recorded directly in CONTENTS.
		if pkgfiles and destfile in pkgfiles:
			return destfile
		if pkgfiles:
			basename = os_filename_arg.path.basename(destfile)
			if self._contents_basenames is None:

				# Lazily build and cache the set of basenames of all
				# installed files, selecting an os module whose encoding
				# can represent the CONTENTS entries.
				try:
					for x in pkgfiles:
						_unicode_encode(x,
							encoding=_encodings['merge'],
							errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['fs'],
								errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

				self._contents_basenames = set(
					os.path.basename(x) for x in pkgfiles)
			if basename not in self._contents_basenames:
				# This is a shortcut that, in most cases, allows us to
				# eliminate this package as an owner without the need
				# to examine inode numbers of parent directories.
				return False

			# Use stat rather than lstat since we want to follow
			# any symlinks to the real parent directory.
			parent_path = os_filename_arg.path.dirname(destfile)
			try:
				parent_stat = os_filename_arg.stat(parent_path)
			except EnvironmentError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
				# Parent directory does not exist, so nothing under it
				# can be owned by this package.
				return False
			if self._contents_inodes is None:

				if os is _os_merge:
					try:
						for x in pkgfiles:
							_unicode_encode(x,
								encoding=_encodings['merge'],
								errors='strict')
					except UnicodeEncodeError:
						# The package appears to have been merged with a
						# different value of sys.getfilesystemencoding(),
						# so fall back to utf_8 if appropriate.
						try:
							for x in pkgfiles:
								_unicode_encode(x,
									encoding=_encodings['fs'],
									errors='strict')
						except UnicodeEncodeError:
							pass
						else:
							os = portage.os

				# Lazily build and cache a map from each parent
				# directory's (st_dev, st_ino) to the CONTENTS parent
				# paths with that inode, so symlinked directory paths
				# can be matched by inode rather than string equality.
				self._contents_inodes = {}
				parent_paths = set()
				for x in pkgfiles:
					p_path = os.path.dirname(x)
					if p_path in parent_paths:
						continue
					parent_paths.add(p_path)
					try:
						s = os.stat(p_path)
					except OSError:
						pass
					else:
						inode_key = (s.st_dev, s.st_ino)
						# Use lists of paths in case multiple
						# paths reference the same inode.
						p_path_list = self._contents_inodes.get(inode_key)
						if p_path_list is None:
							p_path_list = []
							self._contents_inodes[inode_key] = p_path_list
						if p_path not in p_path_list:
							p_path_list.append(p_path)

			# If the argument's parent directory shares an inode with a
			# directory containing installed files, try joining the
			# basename with each of those recorded parent paths.
			p_path_list = self._contents_inodes.get(
				(parent_stat.st_dev, parent_stat.st_ino))
			if p_path_list:
				for p_path in p_path_list:
					x = os_filename_arg.path.join(p_path, basename)
					if x in pkgfiles:
						return x

		return False
2178
2179         def _linkmap_rebuild(self, **kwargs):
2180                 """
2181                 Rebuild the self._linkmap if it's not broken due to missing
2182                 scanelf binary. Also, return early if preserve-libs is disabled
2183                 and the preserve-libs registry is empty.
2184                 """
2185                 if self._linkmap_broken or \
2186                         self.vartree.dbapi._linkmap is None or \
2187                         self.vartree.dbapi._plib_registry is None or \
2188                         ("preserve-libs" not in self.settings.features and \
2189                         not self.vartree.dbapi._plib_registry.hasEntries()):
2190                         return
2191                 try:
2192                         self.vartree.dbapi._linkmap.rebuild(**kwargs)
2193                 except CommandNotFound as e:
2194                         self._linkmap_broken = True
2195                         self._display_merge(_("!!! Disabling preserve-libs " \
2196                                 "due to error: Command Not Found: %s\n") % (e,),
2197                                 level=logging.ERROR, noiselevel=-1)
2198
	def _find_libs_to_preserve(self):
		"""
		Get set of relative paths for libraries to be preserved. The file
		paths are selected from self._installed_instance.getcontents().

		@rtype: set or None
		@return: a set of paths (from the installed instance's contents)
			that still have outside consumers and should be preserved,
			or None when preserve-libs is disabled or inapplicable.
		"""
		# Bail out when preserve-libs cannot (or should not) operate.
		if self._linkmap_broken or \
			self.vartree.dbapi._linkmap is None or \
			self.vartree.dbapi._plib_registry is None or \
			self._installed_instance is None or \
			"preserve-libs" not in self.settings.features:
			return None

		os = _os_merge
		linkmap = self.vartree.dbapi._linkmap
		installed_instance = self._installed_instance
		old_contents = installed_instance.getcontents()
		root = self._eroot
		# Length used to strip root (minus trailing slash) from paths.
		root_len = len(root) - 1
		lib_graph = digraph()
		path_node_map = {}

		def path_to_node(path):
			# Return the graph node for the given path, creating it on
			# first use.  Distinct paths that map to the same node are
			# accumulated in node.alt_paths.
			node = path_node_map.get(path)
			if node is None:
				node = LinkageMap._LibGraphNode(path, root)
				alt_path_node = lib_graph.get(node)
				if alt_path_node is not None:
					node = alt_path_node
				node.alt_paths.add(path)
				path_node_map[path] = node
			return node

		consumer_map = {}
		provider_nodes = set()
		# Create provider nodes and add them to the graph.
		for f_abs in old_contents:

			if os is _os_merge:
				try:
					_unicode_encode(f_abs,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						_unicode_encode(f_abs,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						os = portage.os

			f = f_abs[root_len:]
			# Files also owned by the new instance are replaced, not
			# removed, so they never need preservation.
			if self.isowner(f):
				continue
			try:
				consumers = linkmap.findConsumers(f)
			except KeyError:
				# Not a known library in the linkage map.
				continue
			if not consumers:
				continue
			provider_node = path_to_node(f)
			lib_graph.add(provider_node, None)
			provider_nodes.add(provider_node)
			consumer_map[provider_node] = consumers

		# Create consumer nodes and add them to the graph.
		# Note that consumers can also be providers.
		for provider_node, consumers in consumer_map.items():
			for c in consumers:
				if self.isowner(c):
					continue
				consumer_node = path_to_node(c)
				if installed_instance.isowner(c) and \
					consumer_node not in provider_nodes:
					# This is not a provider, so it will be uninstalled.
					continue
				lib_graph.add(provider_node, consumer_node)

		# Locate nodes which should be preserved. They consist of all
		# providers that are reachable from consumers that are not
		# providers themselves.
		preserve_nodes = set()
		for consumer_node in lib_graph.root_nodes():
			if consumer_node in provider_nodes:
				continue
			# Preserve all providers that are reachable from this consumer.
			node_stack = lib_graph.child_nodes(consumer_node)
			while node_stack:
				provider_node = node_stack.pop()
				if provider_node in preserve_nodes:
					continue
				preserve_nodes.add(provider_node)
				node_stack.extend(lib_graph.child_nodes(provider_node))

		preserve_paths = set()
		for preserve_node in preserve_nodes:
			# Make sure that at least one of the paths is not a symlink.
			# This prevents symlinks from being erroneously preserved by
			# themselves when the old instance installed symlinks that
			# the new instance does not install.
			have_lib = False
			for f in preserve_node.alt_paths:
				f_abs = os.path.join(root, f.lstrip(os.sep))
				try:
					if stat.S_ISREG(os.lstat(f_abs).st_mode):
						have_lib = True
						break
				except OSError:
					# Path vanished or is unreadable; try the next one.
					continue

			if have_lib:
				preserve_paths.update(preserve_node.alt_paths)

		return preserve_paths
2315
	def _add_preserve_libs_to_contents(self, preserve_paths):
		"""
		Preserve libs returned from _find_libs_to_preserve().

		Copies the CONTENTS entries for the given paths (plus any parent
		directories they need) from the old installed instance into this
		package's pending CONTENTS file, so the preserved files become
		owned by the new instance. Paths with no entry in the old
		instance's contents are removed from preserve_paths (mutated in
		place) with an error message.

		@param preserve_paths: paths to preserve, as returned from
			_find_libs_to_preserve()
		@type preserve_paths: set
		"""

		if not preserve_paths:
			return

		os = _os_merge
		showMessage = self._display_merge
		root = self._eroot

		# Copy contents entries from the old package to the new one.
		new_contents = self.getcontents().copy()
		old_contents = self._installed_instance.getcontents()
		# Iterate over a sorted copy since preserve_paths may be
		# mutated (via remove) inside the loop.
		for f in sorted(preserve_paths):
			f = _unicode_decode(f,
				encoding=_encodings['content'], errors='strict')
			f_abs = os.path.join(root, f.lstrip(os.sep))
			contents_entry = old_contents.get(f_abs)
			if contents_entry is None:
				# This will probably never happen, but it might if one of the
				# paths returned from findConsumers() refers to one of the libs
				# that should be preserved yet the path is not listed in the
				# contents. Such a path might belong to some other package, so
				# it shouldn't be preserved here.
				showMessage(_("!!! File '%s' will not be preserved "
					"due to missing contents entry\n") % (f_abs,),
					level=logging.ERROR, noiselevel=-1)
				preserve_paths.remove(f)
				continue
			new_contents[f_abs] = contents_entry
			obj_type = contents_entry[0]
			showMessage(_(">>> needed    %s %s\n") % (obj_type, f_abs),
				noiselevel=-1)
			# Add parent directories to contents if necessary.
			parent_dir = os.path.dirname(f_abs)
			while len(parent_dir) > len(root):
				new_contents[parent_dir] = ["dir"]
				prev = parent_dir
				parent_dir = os.path.dirname(parent_dir)
				if prev == parent_dir:
					# dirname() reached a fixed point (filesystem root).
					break
		# Write the updated CONTENTS atomically and invalidate the
		# cached copy so it is re-read on next access.
		outfile = atomic_ofstream(os.path.join(self.dbtmpdir, "CONTENTS"))
		write_contents(new_contents, root, outfile)
		outfile.close()
		self._clear_contents_cache()
2363
	def _find_unused_preserved_libs(self):
		"""
		Find preserved libraries that don't have any consumers left.

		Builds a consumer graph over the registered preserved libraries
		and repeatedly peels off preserved nodes that no remaining
		consumer needs.

		@rtype: dict
		@returns: a map of cpv -> set of paths (library files belonging
			to that cpv which are no longer needed); empty dict when the
			linkmap is broken or preserve-libs bookkeeping is disabled.
		"""

		# Bail out when linkage data is unusable or nothing is preserved.
		if self._linkmap_broken or \
			self.vartree.dbapi._linkmap is None or \
			self.vartree.dbapi._plib_registry is None or \
			not self.vartree.dbapi._plib_registry.hasEntries():
			return {}

		# Since preserved libraries can be consumers of other preserved
		# libraries, use a graph to track consumer relationships.
		plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
		lib_graph = digraph()
		preserved_nodes = set()
		preserved_paths = set()
		path_cpv_map = {}
		path_node_map = {}
		root = self._eroot

		def path_to_node(path):
			# Map a path to a graph node, merging alternative paths
			# (e.g. symlink aliases) that resolve to the same node.
			node = path_node_map.get(path)
			if node is None:
				node = LinkageMap._LibGraphNode(path, root)
				alt_path_node = lib_graph.get(node)
				if alt_path_node is not None:
					node = alt_path_node
				node.alt_paths.add(path)
				path_node_map[path] = node
			return node

		linkmap = self.vartree.dbapi._linkmap
		# Add an edge preserved_node -> consumer_node for every consumer
		# of each preserved library that still exists on disk.
		for cpv, plibs in plib_dict.items():
			for f in plibs:
				path_cpv_map[f] = cpv
				preserved_node = path_to_node(f)
				if not preserved_node.file_exists():
					continue
				lib_graph.add(preserved_node, None)
				preserved_paths.add(f)
				preserved_nodes.add(preserved_node)
				for c in self.vartree.dbapi._linkmap.findConsumers(f):
					consumer_node = path_to_node(c)
					if not consumer_node.file_exists():
						continue
					# Note that consumers may also be providers.
					lib_graph.add(preserved_node, consumer_node)

		# Eliminate consumers having providers with the same soname as an
		# installed library that is not preserved. This eliminates
		# libraries that are erroneously preserved due to a move from one
		# directory to another.
		# provider_cache memoizes findProviders() per consumer node.
		provider_cache = {}
		for preserved_node in preserved_nodes:
			soname = linkmap.getSoname(preserved_node)
			for consumer_node in lib_graph.parent_nodes(preserved_node):
				if consumer_node in preserved_nodes:
					continue
				providers = provider_cache.get(consumer_node)
				if providers is None:
					providers = linkmap.findProviders(consumer_node)
					provider_cache[consumer_node] = providers
				providers = providers.get(soname)
				if providers is None:
					continue
				for provider in providers:
					if provider in preserved_paths:
						continue
					provider_node = path_to_node(provider)
					if not provider_node.file_exists():
						continue
					if provider_node in preserved_nodes:
						continue
					# An alternative provider seems to be
					# installed, so drop this edge.
					lib_graph.remove_edge(preserved_node, consumer_node)
					break

		# Repeatedly remove preserved root nodes (those without remaining
		# consumers) and record their paths per owning cpv.
		cpv_lib_map = {}
		while not lib_graph.empty():
			root_nodes = preserved_nodes.intersection(lib_graph.root_nodes())
			if not root_nodes:
				# Remaining preserved libs still have consumers.
				break
			lib_graph.difference_update(root_nodes)
			unlink_list = set()
			for node in root_nodes:
				unlink_list.update(node.alt_paths)
			unlink_list = sorted(unlink_list)
			for obj in unlink_list:
				cpv = path_cpv_map.get(obj)
				if cpv is None:
					# This means that a symlink is in the preserved libs
					# registry, but the actual lib it points to is not.
					self._display_merge(_("!!! symlink to lib is preserved, "
						"but not the lib itself:\n!!! '%s'\n") % (obj,),
						level=logging.ERROR, noiselevel=-1)
					continue
				removed = cpv_lib_map.get(cpv)
				if removed is None:
					removed = set()
					cpv_lib_map[cpv] = removed
				removed.add(obj)

		return cpv_lib_map
2469
2470         def _remove_preserved_libs(self, cpv_lib_map):
2471                 """
2472                 Remove files returned from _find_unused_preserved_libs().
2473                 """
2474
2475                 os = _os_merge
2476
2477                 files_to_remove = set()
2478                 for files in cpv_lib_map.values():
2479                         files_to_remove.update(files)
2480                 files_to_remove = sorted(files_to_remove)
2481                 showMessage = self._display_merge
2482                 root = self._eroot
2483
2484                 parent_dirs = set()
2485                 for obj in files_to_remove:
2486                         obj = os.path.join(root, obj.lstrip(os.sep))
2487                         parent_dirs.add(os.path.dirname(obj))
2488                         if os.path.islink(obj):
2489                                 obj_type = _("sym")
2490                         else:
2491                                 obj_type = _("obj")
2492                         try:
2493                                 os.unlink(obj)
2494                         except OSError as e:
2495                                 if e.errno != errno.ENOENT:
2496                                         raise
2497                                 del e
2498                         else:
2499                                 showMessage(_("<<< !needed  %s %s\n") % (obj_type, obj),
2500                                         noiselevel=-1)
2501
2502                 # Remove empty parent directories if possible.
2503                 while parent_dirs:
2504                         x = parent_dirs.pop()
2505                         while True:
2506                                 try:
2507                                         os.rmdir(x)
2508                                 except OSError:
2509                                         break
2510                                 prev = x
2511                                 x = os.path.dirname(x)
2512                                 if x == prev:
2513                                         break
2514
2515                 self.vartree.dbapi._plib_registry.pruneNonExisting()
2516
	def _collision_protect(self, srcroot, destroot, mypkglist, mycontents):
			"""
			Check the files this package installs for collisions with
			files already present on the filesystem that are not owned
			by any package in mypkglist.

			@param srcroot: Typically this is ${D}
			@param destroot: ignored, self.settings['ROOT'] is used instead
			@param mypkglist: dblink instances; ownership of a file by
				any of them means the file is not a collision
			@param mycontents: iterable of paths installed by this package
			@returns: a tuple (collisions, plib_collisions) where
				collisions is a list of colliding paths and plib_collisions
				maps cpv -> set of colliding preserved-lib paths that the
				current package will assume ownership of
			"""

			os = _os_merge

			# COLLISION_IGNORE lists normalized paths (or directory
			# prefixes) for which collisions are tolerated.
			collision_ignore = set([normalize_path(myignore) for myignore in \
				portage.util.shlex_split(
				self.settings.get("COLLISION_IGNORE", ""))])

			# For collisions with preserved libraries, the current package
			# will assume ownership and the libraries will be unregistered.
			if self.vartree.dbapi._plib_registry is None:
				# preserve-libs is entirely disabled
				plib_cpv_map = None
				plib_paths = None
				plib_inodes = {}
			else:
				plib_dict = self.vartree.dbapi._plib_registry.getPreservedLibs()
				plib_cpv_map = {}
				plib_paths = set()
				for cpv, paths in plib_dict.items():
					plib_paths.update(paths)
					for f in paths:
						plib_cpv_map[f] = cpv
				# Map (st_dev, st_ino) -> preserved-lib paths so that
				# collisions are detected through hardlinks as well.
				plib_inodes = self._lstat_inode_map(plib_paths)

			plib_collisions = {}

			showMessage = self._display_merge
			scheduler = self._scheduler
			stopmerge = False
			collisions = []
			# The destroot parameter is ignored in favor of ROOT.
			destroot = self.settings['ROOT']
			showMessage(_(" %s checking %d files for package collisions\n") % \
				(colorize("GOOD", "*"), len(mycontents)))
			for i, f in enumerate(mycontents):
				if i % 1000 == 0 and i != 0:
					showMessage(_("%d files checked ...\n") % i)

				# Periodically yield so other scheduler tasks can run.
				if scheduler is not None and \
					0 == i % self._file_merge_yield_interval:
					scheduler.scheduleYield()

				dest_path = normalize_path(
					os.path.join(destroot, f.lstrip(os.path.sep)))
				try:
					dest_lstat = os.lstat(dest_path)
				except EnvironmentError as e:
					if e.errno == errno.ENOENT:
						# Nothing exists at this path, so no collision.
						del e
						continue
					elif e.errno == errno.ENOTDIR:
						del e
						# A non-directory is in a location where this package
						# expects to have a directory.
						dest_lstat = None
						parent_path = dest_path
						# Walk upward until lstat succeeds on a parent;
						# that parent is the offending non-directory.
						while len(parent_path) > len(destroot):
							parent_path = os.path.dirname(parent_path)
							try:
								dest_lstat = os.lstat(parent_path)
								break
							except EnvironmentError as e:
								if e.errno != errno.ENOTDIR:
									raise
								del e
						if not dest_lstat:
							raise AssertionError(
								"unable to find non-directory " + \
								"parent for '%s'" % dest_path)
						dest_path = parent_path
						f = os.path.sep + dest_path[len(destroot):]
						if f in collisions:
							# This collision was already recorded.
							continue
					else:
						raise
				if f[0] != "/":
					f="/"+f

				# Collisions with preserved libraries are recorded
				# separately; the current package takes ownership.
				plibs = plib_inodes.get((dest_lstat.st_dev, dest_lstat.st_ino))
				if plibs:
					for path in plibs:
						cpv = plib_cpv_map[path]
						paths = plib_collisions.get(cpv)
						if paths is None:
							paths = set()
							plib_collisions[cpv] = paths
						paths.add(path)
					# The current package will assume ownership and the
					# libraries will be unregistered, so exclude this
					# path from the normal collisions.
					continue

				isowned = False
				full_path = os.path.join(destroot, f.lstrip(os.path.sep))
				for ver in mypkglist:
					if ver.isowner(f):
						isowned = True
						break
				# Config-protected files are not treated as collisions.
				if not isowned and self.isprotected(full_path):
					isowned = True
				if not isowned:
					stopmerge = True
					if collision_ignore:
						if f in collision_ignore:
							stopmerge = False
						else:
							# Also ignore files under an ignored directory.
							for myignore in collision_ignore:
								if f.startswith(myignore + os.path.sep):
									stopmerge = False
									break
					if stopmerge:
						collisions.append(f)
			return collisions, plib_collisions
2630
2631         def _lstat_inode_map(self, path_iter):
2632                 """
2633                 Use lstat to create a map of the form:
2634                   {(st_dev, st_ino) : set([path1, path2, ...])}
2635                 Multiple paths may reference the same inode due to hardlinks.
2636                 All lstat() calls are relative to self.myroot.
2637                 """
2638
2639                 os = _os_merge
2640
2641                 root = self._eroot
2642                 inode_map = {}
2643                 for f in path_iter:
2644                         path = os.path.join(root, f.lstrip(os.sep))
2645                         try:
2646                                 st = os.lstat(path)
2647                         except OSError as e:
2648                                 if e.errno not in (errno.ENOENT, errno.ENOTDIR):
2649                                         raise
2650                                 del e
2651                                 continue
2652                         key = (st.st_dev, st.st_ino)
2653                         paths = inode_map.get(key)
2654                         if paths is None:
2655                                 paths = set()
2656                                 inode_map[key] = paths
2657                         paths.add(f)
2658                 return inode_map
2659
	def _security_check(self, installed_instances):
		"""
		Scan the contents of the given installed instances for suid/sgid
		regular files that have more hardlinks than the instances account
		for, and report any such files via eerror.

		@param installed_instances: dblink instances whose contents are
			scanned
		@rtype: int
		@returns: 1 if suspicious hardlinks were found, 0 otherwise
		"""
		if not installed_instances:
			return 0

		os = _os_merge

		showMessage = self._display_merge
		scheduler = self._scheduler

		# Collect every path owned by the given instances.
		file_paths = set()
		for dblnk in installed_instances:
			file_paths.update(dblnk.getcontents())
		inode_map = {}
		real_paths = set()
		for i, path in enumerate(file_paths):

			# Periodically yield so other scheduler tasks can run.
			if scheduler is not None and \
				0 == i % self._file_merge_yield_interval:
				scheduler.scheduleYield()

			if os is _os_merge:
				try:
					_unicode_encode(path,
						encoding=_encodings['merge'], errors='strict')
				except UnicodeEncodeError:
					# The package appears to have been merged with a
					# different value of sys.getfilesystemencoding(),
					# so fall back to utf_8 if appropriate.
					try:
						_unicode_encode(path,
							encoding=_encodings['fs'], errors='strict')
					except UnicodeEncodeError:
						pass
					else:
						# Switch to the plain os module; this rebinding
						# persists for all remaining iterations.
						os = portage.os

			try:
				s = os.lstat(path)
			except OSError as e:
				if e.errno not in (errno.ENOENT, errno.ENOTDIR):
					raise
				del e
				continue
			# Only regular files are relevant for suid/sgid checks.
			if not stat.S_ISREG(s.st_mode):
				continue
			path = os.path.realpath(path)
			if path in real_paths:
				continue
			real_paths.add(path)
			# Group multi-link suid/sgid files by inode identity.
			if s.st_nlink > 1 and \
				s.st_mode & (stat.S_ISUID | stat.S_ISGID):
				k = (s.st_dev, s.st_ino)
				inode_map.setdefault(k, []).append((path, s))
		suspicious_hardlinks = []
		for path_list in inode_map.values():
			path, s = path_list[0]
			if len(path_list) == s.st_nlink:
				# All hardlinks seem to be owned by this package.
				continue
			suspicious_hardlinks.append(path_list)
		if not suspicious_hardlinks:
			return 0

		msg = []
		msg.append(_("suid/sgid file(s) "
			"with suspicious hardlink(s):"))
		msg.append("")
		for path_list in suspicious_hardlinks:
			for path, s in path_list:
				msg.append("\t%s" % path)
		msg.append("")
		msg.append(_("See the Gentoo Security Handbook " 
			"guide for advice on how to proceed."))

		self._eerror("preinst", msg)

		return 1
2737
2738         def _eqawarn(self, phase, lines):
2739                 from portage.elog.messages import eqawarn as _eqawarn
2740                 if self._scheduler is None:
2741                         for l in lines:
2742                                 _eqawarn(l, phase=phase, key=self.settings.mycpv)
2743                 else:
2744                         self._scheduler.dblinkElog(self,
2745                                 phase, _eqawarn, lines)
2746
2747         def _eerror(self, phase, lines):
2748                 from portage.elog.messages import eerror as _eerror
2749                 if self._scheduler is None:
2750                         for l in lines:
2751                                 _eerror(l, phase=phase, key=self.settings.mycpv)
2752                 else:
2753                         self._scheduler.dblinkElog(self,
2754                                 phase, _eerror, lines)
2755
2756         def treewalk(self, srcroot, destroot, inforoot, myebuild, cleanup=0,
2757                 mydbapi=None, prev_mtimes=None):
2758                 """
2759                 
2760                 This function does the following:
2761                 
2762                 calls self._preserve_libs if FEATURES=preserve-libs
2763                 calls self._collision_protect if FEATURES=collision-protect
2764                 calls doebuild(mydo=pkg_preinst)
2765                 Merges the package to the livefs
2766                 unmerges old version (if required)
2767                 calls doebuild(mydo=pkg_postinst)
2768                 calls env_update
2769                 calls elog_process
2770                 
2771                 @param srcroot: Typically this is ${D}
2772                 @type srcroot: String (Path)
2773                 @param destroot: ignored, self.settings['ROOT'] is used instead
2774                 @type destroot: String (Path)
2775                 @param inforoot: root of the vardb entry ?
2776                 @type inforoot: String (Path)
2777                 @param myebuild: path to the ebuild that we are processing
2778                 @type myebuild: String (Path)
2779                 @param mydbapi: dbapi which is handed to doebuild.
2780                 @type mydbapi: portdbapi instance
2781                 @param prev_mtimes: { Filename:mtime } mapping for env_update
2782                 @type prev_mtimes: Dictionary
2783                 @rtype: Boolean
2784                 @returns:
2785                 1. 0 on success
2786                 2. 1 on failure
2787                 
2788                 secondhand is a list of symlinks that have been skipped due to their target
2789                 not existing; we will merge these symlinks at a later time.
2790                 """
2791
2792                 os = _os_merge
2793
2794                 srcroot = _unicode_decode(srcroot,
2795                         encoding=_encodings['content'], errors='strict')
2796                 destroot = self.settings['ROOT']
2797                 inforoot = _unicode_decode(inforoot,
2798                         encoding=_encodings['content'], errors='strict')
2799                 myebuild = _unicode_decode(myebuild,
2800                         encoding=_encodings['content'], errors='strict')
2801
2802                 showMessage = self._display_merge
2803                 scheduler = self._scheduler
2804
2805                 srcroot = normalize_path(srcroot).rstrip(os.path.sep) + os.path.sep
2806
2807                 if not os.path.isdir(srcroot):
2808                         showMessage(_("!!! Directory Not Found: D='%s'\n") % srcroot,
2809                                 level=logging.ERROR, noiselevel=-1)
2810                         return 1
2811
2812                 slot = ''
2813                 for var_name in ('CHOST', 'SLOT'):
2814                         if var_name == 'CHOST' and self.cat == 'virtual':
2815                                 try:
2816                                         os.unlink(os.path.join(inforoot, var_name))
2817                                 except OSError:
2818                                         pass
2819                                 continue
2820
2821                         try:
2822                                 val = codecs.open(_unicode_encode(
2823                                         os.path.join(inforoot, var_name),
2824                                         encoding=_encodings['fs'], errors='strict'),
2825                                         mode='r', encoding=_encodings['repo.content'],
2826                                         errors='replace').readline().strip()
2827                         except EnvironmentError as e:
2828                                 if e.errno != errno.ENOENT:
2829                                         raise
2830                                 del e
2831                                 val = ''
2832
2833                         if var_name == 'SLOT':
2834                                 slot = val
2835
2836                                 if not slot.strip():
2837                                         slot = self.settings.get(var_name, '')
2838                                         if not slot.strip():
2839                                                 showMessage(_("!!! SLOT is undefined\n"),
2840                                                         level=logging.ERROR, noiselevel=-1)
2841                                                 return 1
2842                                         write_atomic(os.path.join(inforoot, var_name), slot + '\n')
2843
2844                         if val != self.settings.get(var_name, ''):
2845                                 self._eqawarn('preinst',
2846                                         [_("QA Notice: Expected %(var_name)s='%(expected_value)s', got '%(actual_value)s'\n") % \
2847                                         {"var_name":var_name, "expected_value":self.settings.get(var_name, ''), "actual_value":val}])
2848
2849                 def eerror(lines):  # local helper: emit error lines via the "preinst" elog phase
2850                         self._eerror("preinst", lines)
2851
2852                 if not os.path.exists(self.dbcatdir):
2853                         ensure_dirs(self.dbcatdir)
2854
2855                 otherversions = []
2856                 for v in self.vartree.dbapi.cp_list(self.mysplit[0]):
2857                         otherversions.append(v.split("/")[1])
2858
2859                 cp = self.mysplit[0]
2860                 slot_atom = "%s:%s" % (cp, slot)
2861
2862                 # filter any old-style virtual matches
2863                 slot_matches = [cpv for cpv in self.vartree.dbapi.match(slot_atom) \
2864                         if cpv_getkey(cpv) == cp]
2865
2866                 if self.mycpv not in slot_matches and \
2867                         self.vartree.dbapi.cpv_exists(self.mycpv):
2868                         # handle multislot or unapplied slotmove
2869                         slot_matches.append(self.mycpv)
2870
2871                 others_in_slot = []
2872                 from portage import config
2873                 for cur_cpv in slot_matches:
2874                         # Clone the config in case one of these has to be unmerged since
2875                         # we need it to have private ${T} etc... for things like elog.
2876                         others_in_slot.append(dblink(self.cat, catsplit(cur_cpv)[1],
2877                                 settings=config(clone=self.settings),
2878                                 vartree=self.vartree, treetype="vartree",
2879                                 scheduler=self._scheduler))
2880
2881                 retval = self._security_check(others_in_slot)
2882                 if retval:
2883                         return retval
2884
2885                 self.settings["REPLACING_VERSIONS"] = " ".join( 
2886                         [portage.versions.cpv_getversion(other.mycpv) for other in others_in_slot] )
2887                 self.settings.backup_changes("REPLACING_VERSIONS")
2888
2889                 if slot_matches:
2890                         # Used by self.isprotected().
2891                         max_dblnk = None
2892                         max_counter = -1
2893                         for dblnk in others_in_slot:
2894                                 cur_counter = self.vartree.dbapi.cpv_counter(dblnk.mycpv)
2895                                 if cur_counter > max_counter:
2896                                         max_counter = cur_counter
2897                                         max_dblnk = dblnk
2898                         self._installed_instance = max_dblnk
2899
2900                 # We check for unicode encoding issues after src_install. However,
2901                 # the check must be repeated here for binary packages (it's
2902                 # inexpensive since we call os.walk() here anyway).
2903                 unicode_errors = []
2904
2905                 while True:
2906
2907                         unicode_error = False
2908
2909                         myfilelist = []
2910                         mylinklist = []
2911                         paths_with_newlines = []
2912                         srcroot_len = len(srcroot)
2913                         def onerror(e):  # os.walk error callback: re-raise instead of silently skipping
2914                                 raise
2915                         for parent, dirs, files in os.walk(srcroot, onerror=onerror):
2916                                 try:
2917                                         parent = _unicode_decode(parent,
2918                                                 encoding=_encodings['merge'], errors='strict')
2919                                 except UnicodeDecodeError:
2920                                         new_parent = _unicode_decode(parent,
2921                                                 encoding=_encodings['merge'], errors='replace')
2922                                         new_parent = _unicode_encode(new_parent,
2923                                                 encoding=_encodings['merge'], errors='backslashreplace')
2924                                         new_parent = _unicode_decode(new_parent,
2925                                                 encoding=_encodings['merge'], errors='replace')
2926                                         os.rename(parent, new_parent)
2927                                         unicode_error = True
2928                                         unicode_errors.append(new_parent[srcroot_len:])
2929                                         break
2930
2931                                 for fname in files:
2932                                         try:
2933                                                 fname = _unicode_decode(fname,
2934                                                         encoding=_encodings['merge'], errors='strict')
2935                                         except UnicodeDecodeError:
2936                                                 fpath = portage._os.path.join(
2937                                                         parent.encode(_encodings['merge']), fname)
2938                                                 new_fname = _unicode_decode(fname,
2939                                                         encoding=_encodings['merge'], errors='replace')
2940                                                 new_fname = _unicode_encode(new_fname,
2941                                                         encoding=_encodings['merge'], errors='backslashreplace')
2942                                                 new_fname = _unicode_decode(new_fname,
2943                                                         encoding=_encodings['merge'], errors='replace')
2944                                                 new_fpath = os.path.join(parent, new_fname)
2945                                                 os.rename(fpath, new_fpath)
2946                                                 unicode_error = True
2947                                                 unicode_errors.append(new_fpath[srcroot_len:])
2948                                                 fname = new_fname
2949                                                 fpath = new_fpath
2950                                         else:
2951                                                 fpath = os.path.join(parent, fname)
2952
2953                                         relative_path = fpath[srcroot_len:]
2954
2955                                         if "\n" in relative_path:
2956                                                 paths_with_newlines.append(relative_path)
2957
2958                                         file_mode = os.lstat(fpath).st_mode
2959                                         if stat.S_ISREG(file_mode):
2960                                                 myfilelist.append(relative_path)
2961                                         elif stat.S_ISLNK(file_mode):
2962                                                 # Note: os.walk puts symlinks to directories in the "dirs"
2963                                                 # list and it does not traverse them since that could lead
2964                                                 # to an infinite recursion loop.
2965                                                 mylinklist.append(relative_path)
2966
2967                                 if unicode_error:
2968                                         break
2969
2970                         if not unicode_error:
2971                                 break
2972
2973                 if unicode_errors:
2974                         eerror(portage._merge_unicode_error(unicode_errors))
2975
2976                 if paths_with_newlines:
2977                         msg = []
2978                         msg.append(_("This package installs one or more files containing a newline (\\n) character:"))
2979                         msg.append("")
2980                         paths_with_newlines.sort()
2981                         for f in paths_with_newlines:
2982                                 msg.append("\t/%s" % (f.replace("\n", "\\n")))
2983                         msg.append("")
2984                         msg.append(_("package %s NOT merged") % self.mycpv)
2985                         msg.append("")
2986                         eerror(msg)
2987                         return 1
2988
2989                 # If there are no files to merge, and an installed package in the same
2990                 # slot has files, it probably means that something went wrong.
2991                 if self.settings.get("PORTAGE_PACKAGE_EMPTY_ABORT") == "1" and \
2992                         not myfilelist and not mylinklist and others_in_slot:
2993                         installed_files = None
2994                         for other_dblink in others_in_slot:
2995                                 installed_files = other_dblink.getcontents()
2996                                 if not installed_files:
2997                                         continue
2998                                 from textwrap import wrap
2999                                 wrap_width = 72
3000                                 msg = []
3001                                 d = {
3002                                         "new_cpv":self.mycpv,
3003                                         "old_cpv":other_dblink.mycpv
3004                                 }
3005                                 msg.extend(wrap(_("The '%(new_cpv)s' package will not install "
3006                                         "any files, but the currently installed '%(old_cpv)s'"
3007                                         " package has the following files: ") % d, wrap_width))
3008                                 msg.append("")
3009                                 msg.extend(sorted(installed_files))
3010                                 msg.append("")
3011                                 msg.append(_("package %s NOT merged") % self.mycpv)
3012                                 msg.append("")
3013                                 msg.extend(wrap(
3014                                         _("Manually run `emerge --unmerge =%s` if you "
3015                                         "really want to remove the above files. Set "
3016                                         "PORTAGE_PACKAGE_EMPTY_ABORT=\"0\" in "
3017                                         "/etc/make.conf if you do not want to "
3018                                         "abort in cases like this.") % other_dblink.mycpv,
3019                                         wrap_width))
3020                                 eerror(msg)
3021                         if installed_files:
3022                                 return 1
3023
3024                 # check for package collisions
3025                 blockers = None
3026                 if self._blockers is not None:
3027                         # This is only supposed to be called when
3028                         # the vdb is locked, like it is here.
3029                         blockers = self._blockers()
3030                 if blockers is None:
3031                         blockers = []
3032                 collisions, plib_collisions = \
3033                         self._collision_protect(srcroot, destroot,
3034                         others_in_slot + blockers, myfilelist + mylinklist)
3035
3036                 # Make sure the ebuild environment is initialized and that ${T}/elog
3037                 # exists for logging of collision-protect eerror messages.
3038                 if myebuild is None:
3039                         myebuild = os.path.join(inforoot, self.pkg + ".ebuild")
3040                 doebuild_environment(myebuild, "preinst",
3041                         settings=self.settings, db=mydbapi)
3042                 prepare_build_dirs(settings=self.settings, cleanup=cleanup)
3043
3044                 if collisions:
3045                         collision_protect = "collision-protect" in self.settings.features
3046                         protect_owned = "protect-owned" in self.settings.features
3047                         msg = _("This package will overwrite one or more files that"
3048                         " may belong to other packages (see list below).")
3049                         if not (collision_protect or protect_owned):
3050                                 msg += _(" Add either \"collision-protect\" or" 
3051                                 " \"protect-owned\" to FEATURES in"
3052                                 " make.conf if you would like the merge to abort"
3053                                 " in cases like this. See the make.conf man page for"
3054                                 " more information about these features.")
3055                         if self.settings.get("PORTAGE_QUIET") != "1":
3056                                 msg += _(" You can use a command such as"
3057                                 " `portageq owners / <filename>` to identify the"
3058                                 " installed package that owns a file. If portageq"
3059                                 " reports that only one package owns a file then do NOT"
3060                                 " file a bug report. A bug report is only useful if it"
3061                                 " identifies at least two or more packages that are known"
3062                                 " to install the same file(s)."
3063                                 " If a collision occurs and you"
3064                                 " can not explain where the file came from then you"
3065                                 " should simply ignore the collision since there is not"
3066                                 " enough information to determine if a real problem"
3067                                 " exists. Please do NOT file a bug report at"
3068                                 " http://bugs.gentoo.org unless you report exactly which"
3069                                 " two packages install the same file(s). Once again,"
3070                                 " please do NOT file a bug report unless you have"
3071                                 " completely understood the above message.")
3072
3073                         self.settings["EBUILD_PHASE"] = "preinst"
3074                         from textwrap import wrap
3075                         msg = wrap(msg, 70)
3076                         if collision_protect:
3077                                 msg.append("")
3078                                 msg.append(_("package %s NOT merged") % self.settings.mycpv)
3079                         msg.append("")
3080                         msg.append(_("Detected file collision(s):"))
3081                         msg.append("")
3082
3083                         for f in collisions:
3084                                 msg.append("\t%s" % \
3085                                         os.path.join(destroot, f.lstrip(os.path.sep)))
3086
3087                         eerror(msg)
3088
3089                         owners = None
3090                         if collision_protect or protect_owned:
3091                                 msg = []
3092                                 msg.append("")
3093                                 msg.append(_("Searching all installed"
3094                                         " packages for file collisions..."))
3095                                 msg.append("")
3096                                 msg.append(_("Press Ctrl-C to Stop"))
3097                                 msg.append("")
3098                                 eerror(msg)
3099
3100                                 if len(collisions) > 20:
3101                                         # get_owners is slow for large numbers of files, so
3102                                         # don't look them all up.
3103                                         collisions = collisions[:20]
3104                                 owners = self.vartree.dbapi._owners.get_owners(collisions,
3105                                         scheduler=self._scheduler)
3106                                 self.vartree.dbapi.flush_cache()
3107
3108                                 for pkg, owned_files in owners.items():
3109                                         cpv = pkg.mycpv
3110                                         msg = []
3111                                         msg.append("%s" % cpv)
3112                                         for f in sorted(owned_files):
3113                                                 msg.append("\t%s" % os.path.join(destroot,
3114                                                         f.lstrip(os.path.sep)))
3115                                         msg.append("")
3116                                         eerror(msg)
3117
3118                                 if not owners:
3119                                         eerror([_("None of the installed"
3120                                                 " packages claim the file(s)."), ""])
3121
3122                         # The explanation about the collision and how to solve
3123                         # it may not be visible via a scrollback buffer, especially
3124                         # if the number of file collisions is large. Therefore,
3125                         # show a summary at the end.
3126                         if collision_protect:
3127                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3128                                         self.settings.mycpv
3129                         elif protect_owned and owners:
3130                                 msg = _("Package '%s' NOT merged due to file collisions.") % \
3131                                         self.settings.mycpv
3132                         else:
3133                                 msg = _("Package '%s' merged despite file collisions.") % \
3134                                         self.settings.mycpv
3135                         msg += _(" If necessary, refer to your elog "
3136                                 "messages for the whole content of the above message.")
3137                         eerror(wrap(msg, 70))
3138
3139                         if collision_protect or (protect_owned and owners):
3140                                 return 1
3141
3142                 # The merge process may move files out of the image directory,
3143                 # which causes invalidation of the .installed flag.
3144                 try:
3145                         os.unlink(os.path.join(
3146                                 os.path.dirname(normalize_path(srcroot)), ".installed"))
3147                 except OSError as e:
3148                         if e.errno != errno.ENOENT:
3149                                 raise
3150                         del e
3151
3152                 self.dbdir = self.dbtmpdir
3153                 self.delete()
3154                 ensure_dirs(self.dbtmpdir)
3155
3156                 # run preinst script
3157                 if scheduler is None:
3158                         showMessage(_(">>> Merging %(cpv)s to %(destroot)s\n") % {"cpv":self.mycpv, "destroot":destroot})
3159                         a = _spawn_phase("preinst", self.settings)
3160                 else:
3161                         a = scheduler.dblinkEbuildPhase(
3162                                 self, mydbapi, myebuild, "preinst")
3163
3164                 # XXX: Decide how to handle failures here.
3165                 if a != os.EX_OK:
3166                         showMessage(_("!!! FAILED preinst: ")+str(a)+"\n",
3167                                 level=logging.ERROR, noiselevel=-1)
3168                         return a
3169
3170                 # copy "info" files (like SLOT, CFLAGS, etc.) into the database
3171                 for x in os.listdir(inforoot):
3172                         self.copyfile(inforoot+"/"+x)
3173
3174                 # write local package counter for recording
3175                 counter = self.vartree.dbapi.counter_tick(mycpv=self.mycpv)
3176                 codecs.open(_unicode_encode(os.path.join(self.dbtmpdir, 'COUNTER'),
3177                         encoding=_encodings['fs'], errors='strict'),
3178                         'w', encoding=_encodings['repo.content'], errors='backslashreplace'
3179                         ).write(str(counter))
3180
3181                 # open CONTENTS file (possibly overwriting old one) for recording
3182                 outfile = codecs.open(_unicode_encode(
3183                         os.path.join(self.dbtmpdir, 'CONTENTS'),
3184                         encoding=_encodings['fs'], errors='strict'),
3185                         mode='w', encoding=_encodings['repo.content'],
3186                         errors='backslashreplace')
3187
3188                 self.updateprotect()
3189
3190                 #if we have a file containing previously-merged config file md5sums, grab it.
3191                 conf_mem_file = os.path.join(self._eroot, CONFIG_MEMORY_FILE)
3192                 cfgfiledict = grabdict(conf_mem_file)
3193                 cfgfiledict_orig = cfgfiledict.copy()
3194                 if "NOCONFMEM" in self.settings:
3195                         cfgfiledict["IGNORE"]=1
3196                 else:
3197                         cfgfiledict["IGNORE"]=0
3198
3199                 # Always behave like --noconfmem is enabled for downgrades
3200                 # so that people who don't know about this option are less
3201                 # likely to get confused when doing upgrade/downgrade cycles.
3202                 pv_split = catpkgsplit(self.mycpv)[1:]
3203                 for other in others_in_slot:
3204                         if pkgcmp(pv_split, catpkgsplit(other.mycpv)[1:]) < 0:
3205                                 cfgfiledict["IGNORE"] = 1
3206                                 break
3207
3208                 # Don't bump mtimes on merge since some application require
3209                 # preservation of timestamps.  This means that the unmerge phase must
3210                 # check to see if file belongs to an installed instance in the same
3211                 # slot.
3212                 mymtime = None
3213
3214                 # set umask to 0 for merging; back up umask, save old one in prevmask (since this is a global change)
3215                 prevmask = os.umask(0)
3216                 secondhand = []
3217
3218                 # we do a first merge; this will recurse through all files in our srcroot but also build up a
3219                 # "second hand" of symlinks to merge later
3220                 if self.mergeme(srcroot, destroot, outfile, secondhand, "", cfgfiledict, mymtime):
3221                         return 1
3222
3223                 # now, it's time for dealing our second hand; we'll loop until we can't merge anymore.  The rest are
3224                 # broken symlinks.  We'll merge them too.
3225                 lastlen = 0
3226                 while len(secondhand) and len(secondhand)!=lastlen:
3227                         # clear the thirdhand.  Anything from our second hand that
3228                         # couldn't get merged will be added to thirdhand.
3229
3230                         thirdhand = []
3231                         if self.mergeme(srcroot, destroot, outfile, thirdhand,
3232                                 secondhand, cfgfiledict, mymtime):
3233                                 return 1
3234
3235                         #swap hands
3236                         lastlen = len(secondhand)
3237
3238                         # our thirdhand now becomes our secondhand.  It's ok to throw
3239                         # away secondhand since thirdhand contains all the stuff that
3240                         # couldn't be merged.
3241                         secondhand = thirdhand
3242
3243                 if len(secondhand):
3244                         # force merge of remaining symlinks (broken or circular; oh well)
3245                         if self.mergeme(srcroot, destroot, outfile, None,
3246                                 secondhand, cfgfiledict, mymtime):
3247                                 return 1
3248                 self._md5_merge_map.clear()
3249
3250                 #restore umask
3251                 os.umask(prevmask)
3252
3253                 #if we opened it, close it
3254                 outfile.flush()
3255                 outfile.close()
3256
3257                 # write out our collection of md5sums
3258                 cfgfiledict.pop("IGNORE", None)
3259                 if cfgfiledict != cfgfiledict_orig:
3260                         ensure_dirs(os.path.dirname(conf_mem_file),
3261                                 gid=portage_gid, mode=0o2750, mask=0o2)
3262                         writedict(cfgfiledict, conf_mem_file)
3263
3264                 # These caches are populated during collision-protect and the data
3265                 # they contain is now invalid. It's very important to invalidate
3266                 # the contents_inodes cache so that FEATURES=unmerge-orphans
3267                 # doesn't unmerge anything that belongs to this package that has
3268                 # just been merged.
3269                 for dblnk in others_in_slot:
3270                         dblnk._clear_contents_cache()
3271                 self._clear_contents_cache()
3272
3273                 linkmap = self.vartree.dbapi._linkmap
3274                 if linkmap is None:
3275                         # preserve-libs is entirely disabled
3276                         preserve_paths = None
3277                 else:
3278                         self._linkmap_rebuild(include_file=os.path.join(inforoot,
3279                                 linkmap._needed_aux_key))
3280
3281                         # Preserve old libs if they are still in use
3282                         preserve_paths = self._find_libs_to_preserve()
3283                         if preserve_paths:
3284                                 self._add_preserve_libs_to_contents(preserve_paths)
3285
3286                 # If portage is reinstalling itself, remove the old
3287                 # version now since we want to use the temporary
3288                 # PORTAGE_BIN_PATH that will be removed when we return.
3289                 reinstall_self = False
3290                 if self.myroot == "/" and \
3291                         match_from_list(PORTAGE_PACKAGE_ATOM, [self.mycpv]):
3292                         reinstall_self = True
3293
3294                 if scheduler is None:
2895                         def emerge_log(msg):  # no scheduler attached: discard emerge-log messages
2896                                 pass
3297                 else:
3298                         emerge_log = scheduler.dblinkEmergeLog
3299
3300                 autoclean = self.settings.get("AUTOCLEAN", "yes") == "yes"
3301
3302                 if autoclean:
3303                         emerge_log(_(" >>> AUTOCLEAN: %s") % (slot_atom,))
3304
3305                 others_in_slot.append(self)  # self has just been merged
3306                 for dblnk in list(others_in_slot):
3307                         if dblnk is self:
3308                                 continue
3309                         if not (autoclean or dblnk.mycpv == self.mycpv or reinstall_self):
3310                                 continue
3311                         showMessage(_(">>> Safely unmerging already-installed instance...\n"))
3312                         emerge_log(_(" === Unmerging... (%s)") % (dblnk.mycpv,))
3313                         others_in_slot.remove(dblnk) # dblnk will unmerge itself now
3314                         dblnk._linkmap_broken = self._linkmap_broken
3315                         dblnk.settings["REPLACED_BY_VERSION"] = portage.versions.cpv_getversion(self.mycpv)
3316                         dblnk.settings.backup_changes("REPLACED_BY_VERSION")
3317                         unmerge_rval = dblnk.unmerge(ldpath_mtimes=prev_mtimes,
3318                                 others_in_slot=others_in_slot)
3319                         dblnk.settings.pop("REPLACED_BY_VERSION", None)
3320
3321                         if unmerge_rval == os.EX_OK:
3322                                 emerge_log(_(" >>> unmerge success: %s") % (dblnk.mycpv,))
3323                         else:
3324                                 emerge_log(_(" !!! unmerge FAILURE: %s") % (dblnk.mycpv,))
3325
3326                         # TODO: Check status and abort if necessary.
3327                         dblnk.delete()
3328                         showMessage(_(">>> Original instance of package unmerged safely.\n"))
3329
3330                 if len(others_in_slot) > 1:
3331                         showMessage(colorize("WARN", _("WARNING:"))
3332                                 + _(" AUTOCLEAN is disabled.  This can cause serious"
3333                                 " problems due to overlapping packages.\n"),
3334                                 level=logging.WARN, noiselevel=-1)
3335
3336                 # We hold both directory locks.
3337                 self.dbdir = self.dbpkgdir
3338                 self.delete()
3339                 _movefile(self.dbtmpdir, self.dbpkgdir, mysettings=self.settings)
3340
3341                 # keep track of the libs we preserved
3342                 if self.vartree.dbapi._plib_registry is not None and \
3343                         preserve_paths:
3344                         self.vartree.dbapi._plib_registry.register(self.mycpv,
3345                                 slot, counter, sorted(preserve_paths))
3346
3347                 # Check for file collisions with blocking packages
3348                 # and remove any colliding files from their CONTENTS
3349                 # since they now belong to this package.
3350                 self._clear_contents_cache()
3351                 contents = self.getcontents()
3352                 destroot_len = len(destroot) - 1
3353                 for blocker in blockers:
3354                         self.vartree.dbapi.removeFromContents(blocker, iter(contents),
3355                                 relative_paths=False)
3356
3357                 # Unregister any preserved libs that this package has overwritten
3358                 # and update the contents of the packages that owned them.
3359                 plib_registry = self.vartree.dbapi._plib_registry
3360                 if plib_registry is None:
3361                         # preserve-libs is entirely disabled
3362                         pass
3363                 else:
3364                         plib_dict = plib_registry.getPreservedLibs()
3365                         for cpv, paths in plib_collisions.items():
3366                                 if cpv not in plib_dict:
3367                                         continue
3368                                 if cpv == self.mycpv:
3369                                         continue
3370                                 try:
3371                                         slot, counter = self.vartree.dbapi.aux_get(
3372                                                 cpv, ["SLOT", "COUNTER"])
3373                                 except KeyError:
3374                                         continue
3375                                 remaining = [f for f in plib_dict[cpv] if f not in paths]
3376                                 plib_registry.register(cpv, slot, counter, remaining)
3377                                 self.vartree.dbapi.removeFromContents(cpv, paths)
3378
3379                 self.vartree.dbapi._add(self)
3380                 contents = self.getcontents()
3381
3382                 #do postinst script
3383                 self.settings["PORTAGE_UPDATE_ENV"] = \
3384                         os.path.join(self.dbpkgdir, "environment.bz2")
3385                 self.settings.backup_changes("PORTAGE_UPDATE_ENV")
3386                 try:
3387                         if scheduler is None:
3388                                 a = _spawn_phase("postinst", self.settings)
3389                                 if a == os.EX_OK:
3390                                         showMessage(_(">>> %s merged.\n") % self.mycpv)
3391                         else:
3392                                 a = scheduler.dblinkEbuildPhase(
3393                                         self, mydbapi, myebuild, "postinst")
3394                 finally:
3395                         self.settings.pop("PORTAGE_UPDATE_ENV", None)
3396
3397                 if a != os.EX_OK:
3398                         # It's stupid to bail out here, so keep going regardless of
3399                         # phase return code.
3400                         showMessage(_("!!! FAILED postinst: ")+str(a)+"\n",
3401                                 level=logging.ERROR, noiselevel=-1)
3402
3403                 downgrade = False
3404                 for v in otherversions:
3405                         if pkgcmp(catpkgsplit(self.pkg)[1:], catpkgsplit(v)[1:]) < 0:
3406                                 downgrade = True
3407
3408                 #update environment settings, library paths. DO NOT change symlinks.
3409                 env_update(makelinks=(not downgrade),
3410                         target_root=self.settings['ROOT'], prev_mtimes=prev_mtimes,
3411                         contents=contents, env=self.settings.environ(),
3412                         writemsg_level=self._display_merge)
3413
3414                 # For gcc upgrades, preserved libs have to be removed after the
3415                 # the library path has been updated.
3416                 self._linkmap_rebuild()
3417                 cpv_lib_map = self._find_unused_preserved_libs()
3418                 if cpv_lib_map:
3419                         self._remove_preserved_libs(cpv_lib_map)
3420                         for cpv, removed in cpv_lib_map.items():
3421                                 if not self.vartree.dbapi.cpv_exists(cpv):
3422                                         continue
3423                                 self.vartree.dbapi.removeFromContents(cpv, removed)
3424
3425                 return os.EX_OK
3426
3427         def _new_backup_path(self, p):
3428                 """
3429                 The works for any type path, such as a regular file, symlink,
3430                 or directory. The parent directory is assumed to exist.
3431                 The returned filename is of the form p + '.backup.' + x, where
3432                 x guarantees that the returned path does not exist yet.
3433                 """
3434                 os = _os_merge
3435
3436                 x = -1
3437                 while True:
3438                         x += 1
3439                         backup_p = p + '.backup.' + str(x).rjust(4, '0')
3440                         try:
3441                                 os.lstat(backup_p)
3442                         except OSError:
3443                                 break
3444
3445                 return backup_p
3446
	def mergeme(self, srcroot, destroot, outfile, secondhand, stufftomerge, cfgfiledict, thismtime):
		"""
		This function handles actual merging of the package contents to the livefs.
		It also handles config protection.

		@param srcroot: Where we are copying files from (usually ${D})
		@type srcroot: String (Path)
		@param destroot: Typically ${ROOT}
		@type destroot: String (Path)
		@param outfile: File to log operations to (becomes the CONTENTS entry list)
		@type outfile: File Object
		@param secondhand: A list of items to merge in pass two (usually
		symlinks that point to non-existing files that may get merged later),
		or None to operate in "force" mode with no second pass
		@type secondhand: List
		@param stufftomerge: Either a directory to merge (relative to srcroot),
		or a list of items to merge
		@type stufftomerge: String or List
		@param cfgfiledict: { File:mtime } mapping for config_protected files
		@type cfgfiledict: Dictionary
		@param thismtime: The current time (typically long(time.time())
		@type thismtime: Long
		@rtype: None or Boolean
		@returns:
		1. True on failure
		2. None otherwise
		"""

		showMessage = self._display_merge
		writemsg = self._display_merge
		scheduler = self._scheduler

		# Use the merge-specific os wrapper so filename encoding is handled
		# consistently with the rest of the merge code.
		os = _os_merge
		sep = os.sep
		join = os.path.join
		srcroot = normalize_path(srcroot).rstrip(sep) + sep
		destroot = normalize_path(destroot).rstrip(sep) + sep
		calc_prelink = "prelink-checksums" in self.settings.features

		# this is supposed to merge a list of files.  There will be 2 forms of argument passing.
		if isinstance(stufftomerge, basestring):
			#A directory is specified.  Figure out protection paths, listdir() it and process it.
			mergelist = os.listdir(join(srcroot, stufftomerge))
			offset = stufftomerge
		else:
			mergelist = stufftomerge
			offset = ""

		for i, x in enumerate(mergelist):

			# Periodically yield to the scheduler so other tasks can run
			# during a long merge.
			if scheduler is not None and \
				0 == i % self._file_merge_yield_interval:
				scheduler.scheduleYield()

			mysrc = join(srcroot, offset, x)
			mydest = join(destroot, offset, x)
			# myrealdest is mydest without the $ROOT prefix (makes a difference if ROOT!="/")
			myrealdest = join(sep, offset, x)
			# stat file once, test using S_* macros many times (faster that way)
			mystat = os.lstat(mysrc)
			mymode = mystat[stat.ST_MODE]
			# handy variables; mydest is the target object on the live filesystems;
			# mysrc is the source object in the temporary install dir
			try:
				mydstat = os.lstat(mydest)
				mydmode = mydstat.st_mode
			except OSError as e:
				if e.errno != errno.ENOENT:
					raise
				del e
				#dest file doesn't exist
				mydstat = None
				mydmode = None

			if stat.S_ISLNK(mymode):
				# we are merging a symbolic link
				myabsto = abssymlink(mysrc)
				if myabsto.startswith(srcroot):
					myabsto = myabsto[len(srcroot):]
				myabsto = myabsto.lstrip(sep)
				myto = os.readlink(mysrc)
				# Strip a leading ${D} from the link target so the installed
				# link points inside ${ROOT} rather than the build image.
				if self.settings and self.settings["D"]:
					if myto.startswith(self.settings["D"]):
						myto = myto[len(self.settings["D"]):]
				# myrealto contains the path of the real file to which this symlink points.
				# we can simply test for existence of this file to see if the target has been merged yet
				myrealto = normalize_path(os.path.join(destroot, myabsto))
				if mydmode!=None:
					#destination exists
					if not stat.S_ISLNK(mydmode):
						if stat.S_ISDIR(mydmode):
							# directory in the way: we can't merge a symlink over a directory
							# we won't merge this, continue with next file...
							continue

						if os.path.exists(mysrc) and stat.S_ISDIR(os.stat(mysrc)[stat.ST_MODE]):
							# Kill file blocking installation of symlink to dir #71787
							pass
						elif self.isprotected(mydest):
							# Use md5 of the target in ${D} if it exists...
							try:
								newmd5 = perform_md5(join(srcroot, myabsto))
							except FileNotFound:
								# Maybe the target is merged already.
								try:
									newmd5 = perform_md5(myrealto)
								except FileNotFound:
									newmd5 = None
							mydest = new_protect_filename(mydest, newmd5=newmd5)

				# if secondhand is None it means we're operating in "force" mode and should not create a second hand.
				if (secondhand != None) and (not os.path.exists(myrealto)):
					# either the target directory doesn't exist yet or the target file doesn't exist -- or
					# the target is a broken symlink.  We will add this file to our "second hand" and merge
					# it later.
					secondhand.append(mysrc[len(srcroot):])
					continue
				# unlinking no longer necessary; "movefile" will overwrite symlinks atomically and correctly
				mymtime = movefile(mysrc, mydest, newmtime=thismtime,
					sstat=mystat, mysettings=self.settings,
					encoding=_encodings['merge'])
				if mymtime != None:
					showMessage(">>> %s -> %s\n" % (mydest, myto))
					# Record the symlink in CONTENTS: "sym <dest> -> <target> <mtime>".
					outfile.write("sym "+myrealdest+" -> "+myto+" "+str(mymtime)+"\n")
				else:
					showMessage(_("!!! Failed to move file.\n"),
						level=logging.ERROR, noiselevel=-1)
					showMessage("!!! %s -> %s\n" % (mydest, myto),
						level=logging.ERROR, noiselevel=-1)
					return 1
			elif stat.S_ISDIR(mymode):
				# we are merging a directory
				if mydmode != None:
					# destination exists

					if bsd_chflags:
						# Save then clear flags on dest.
						dflags = mydstat.st_flags
						if dflags != 0:
							bsd_chflags.lchflags(mydest, 0)

					if not os.access(mydest, os.W_OK):
						pkgstuff = pkgsplit(self.pkg)
						writemsg(_("\n!!! Cannot write to '%s'.\n") % mydest, noiselevel=-1)
						writemsg(_("!!! Please check permissions and directories for broken symlinks.\n"))
						writemsg(_("!!! You may start the merge process again by using ebuild:\n"))
						writemsg("!!! ebuild "+self.settings["PORTDIR"]+"/"+self.cat+"/"+pkgstuff[0]+"/"+self.pkg+".ebuild merge\n")
						writemsg(_("!!! And finish by running this: env-update\n\n"))
						return 1

					if stat.S_ISDIR(mydmode) or \
						(stat.S_ISLNK(mydmode) and os.path.isdir(mydest)):
						# a symlink to an existing directory will work for us; keep it:
						showMessage("--- %s/\n" % mydest)
						if bsd_chflags:
							bsd_chflags.lchflags(mydest, dflags)
					else:
						# a non-directory and non-symlink-to-directory.  Won't work for us.  Move out of the way.
						backup_dest = self._new_backup_path(mydest)
						msg = []
						msg.append("")
						msg.append(_("Installation of a directory is blocked by a file:"))
						msg.append("  '%s'" % mydest)
						msg.append(_("This file will be renamed to a different name:"))
						msg.append("  '%s'" % backup_dest)
						msg.append("")
						self._eerror("preinst", msg)
						if movefile(mydest, backup_dest,
							mysettings=self.settings,
							encoding=_encodings['merge']) is None:
							return 1
						showMessage(_("bak %s %s.backup\n") % (mydest, mydest),
							level=logging.ERROR, noiselevel=-1)
						#now create our directory
						try:
							if self.settings.selinux_enabled():
								_selinux_merge.mkdir(mydest, mysrc)
							else:
								os.mkdir(mydest)
						except OSError as e:
							# Error handling should be equivalent to
							# portage.util.ensure_dirs() for cases
							# like bug #187518.
							if e.errno in (errno.EEXIST,):
								pass
							elif os.path.isdir(mydest):
								pass
							else:
								raise
							del e

						if bsd_chflags:
							bsd_chflags.lchflags(mydest, dflags)
						# Propagate mode and ownership from the image copy.
						os.chmod(mydest, mystat[0])
						os.chown(mydest, mystat[4], mystat[5])
						showMessage(">>> %s/\n" % mydest)
				else:
					try:
						#destination doesn't exist
						if self.settings.selinux_enabled():
							_selinux_merge.mkdir(mydest, mysrc)
						else:
							os.mkdir(mydest)
					except OSError as e:
						# Error handling should be equivalent to
						# portage.util.ensure_dirs() for cases
						# like bug #187518.
						if e.errno in (errno.EEXIST,):
							pass
						elif os.path.isdir(mydest):
							pass
						else:
							raise
						del e
					os.chmod(mydest, mystat[0])
					os.chown(mydest, mystat[4], mystat[5])
					showMessage(">>> %s/\n" % mydest)
				outfile.write("dir "+myrealdest+"\n")
				# recurse and merge this directory
				if self.mergeme(srcroot, destroot, outfile, secondhand,
					join(offset, x), cfgfiledict, thismtime):
					return 1
			elif stat.S_ISREG(mymode):
				# we are merging a regular file
				mymd5 = perform_md5(mysrc, calc_prelink=calc_prelink)
				# calculate config file protection stuff
				mydestdir = os.path.dirname(mydest)
				moveme = 1
				zing = "!!!"
				mymtime = None
				protected = self.isprotected(mydest)
				if mydmode != None:
					# destination file exists
					
					if stat.S_ISDIR(mydmode):
						# install of destination is blocked by an existing directory with the same name
						newdest = self._new_backup_path(mydest)
						msg = []
						msg.append("")
						msg.append(_("Installation of a regular file is blocked by a directory:"))
						msg.append("  '%s'" % mydest)
						msg.append(_("This file will be merged with a different name:"))
						msg.append("  '%s'" % newdest)
						msg.append("")
						self._eerror("preinst", msg)
						mydest = newdest

					elif stat.S_ISREG(mydmode) or (stat.S_ISLNK(mydmode) and os.path.exists(mydest) and stat.S_ISREG(os.stat(mydest)[stat.ST_MODE])):
						# install of destination is blocked by an existing regular file,
						# or by a symlink to an existing regular file;
						# now, config file management may come into play.
						# we only need to tweak mydest if cfg file management is in play.
						if protected:
							# we have a protection path; enable config file management.
							cfgprot = 0
							destmd5 = perform_md5(mydest, calc_prelink=calc_prelink)
							if mymd5 == destmd5:
								#file already in place; simply update mtimes of destination
								moveme = 1
							else:
								if mymd5 == cfgfiledict.get(myrealdest, [None])[0]:
									""" An identical update has previously been
									merged.  Skip it unless the user has chosen
									--noconfmem."""
									moveme = cfgfiledict["IGNORE"]
									cfgprot = cfgfiledict["IGNORE"]
									if not moveme:
										zing = "---"
										mymtime = mystat[stat.ST_MTIME]
								else:
									moveme = 1
									cfgprot = 1
							if moveme:
								# Merging a new file, so update confmem.
								cfgfiledict[myrealdest] = [mymd5]
							elif destmd5 == cfgfiledict.get(myrealdest, [None])[0]:
								"""A previously remembered update has been
								accepted, so it is removed from confmem."""
								del cfgfiledict[myrealdest]

							if cfgprot:
								# Redirect the merge to a ._cfg#### name so the
								# protected file is not clobbered.
								mydest = new_protect_filename(mydest, newmd5=mymd5)

				# whether config protection or not, we merge the new file the
				# same way.  Unless moveme=0 (blocking directory)
				if moveme:
					# Do not hardlink files unless they are in the same
					# directory, since otherwise tar may not be able to
					# extract a tarball of the resulting hardlinks due to
					# 'Invalid cross-device link' errors (depends on layout of
					# mount points). Also, don't hardlink zero-byte files since
					# it doesn't save any space, and don't hardlink
					# CONFIG_PROTECTed files since config files shouldn't be
					# hardlinked to eachother (for example, shadow installs
					# several identical config files inside /etc/pam.d/).
					parent_dir = os.path.dirname(myrealdest)
					hardlink_key = (parent_dir, mymd5, mystat.st_size,
						mystat.st_mode, mystat.st_uid, mystat.st_gid)

					hardlink_candidates = None
					if not protected and mystat.st_size != 0:
						hardlink_candidates = self._md5_merge_map.get(hardlink_key)
						if hardlink_candidates is None:
							hardlink_candidates = []
							self._md5_merge_map[hardlink_key] = hardlink_candidates

					mymtime = movefile(mysrc, mydest, newmtime=thismtime,
						sstat=mystat, mysettings=self.settings,
						hardlink_candidates=hardlink_candidates,
						encoding=_encodings['merge'])
					if mymtime is None:
						return 1
					if hardlink_candidates is not None:
						hardlink_candidates.append(mydest)
					zing = ">>>"

				if mymtime != None:
					# Record the regular file in CONTENTS: "obj <dest> <md5> <mtime>".
					outfile.write("obj "+myrealdest+" "+mymd5+" "+str(mymtime)+"\n")
				showMessage("%s %s\n" % (zing,mydest))
			else:
				# we are merging a fifo or device node
				zing = "!!!"
				if mydmode is None:
					# destination doesn't exist
					if movefile(mysrc, mydest, newmtime=thismtime,
						sstat=mystat, mysettings=self.settings,
						encoding=_encodings['merge']) is not None:
						zing = ">>>"
					else:
						return 1
				if stat.S_ISFIFO(mymode):
					outfile.write("fif %s\n" % myrealdest)
				else:
					outfile.write("dev %s\n" % myrealdest)
				showMessage(zing + " " + mydest + "\n")
	def merge(self, mergeroot, inforoot, myroot=None, myebuild=None, cleanup=0,
		mydbapi=None, prev_mtimes=None):
		"""
		If portage is reinstalling itself, create temporary
		copies of PORTAGE_BIN_PATH and PORTAGE_PYM_PATH in order
		to avoid relying on the new versions which may be
		incompatible. Register an atexit hook to clean up the
		temporary directories. Pre-load elog modules here since
		we won't be able to later if they get unmerged (happens
		when namespace changes).

		@param myroot: ignored, self._eroot is used instead
		"""
		myroot = None
		# Drop the cached category list since this merge may introduce
		# a new category.
		if self.vartree.dbapi._categories is not None:
			self.vartree.dbapi._categories = None
		# Detect a reinstall of portage itself into the live root; live
		# ('9999'/git) versions always trigger the precautions below.
		if self.myroot == "/" and \
			match_from_list(PORTAGE_PACKAGE_ATOM, [self.mycpv]) and \
			(not self.vartree.dbapi.cpv_exists(self.mycpv) or \
			'9999' in self.mycpv or \
			'git' in self.settings.get('INHERITED', '').split()):
			# Load lazily referenced portage submodules into memory,
			# so imports won't fail during portage upgrade/downgrade.
			portage.proxy.lazyimport._preload_portage_submodules()
			settings = self.settings
			# NOTE(review): base_path_orig appears unused in this method —
			# confirm before removing.
			base_path_orig = os.path.dirname(settings["PORTAGE_BIN_PATH"])
			from tempfile import mkdtemp

			# Make the temp directory inside PORTAGE_TMPDIR since, unlike
			# /tmp, it can't be mounted with the "noexec" option.
			base_path_tmp = mkdtemp("", "._portage_reinstall_.",
				settings["PORTAGE_TMPDIR"])
			from portage.process import atexit_register
			atexit_register(shutil.rmtree, base_path_tmp)
			dir_perms = 0o755
			# Copy the bin and pym trees aside and point the settings at the
			# copies, so the running process keeps working after unmerge.
			for subdir in "bin", "pym":
				var_name = "PORTAGE_%s_PATH" % subdir.upper()
				var_orig = settings[var_name]
				var_new = os.path.join(base_path_tmp, subdir)
				settings[var_name] = var_new
				settings.backup_changes(var_name)
				shutil.copytree(var_orig, var_new, symlinks=True)
				os.chmod(var_new, dir_perms)
			os.chmod(base_path_tmp, dir_perms)
			# This serves to pre-load the elog modules.
			elog_process(self.mycpv, self.settings)

		return self._merge(mergeroot, inforoot,
				myebuild=myebuild, cleanup=cleanup,
				mydbapi=mydbapi, prev_mtimes=prev_mtimes)
3834         def _merge(self, mergeroot, inforoot, myroot=None, myebuild=None, cleanup=0,
3835                 mydbapi=None, prev_mtimes=None):
3836                 """
3837                 @param myroot: ignored, self._eroot is used instead
3838                 """
3839                 myroot = None
3840                 retval = -1
3841                 self.lockdb()
3842                 self.vartree.dbapi._bump_mtime(self.mycpv)
3843                 try:
3844                         plib_registry = self.vartree.dbapi._plib_registry
3845                         if plib_registry is None:
3846                                 # preserve-libs is entirely disabled
3847                                 pass
3848                         else:
3849                                 plib_registry.load()
3850                                 plib_registry.pruneNonExisting()
3851
3852                         retval = self.treewalk(mergeroot, myroot, inforoot, myebuild,
3853                                 cleanup=cleanup, mydbapi=mydbapi, prev_mtimes=prev_mtimes)
3854
3855                         # If PORTAGE_BUILDDIR doesn't exist, then it probably means
3856                         # fail-clean is enabled, and the success/die hooks have
3857                         # already been called by _emerge.EbuildPhase (via
3858                         # self._scheduler.dblinkEbuildPhase) prior to cleaning.
3859                         if os.path.isdir(self.settings['PORTAGE_BUILDDIR']):
3860
3861                                 if retval == os.EX_OK:
3862                                         phase = 'success_hooks'
3863                                 else:
3864                                         phase = 'die_hooks'
3865
3866                                 if self._scheduler is None:
3867                                         ebuild_phase = MiscFunctionsProcess(
3868                                                 background=False,
3869                                                 commands=[phase],
3870                                                 scheduler=PollScheduler().sched_iface,
3871                                                 settings=self.settings)
3872                                         ebuild_phase.start()
3873                                         ebuild_phase.wait()
3874                                 else:
3875                                         self._scheduler.dblinkEbuildPhase(
3876                                                 self, mydbapi, myebuild, phase)
3877
3878                                 elog_process(self.mycpv, self.settings)
3879
3880                                 if 'noclean' not in self.settings.features and \
3881                                         (retval == os.EX_OK or \
3882                                         'fail-clean' in self.settings.features):
3883                                         if myebuild is None:
3884                                                 myebuild = os.path.join(inforoot, self.pkg + ".ebuild")
3885
3886                                         doebuild_environment(myebuild, "clean",
3887                                                 settings=self.settings, db=mydbapi)
3888                                         if self._scheduler is None:
3889                                                 _spawn_phase("clean", self.settings)
3890                                         else:
3891                                                 self._scheduler.dblinkEbuildPhase(
3892                                                         self, mydbapi, myebuild, "clean")
3893
3894                 finally:
3895                         self.settings.pop('REPLACING_VERSIONS', None)
3896                         if self.vartree.dbapi._linkmap is None:
3897                                 # preserve-libs is entirely disabled
3898                                 pass
3899                         else:
3900                                 self.vartree.dbapi._linkmap._clear_cache()
3901                         self.unlockdb()
3902                         self.vartree.dbapi._bump_mtime(self.mycpv)
3903                 return retval
3904
3905         def getstring(self,name):
3906                 "returns contents of a file with whitespace converted to spaces"
3907                 if not os.path.exists(self.dbdir+"/"+name):
3908                         return ""
3909                 mydata = codecs.open(
3910                         _unicode_encode(os.path.join(self.dbdir, name),
3911                         encoding=_encodings['fs'], errors='strict'),
3912                         mode='r', encoding=_encodings['repo.content'], errors='replace'
3913                         ).read().split()
3914                 return " ".join(mydata)
3915
3916         def copyfile(self,fname):
3917                 shutil.copyfile(fname,self.dbdir+"/"+os.path.basename(fname))
3918
3919         def getfile(self,fname):
3920                 if not os.path.exists(self.dbdir+"/"+fname):
3921                         return ""
3922                 return codecs.open(_unicode_encode(os.path.join(self.dbdir, fname),
3923                         encoding=_encodings['fs'], errors='strict'), 
3924                         mode='r', encoding=_encodings['repo.content'], errors='replace'
3925                         ).read()
3926
3927         def setfile(self,fname,data):
3928                 kwargs = {}
3929                 if fname == 'environment.bz2' or not isinstance(data, basestring):
3930                         kwargs['mode'] = 'wb'
3931                 else:
3932                         kwargs['mode'] = 'w'
3933                         kwargs['encoding'] = _encodings['repo.content']
3934                 write_atomic(os.path.join(self.dbdir, fname), data, **kwargs)
3935
3936         def getelements(self,ename):
3937                 if not os.path.exists(self.dbdir+"/"+ename):
3938                         return []
3939                 mylines = codecs.open(_unicode_encode(
3940                         os.path.join(self.dbdir, ename),
3941                         encoding=_encodings['fs'], errors='strict'),
3942                         mode='r', encoding=_encodings['repo.content'], errors='replace'
3943                         ).readlines()
3944                 myreturn = []
3945                 for x in mylines:
3946                         for y in x[:-1].split():
3947                                 myreturn.append(y)
3948                 return myreturn
3949
3950         def setelements(self,mylist,ename):
3951                 myelement = codecs.open(_unicode_encode(
3952                         os.path.join(self.dbdir, ename),
3953                         encoding=_encodings['fs'], errors='strict'),
3954                         mode='w', encoding=_encodings['repo.content'],
3955                         errors='backslashreplace')
3956                 for x in mylist:
3957                         myelement.write(x+"\n")
3958                 myelement.close()
3959
3960         def isregular(self):
3961                 "Is this a regular package (does it have a CATEGORY file?  A dblink can be virtual *and* regular)"
3962                 return os.path.exists(os.path.join(self.dbdir, "CATEGORY"))
3963
def merge(mycat, mypkg, pkgloc, infloc,
	myroot=None, settings=None, myebuild=None,
	mytree=None, mydbapi=None, vartree=None, prev_mtimes=None, blockers=None,
	scheduler=None):
	"""
	Merge the package rooted at pkgloc into the live filesystem.

	@param myroot: ignored, settings['EROOT'] is used instead
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	eroot = settings['EROOT']
	# Refuse early if we clearly lack write access to EROOT.
	if not os.access(eroot, os.W_OK):
		writemsg(_("Permission denied: access('%s', W_OK)\n") % eroot,
			noiselevel=-1)
		return errno.EACCES
	pkg_link = dblink(mycat, mypkg, settings=settings, treetype=mytree,
		vartree=vartree, blockers=blockers, scheduler=scheduler)
	return pkg_link.merge(pkgloc, infloc, myebuild=myebuild,
		mydbapi=mydbapi, prev_mtimes=prev_mtimes)
3982
def unmerge(cat, pkg, myroot=None, settings=None,
	mytrimworld=None, vartree=None,
	ldpath_mtimes=None, scheduler=None):
	"""
	Unmerge the given package from the vardb, removing its files from
	the live filesystem and deleting its vardb entry on success.

	@param myroot: ignored, settings['EROOT'] is used instead
	@param mytrimworld: ignored
	"""
	myroot = None
	if settings is None:
		raise TypeError("settings argument is required")
	mylink = dblink(cat, pkg, settings=settings, treetype="vartree",
		vartree=vartree, scheduler=scheduler)
	vartree = mylink.vartree
	# Acquire the lock *before* entering the try block, so the finally
	# clause never calls unlockdb() for a lock that was never
	# successfully acquired.
	mylink.lockdb()
	try:
		if mylink.exists():
			plib_registry = vartree.dbapi._plib_registry
			# A registry of None means preserve-libs is entirely disabled.
			if plib_registry is not None:
				plib_registry.load()
				plib_registry.pruneNonExisting()
			retval = mylink.unmerge(ldpath_mtimes=ldpath_mtimes)
			if retval == os.EX_OK:
				mylink.delete()
			return retval
		return os.EX_OK
	finally:
		# _linkmap is None when preserve-libs is entirely disabled.
		if vartree.dbapi._linkmap is not None:
			vartree.dbapi._linkmap._clear_cache()
		mylink.unlockdb()
4018
def write_contents(contents, root, f):
	"""
	Write contents to any file like object. The file will be left open.
	"""
	# Keep the leading path separator when stripping the root prefix.
	prefix_len = len(root) - 1
	for path in sorted(contents):
		fields = contents[path]
		kind = fields[0]
		rel_path = path[prefix_len:]
		if kind == "obj":
			kind, mtime, md5sum = fields
			f.write("%s %s %s %s\n" % (kind, rel_path, md5sum, mtime))
		elif kind == "sym":
			kind, mtime, target = fields
			f.write("%s %s -> %s %s\n" % (kind, rel_path, target, mtime))
		else:
			# dir, dev and fif entries carry no extra metadata.
			f.write("%s %s\n" % (kind, rel_path))
4039
def tar_contents(contents, root, tar, protect=None, onProgress=None):
	"""
	Add every path listed in contents (a CONTENTS-style mapping of
	absolute path -> entry tuple) to the given tarfile object, with
	archive names relative to root.

	@param protect: optional callable; when it returns True for a path,
		an empty placeholder is archived instead of the real file
	@param onProgress: optional callable invoked as
		onProgress(maxval, curval) to report progress
	"""
	# Shadow the module-level os with the merge-encoding-aware wrapper;
	# may be rebound to portage.os below if the paths only encode
	# cleanly with the 'fs' encoding.
	os = _os_merge

	try:
		for x in contents:
			_unicode_encode(x,
				encoding=_encodings['merge'],
				errors='strict')
	except UnicodeEncodeError:
		# The package appears to have been merged with a
		# different value of sys.getfilesystemencoding(),
		# so fall back to utf_8 if appropriate.
		try:
			for x in contents:
				_unicode_encode(x,
					encoding=_encodings['fs'],
					errors='strict')
		except UnicodeEncodeError:
			pass
		else:
			os = portage.os

	from portage.util import normalize_path
	import tarfile
	# Normalize root to exactly one trailing separator so prefix
	# stripping below yields clean archive names.
	root = normalize_path(root).rstrip(os.path.sep) + os.path.sep
	id_strings = {}
	maxval = len(contents)
	curval = 0
	if onProgress:
		onProgress(maxval, 0)
	paths = list(contents)
	paths.sort()
	for path in paths:
		curval += 1
		try:
			lst = os.lstat(path)
		except OSError as e:
			if e.errno != errno.ENOENT:
				raise
			del e
			# Path no longer exists; skip it but still report progress.
			if onProgress:
				onProgress(maxval, curval)
			continue
		contents_type = contents[path][0]
		if path.startswith(root):
			arcname = path[len(root):]
		else:
			raise ValueError("invalid root argument: '%s'" % root)
		live_path = path
		if 'dir' == contents_type and \
			not stat.S_ISDIR(lst.st_mode) and \
			os.path.isdir(live_path):
			# Even though this was a directory in the original ${D}, it exists
			# as a symlink to a directory in the live filesystem.  It must be
			# recorded as a real directory in the tar file to ensure that tar
			# can properly extract its children.
			live_path = os.path.realpath(live_path)
		tarinfo = tar.gettarinfo(live_path, arcname)

		if stat.S_ISREG(lst.st_mode):
			# break hardlinks due to bug #185305
			tarinfo.type = tarfile.REGTYPE
			if protect and protect(path):
				# Create an empty file as a place holder in order to avoid
				# potential collision-protect issues.
				f = tempfile.TemporaryFile()
				f.write(_unicode_encode(
					"# empty file because --include-config=n " + \
					"when `quickpkg` was used\n"))
				f.flush()
				f.seek(0)
				# Size must reflect the placeholder, not the real file.
				tarinfo.size = os.fstat(f.fileno()).st_size
				tar.addfile(tarinfo, f)
				f.close()
			else:
				f = open(_unicode_encode(path,
					encoding=object.__getattribute__(os, '_encoding'),
					errors='strict'), 'rb')
				try:
					tar.addfile(tarinfo, f)
				finally:
					f.close()
		else:
			# Non-regular entries (dirs, symlinks, devices) carry no data.
			tar.addfile(tarinfo)
		if onProgress:
			onProgress(maxval, curval)