1 # Copyright 2004-2011 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 __all__ = ['apply_permissions', 'apply_recursive_permissions',
5         'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream',
6         'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs',
7         'find_updated_config_files', 'getconfig', 'getlibpaths', 'grabdict',
8         'grabdict_package', 'grabfile', 'grabfile_package', 'grablines',
9         'initialize_logger', 'LazyItemsDict', 'map_dictlist_vals',
10         'new_protect_filename', 'normalize_path', 'pickle_read', 'stack_dictlist',
11         'stack_dicts', 'stack_lists', 'unique_array', 'unique_everseen', 'varexpand',
12         'write_atomic', 'writedict', 'writemsg', 'writemsg_level', 'writemsg_stdout']
13
14 from copy import deepcopy
15 import errno
16 import io
17 try:
18         from itertools import filterfalse
19 except ImportError:
20         from itertools import ifilterfalse as filterfalse
21 import logging
22 import re
23 import shlex
24 import stat
25 import string
26 import sys
27 import traceback
28
29 import portage
30 portage.proxy.lazyimport.lazyimport(globals(),
31         'pickle',
32         'portage.dep:Atom',
33         'portage.util.listdir:_ignorecvs_dirs'
34 )
35
36 from portage import os
37 from portage import subprocess_getstatusoutput
38 from portage import _encodings
39 from portage import _os_merge
40 from portage import _unicode_encode
41 from portage import _unicode_decode
42 from portage.exception import InvalidAtom, PortageException, FileNotFound, \
43        OperationNotPermitted, PermissionDenied, ReadOnlyFileSystem
44 from portage.localization import _
45 from portage.proxy.objectproxy import ObjectProxy
46 from portage.cache.mappings import UserDict
47
48 noiselimit = 0
49
50 def initialize_logger(level=logging.WARN):
51         """Sets up basic logging of portage activities
52         Args:
53                 level: the minimum level to emit messages at (a logging module constant such as logging.INFO, logging.DEBUG or logging.WARNING)
54         Returns:
55                 None
56         """
57         logging.basicConfig(level=level, format='[%(levelname)-4s] %(message)s')
58
59 def writemsg(mystr,noiselevel=0,fd=None):
60         """Prints out warning and debug messages based on the noiselimit setting"""
61         global noiselimit
62         if fd is None:
63                 fd = sys.stderr
64         if noiselevel <= noiselimit:
65                 # avoid potential UnicodeEncodeError
66                 if isinstance(fd, io.StringIO):
67                         mystr = _unicode_decode(mystr,
68                                 encoding=_encodings['content'], errors='replace')
69                 else:
70                         mystr = _unicode_encode(mystr,
71                                 encoding=_encodings['stdio'], errors='backslashreplace')
72                         if sys.hexversion >= 0x3000000 and fd in (sys.stdout, sys.stderr):
73                                 fd = fd.buffer
74                 fd.write(mystr)
75                 fd.flush()
76
77 def writemsg_stdout(mystr,noiselevel=0):
78         """Prints messages to stdout based on the noiselimit setting"""
79         writemsg(mystr, noiselevel=noiselevel, fd=sys.stdout)
80
81 def writemsg_level(msg, level=0, noiselevel=0):
82         """
83         Show a message for the given level as defined by the logging module
84         (default is 0). When level >= logging.WARNING then the message is
85         sent to stderr, otherwise it is sent to stdout. The noiselevel is
86         passed directly to writemsg().
87
88         @type msg: str
89         @param msg: a message string, including newline if appropriate
90         @type level: int
91         @param level: a numeric logging level (see the logging module)
92         @type noiselevel: int
93         @param noiselevel: passed directly to writemsg
94         """
95         if level >= logging.WARNING:
96                 fd = sys.stderr
97         else:
98                 fd = sys.stdout
99         writemsg(msg, noiselevel=noiselevel, fd=fd)
100
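# Illustrative usage of writemsg_level (not part of the original module):
# messages at logging.WARNING or above go to stderr, lower levels to stdout,
# and noiselevel=-1 makes writemsg() emit them regardless of noiselimit.
#
#     writemsg_level("recoverable problem\n", level=logging.WARNING, noiselevel=-1)
#     writemsg_level("informational note\n", level=logging.INFO, noiselevel=-1)
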
101 def normalize_path(mypath):
102         """ 
103         os.path.normpath("//foo") returns "//foo" instead of "/foo"
104         We dislike this behavior so we create our own normpath func
105         to fix it.
106         """
107         if sys.hexversion >= 0x3000000 and isinstance(mypath, bytes):
108                 path_sep = os.path.sep.encode()
109         else:
110                 path_sep = os.path.sep
111
112         if mypath.startswith(path_sep):
113                 # posixpath.normpath collapses 3 or more leading slashes to just 1.
114                 return os.path.normpath(2*path_sep + mypath)
115         else:
116                 return os.path.normpath(mypath)
117
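# Illustrative doctest-style examples for normalize_path (not part of the
# original module); note that a plain os.path.normpath("//foo") would keep
# the double slash:
#
#     >>> normalize_path("//foo/bar")
#     '/foo/bar'
#     >>> normalize_path("/foo//bar/")
#     '/foo/bar'
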
118 def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False):
119         """This function grabs the lines in a file, normalizes whitespace and returns the lines in a list;
120         empty lines and comments (a leading #, or a token starting with # and everything after it) are stripped"""
121
122         mylines=grablines(myfilename, recursive, remember_source_file=True)
123         newlines=[]
124
125         for x, source_file in mylines:
126                 #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
127                 #into single spaces.
128                 myline = x.split()
129                 if x and x[0] != "#":
130                         mylinetemp = []
131                         for item in myline:
132                                 if item[:1] != "#":
133                                         mylinetemp.append(item)
134                                 else:
135                                         break
136                         myline = mylinetemp
137
138                 myline = " ".join(myline)
139                 if not myline:
140                         continue
141                 if myline[0]=="#":
142                         # Check if we have a compat-level string. BC-integration data.
143                         # '##COMPAT==>N<==' 'some string attached to it'
144                         mylinetest = myline.split("<==",1)
145                         if len(mylinetest) == 2:
146                                 myline_potential = mylinetest[1]
147                                 mylinetest = mylinetest[0].split("##COMPAT==>")
148                                 if len(mylinetest) == 2:
149                                         if compat_level >= int(mylinetest[1]):
150                                                 # It's a compat line, and the key matches.
151                                                 newlines.append(myline_potential)
152                                 continue
153                         else:
154                                 continue
155                 if remember_source_file:
156                         newlines.append((myline, source_file))
157                 else:
158                         newlines.append(myline)
159         return newlines
160
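# Illustrative example for grabfile (not part of the original module): given a
# file whose lines are "foo  bar # trailing comment" and "# a full-line
# comment", grabfile() returns ['foo bar'] -- whitespace is collapsed and both
# comment forms are dropped.
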
161 def map_dictlist_vals(func,myDict):
162         """Performs a function on each value of each key in a dictlist.
163         Returns a new dictlist."""
164         new_dl = {}
165         for key in myDict:
166                 new_dl[key] = []
167                 new_dl[key] = [func(x) for x in myDict[key]]
168         return new_dl
169
170 def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0):
171         """
172         Stacks an array of dict-types into one array. Optionally merging or
173         overwriting matching key/value pairs for the dict[key]->list.
174         Returns a single dict. Higher index in lists is preferenced.
175         
176         Example usage:
177                 >>> from portage.util import stack_dictlist
178                 >>> stack_dictlist([{'a': ['b']}, {'x': ['y']}])
179                 {'a': ['b'], 'x': ['y']}
180                 >>> stack_dictlist([{'a': ['b']}, {'a': ['c']}], incremental=True)
181                 {'a': ['b', 'c']}
182                 >>> a = {'KEYWORDS': ['x86', 'alpha']}
183                 >>> b = {'KEYWORDS': ['-x86']}
184                 >>> stack_dictlist([a, b])
185                 {'KEYWORDS': ['x86', 'alpha', '-x86']}
186                 >>> stack_dictlist([a, b], incremental=True)
187                 {'KEYWORDS': ['alpha']}
188                 >>> stack_dictlist([a, b], incrementals=['KEYWORDS'])
189                 {'KEYWORDS': ['alpha']}
190         
191         @param original_dicts: a list of (dictionary objects or None)
192         @type original_dicts: list
193         @param incremental: if true, a '-token' entry removes a previously seen
194            'token' value and '-*' clears the accumulated list for that key.
195         @type incremental: boolean
196         @param incrementals: a list of keys that are treated incrementally even
197            when incremental is false (-foo removes foo from the returned dict).
198         @type incrementals: list
199         @param ignore_none: unused; kept for backward compatibility.
200         @type ignore_none: boolean
201         
202         """
203         final_dict = {}
204         for mydict in original_dicts:
205                 if mydict is None:
206                         continue
207                 for y in mydict:
208                         if not y in final_dict:
209                                 final_dict[y] = []
210                         
211                         for thing in mydict[y]:
212                                 if thing:
213                                         if incremental or y in incrementals:
214                                                 if thing == "-*":
215                                                         final_dict[y] = []
216                                                         continue
217                                                 elif thing[:1] == '-':
218                                                         try:
219                                                                 final_dict[y].remove(thing[1:])
220                                                         except ValueError:
221                                                                 pass
222                                                         continue
223                                         if thing not in final_dict[y]:
224                                                 final_dict[y].append(thing)
225                         if y in final_dict and not final_dict[y]:
226                                 del final_dict[y]
227         return final_dict
228
229 def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
230         """Stacks an array of dict-types into one array. Optionally merging or
231         overwriting matching key/value pairs for the dict[key]->string.
232         Returns a single dict."""
233         final_dict = {}
234         for mydict in dicts:
235                 if not mydict:
236                         continue
237                 for k, v in mydict.items():
238                         if k in final_dict and (incremental or (k in incrementals)):
239                                 final_dict[k] += " " + v
240                         else:
241                                 final_dict[k]  = v
242         return final_dict
243
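# Illustrative doctest-style examples for stack_dicts (not part of the
# original module):
#
#     >>> stack_dicts([{'USE': 'foo'}, {'USE': 'bar'}])
#     {'USE': 'bar'}
#     >>> stack_dicts([{'USE': 'foo'}, {'USE': 'bar'}], incremental=1)
#     {'USE': 'foo bar'}
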
244 def append_repo(atom_list, repo_name, remember_source_file=False):
245         """
246         Takes a list of valid atoms without repo spec and appends ::repo_name.
247         """
248         if remember_source_file:
249                 return [(Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True), source) \
250                         for atom, source in atom_list]
251         else:
252                 return [Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True) \
253                         for atom in atom_list]
254
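# Illustrative example for append_repo (not part of the original module):
# append_repo(["dev-libs/libfoo"], "gentoo") returns a single-element list
# whose Atom compares equal to "dev-libs/libfoo::gentoo".
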
255 def stack_lists(lists, incremental=1, remember_source_file=False,
256         warn_for_unmatched_removal=False, strict_warn_for_unmatched_removal=False, ignore_repo=False):
257         """Stacks an array of list-types into one array. Optionally removing
258         distinct values using '-value' notation. Higher index is preferenced.
259
260         all elements must be hashable."""
261         matched_removals = set()
262         unmatched_removals = {}
263         new_list = {}
264         for sub_list in lists:
265                 for token in sub_list:
266                         token_key = token
267                         if remember_source_file:
268                                 token, source_file = token
269                         else:
270                                 source_file = False
271
272                         if token is None:
273                                 continue
274
275                         if incremental:
276                                 if token == "-*":
277                                         new_list.clear()
278                                 elif token[:1] == '-':
279                                         matched = False
280                                         if ignore_repo and not "::" in token:
281                                                 #Let -cat/pkg remove cat/pkg::repo.
282                                                 to_be_removed = []
283                                                 token_slice = token[1:]
284                                                 for atom in new_list:
285                                                         atom_without_repo = atom
286                                                         if atom.repo is not None:
287                                                                 # Atom.without_repo instantiates a new Atom,
288                                                                 # which is unnecessary here, so use string
289                                                                 # replacement instead.
290                                                                 atom_without_repo = \
291                                                                         atom.replace("::" + atom.repo, "", 1)
292                                                         if atom_without_repo == token_slice:
293                                                                 to_be_removed.append(atom)
294                                                 if to_be_removed:
295                                                         matched = True
296                                                         for atom in to_be_removed:
297                                                                 new_list.pop(atom)
298                                         else:
299                                                 try:
300                                                         new_list.pop(token[1:])
301                                                         matched = True
302                                                 except KeyError:
303                                                         pass
304
305                                         if not matched:
306                                                 if source_file and \
307                                                         (strict_warn_for_unmatched_removal or \
308                                                         token_key not in matched_removals):
309                                                         unmatched_removals.setdefault(source_file, set()).add(token)
310                                         else:
311                                                 matched_removals.add(token_key)
312                                 else:
313                                         new_list[token] = source_file
314                         else:
315                                 new_list[token] = source_file
316
317         if warn_for_unmatched_removal:
318                 for source_file, tokens in unmatched_removals.items():
319                         if len(tokens) > 3:
320                                 selected = [tokens.pop(), tokens.pop(), tokens.pop()]
321                                 writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \
322                                         (source_file, ", ".join(selected), len(tokens)),
323                                         noiselevel=-1)
324                         else:
325                                 writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") % (source_file, ", ".join(tokens)),
326                                         noiselevel=-1)
327
328         if remember_source_file:
329                 return list(new_list.items())
330         else:
331                 return list(new_list)
332
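# Illustrative examples for stack_lists (not part of the original module);
# with the default incremental=1, a '-token' removes a previously seen token
# and '-*' clears everything accumulated so far:
#
#     >>> sorted(stack_lists([["a", "b"], ["-b", "c"]]))
#     ['a', 'c']
#     >>> sorted(stack_lists([["a", "b"], ["-*", "c"]]))
#     ['c']
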
333 def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
334         """
335         This function grabs the lines in a file, normalizes whitespace and returns lines in a dictionary
336         
337         @param myfilename: file to process
338         @type myfilename: string (path)
339         @param juststrings: only return strings
340         @type juststrings: Boolean (integer)
341         @param empty: Keep entries that consist of a key with no values
342         @type empty: Boolean (integer)
343         @param recursive: Recursively grab ( support for /etc/portage/package.keywords/* and friends )
344         @type recursive: Boolean (integer)
345         @param incremental: Append values for keys that are seen again instead of overwriting them
346         @type incremental: Boolean (integer)
347         @rtype: Dictionary
348         @returns:
349         Returns the lines of a file as a dictionary, for example the line:
350                 'sys-apps/portage x86 amd64 ppc'
351                 would return
352                 { "sys-apps/portage" : [ 'x86', 'amd64', 'ppc' ] }
353                 i.e. the line syntax is: key followed by a list of values
354         """
355         newdict={}
356         for x in grablines(myfilename, recursive):
357                 #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
358                 #into single spaces.
359                 if x[0] == "#":
360                         continue
361                 myline=x.split()
362                 mylinetemp = []
363                 for item in myline:
364                         if item[:1] != "#":
365                                 mylinetemp.append(item)
366                         else:
367                                 break
368                 myline = mylinetemp
369                 if len(myline) < 2 and empty == 0:
370                         continue
371                 if len(myline) < 1 and empty == 1:
372                         continue
373                 if incremental:
374                         newdict.setdefault(myline[0], []).extend(myline[1:])
375                 else:
376                         newdict[myline[0]] = myline[1:]
377         if juststrings:
378                 for k, v in newdict.items():
379                         newdict[k] = " ".join(v)
380         return newdict
381
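# Illustrative example for grabdict (not part of the original module): for a
# file containing the single line 'sys-apps/portage x86 amd64', grabdict()
# returns {'sys-apps/portage': ['x86', 'amd64']}, or
# {'sys-apps/portage': 'x86 amd64'} when juststrings is true.
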
382 def read_corresponding_eapi_file(filename):
383         """
384         Read the 'eapi' file from the directory 'filename' is in.
385         Returns "0" if the file is not present or invalid.
386         """
387         default = "0"
388         eapi_file = os.path.join(os.path.dirname(filename), "eapi")
389         try:
390                 f = open(eapi_file, "r")
391                 lines = f.readlines()
392                 if len(lines) == 1:
393                         eapi = lines[0].rstrip("\n")
394                 else:
395                         writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
396                                 noiselevel=-1)
397                         eapi = default
398                 f.close()
399         except IOError:
400                 eapi = default
401
402         return eapi
403
404 def grabdict_package(myfilename, juststrings=0, recursive=0, allow_wildcard=False, allow_repo=False,
405         verify_eapi=False, eapi=None):
406         """ Does the same thing as grabdict except it validates keys
407             with isvalidatom()"""
408         pkgs=grabdict(myfilename, juststrings, empty=1, recursive=recursive)
409         if not pkgs:
410                 return pkgs
411         if verify_eapi and eapi is None:
412                 eapi = read_corresponding_eapi_file(myfilename)
413
414         # Build a new dict instead of modifying pkgs in place, which avoids
415         # "RuntimeError: dictionary changed size during iteration"
416         # when an invalid atom has to be dropped.
417         atoms = {}
418         for k, v in pkgs.items():
419                 try:
420                         k = Atom(k, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
421                 except InvalidAtom as e:
422                         writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
423                                 noiselevel=-1)
424                 else:
425                         atoms[k] = v
426         return atoms
427
428 def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=False, allow_repo=False,
429         remember_source_file=False, verify_eapi=False, eapi=None):
430
431         pkgs=grabfile(myfilename, compatlevel, recursive=recursive, remember_source_file=True)
432         if not pkgs:
433                 return pkgs
434         if verify_eapi and eapi is None:
435                 eapi = read_corresponding_eapi_file(myfilename)
436         mybasename = os.path.basename(myfilename)
437         atoms = []
438         for pkg, source_file in pkgs:
439                 pkg_orig = pkg
440                 # for packages and package.mask files
441                 if pkg[:1] == "-":
442                         pkg = pkg[1:]
443                 if pkg[:1] == '*' and mybasename == 'packages':
444                         pkg = pkg[1:]
445                 try:
446                         pkg = Atom(pkg, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
447                 except InvalidAtom as e:
448                         writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
449                                 noiselevel=-1)
450                 else:
451                         if pkg_orig == str(pkg):
452                                 # normal atom, so return as Atom instance
453                                 if remember_source_file:
454                                         atoms.append((pkg, source_file))
455                                 else:
456                                         atoms.append(pkg)
457                         else:
458                                 # atom has special prefix, so return as string
459                                 if remember_source_file:
460                                         atoms.append((pkg_orig, source_file))
461                                 else:
462                                         atoms.append(pkg_orig)
463         return atoms
464
465 def grablines(myfilename, recursive=0, remember_source_file=False):
466         mylines=[]
467         if recursive and os.path.isdir(myfilename):
468                 if os.path.basename(myfilename) in _ignorecvs_dirs:
469                         return mylines
470                 dirlist = os.listdir(myfilename)
471                 dirlist.sort()
472                 for f in dirlist:
473                         if not f.startswith(".") and not f.endswith("~"):
474                                 mylines.extend(grablines(
475                                         os.path.join(myfilename, f), recursive, remember_source_file))
476         else:
477                 try:
478                         myfile = io.open(_unicode_encode(myfilename,
479                                 encoding=_encodings['fs'], errors='strict'),
480                                 mode='r', encoding=_encodings['content'], errors='replace')
481                         if remember_source_file:
482                                 mylines = [(line, myfilename) for line in myfile.readlines()]
483                         else:
484                                 mylines = myfile.readlines()
485                         myfile.close()
486                 except IOError as e:
487                         if e.errno == PermissionDenied.errno:
488                                 raise PermissionDenied(myfilename)
489                         pass
490         return mylines
491
492 def writedict(mydict,myfilename,writekey=True):
493         """Writes out a dict to a file; writekey=0 mode doesn't write out
494         the key and assumes all values are strings, not lists."""
495         lines = []
496         if not writekey:
497                 for v in mydict.values():
498                         lines.append(v + "\n")
499         else:
500                 for k, v in mydict.items():
501                         lines.append("%s %s\n" % (k, " ".join(v)))
502         write_atomic(myfilename, "".join(lines))
503
504 def shlex_split(s):
505         """
506         This is equivalent to shlex.split but it temporarily encodes unicode
507         strings to bytes since shlex.split() doesn't handle unicode strings.
508         """
509         is_unicode = sys.hexversion < 0x3000000 and isinstance(s, unicode)
510         if is_unicode:
511                 s = _unicode_encode(s)
512         rval = shlex.split(s)
513         if is_unicode:
514                 rval = [_unicode_decode(x) for x in rval]
515         return rval
516
517 class _tolerant_shlex(shlex.shlex):
518         def sourcehook(self, newfile):
519                 try:
520                         return shlex.shlex.sourcehook(self, newfile)
521                 except EnvironmentError as e:
522                         writemsg(_("!!! Parse error in '%s': source command failed: %s\n") % \
523                                 (self.infile, str(e)), noiselevel=-1)
524                         return (newfile, io.StringIO())
525
526 _invalid_var_name_re = re.compile(r'^\d|\W')
527
528 def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
529         if isinstance(expand, dict):
530                 # Some existing variable definitions have been
531                 # passed in, for use in substitutions.
532                 expand_map = expand
533                 expand = True
534         else:
535                 expand_map = {}
536         mykeys = {}
537         f = None
538         try:
539                 # NOTE: shlex doesn't support unicode objects with Python 2
540                 # (produces spurious \0 characters).
541                 if sys.hexversion < 0x3000000:
542                         f = open(_unicode_encode(mycfg,
543                                 encoding=_encodings['fs'], errors='strict'), 'rb')
544                 else:
545                         f = open(_unicode_encode(mycfg,
546                                 encoding=_encodings['fs'], errors='strict'), mode='r',
547                                 encoding=_encodings['content'], errors='replace')
548                 content = f.read()
549         except IOError as e:
550                 if e.errno == PermissionDenied.errno:
551                         raise PermissionDenied(mycfg)
552                 if e.errno != errno.ENOENT:
553                         writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
554                         if e.errno not in (errno.EISDIR,):
555                                 raise
556                 return None
557         finally:
558                 if f is not None:
559                         f.close()
560
561         # Workaround for avoiding a silent error in shlex that is
562         # triggered by a source statement at the end of the file
563         # without a trailing newline after the source statement.
564         if content and content[-1] != '\n':
565                 content += '\n'
566
567         # Warn about dos-style line endings since that prevents
568         # people from being able to source them with bash.
569         if '\r' in content:
570                 writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
571                         "in config file: '%s'") + "\n") % mycfg, noiselevel=-1)
572
573         try:
574                 if tolerant:
575                         shlex_class = _tolerant_shlex
576                 else:
577                         shlex_class = shlex.shlex
578                 # The default shlex.sourcehook() implementation
579                 # only joins relative paths when the infile
580                 # attribute is properly set.
581                 lex = shlex_class(content, infile=mycfg, posix=True)
582                 lex.wordchars = string.digits + string.ascii_letters + \
583                         "~!@#$%*_\:;?,./-+{}"
584                 lex.quotes="\"'"
585                 if allow_sourcing:
586                         lex.source="source"
587                 while 1:
588                         key=lex.get_token()
589                         if key == "export":
590                                 key = lex.get_token()
591                         if key is None:
592                                 #normal end of file
593                                 break;
594                         equ=lex.get_token()
595                         if (equ==''):
596                                 #unexpected end of file
597                                 #lex.error_leader(self.filename,lex.lineno)
598                                 if not tolerant:
599                                         writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
600                                                 noiselevel=-1)
601                                         raise Exception(_("ParseError: Unexpected EOF: %s: on/before line %s") % (mycfg, lex.lineno))
602                                 else:
603                                         return mykeys
604                         elif (equ!='='):
605                                 #invalid token
606                                 #lex.error_leader(self.filename,lex.lineno)
607                                 if not tolerant:
608                                         raise Exception(_("ParseError: Invalid token "
609                                                 "'%s' (not '='): %s: line %s") % \
610                                                 (equ, mycfg, lex.lineno))
611                                 else:
612                                         return mykeys
613                         val=lex.get_token()
614                         if val is None:
615                                 #unexpected end of file
616                                 #lex.error_leader(self.filename,lex.lineno)
617                                 if not tolerant:
618                                         writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
619                                                 noiselevel=-1)
620                                         raise portage.exception.CorruptionError(_("ParseError: Unexpected EOF: %s: line %s") % (mycfg, lex.lineno))
621                                 else:
622                                         return mykeys
623                         key = _unicode_decode(key)
624                         val = _unicode_decode(val)
625
626                         if _invalid_var_name_re.search(key) is not None:
627                                 if not tolerant:
628                                         raise Exception(_(
629                                                 "ParseError: Invalid variable name '%s': line %s") % \
630                                                 (key, lex.lineno - 1))
631                                 writemsg(_("!!! Invalid variable name '%s': line %s in %s\n") \
632                                         % (key, lex.lineno - 1, mycfg), noiselevel=-1)
633                                 continue
634
635                         if expand:
636                                 mykeys[key] = varexpand(val, expand_map)
637                                 expand_map[key] = mykeys[key]
638                         else:
639                                 mykeys[key] = val
640         except SystemExit as e:
641                 raise
642         except Exception as e:
643                 raise portage.exception.ParseError(str(e)+" in "+mycfg)
644         return mykeys
645         
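# Illustrative example for getconfig (not part of the original module); the
# path is hypothetical.  For a file /etc/example.conf containing:
#
#     FOO="bar"
#     BAZ="${FOO} baz"
#
# getconfig("/etc/example.conf") returns {'FOO': 'bar', 'BAZ': 'bar baz'},
# since each value is expanded with varexpand() against the keys parsed so far.
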
646 #cache expansions of constant strings
647 cexpand={}
648 def varexpand(mystring, mydict=None):
649         if mydict is None:
650                 mydict = {}
651         newstring = cexpand.get(" "+mystring, None)
652         if newstring is not None:
653                 return newstring
654
655         """
656         new variable expansion code.  Preserves quotes, handles \n, etc.
657         This code is used by the configfile code, as well as others (parser)
658         This would be a good bunch of code to port to C.
659         """
660         numvars=0
661         mystring=" "+mystring
662         #in single, double quotes
663         insing=0
664         indoub=0
665         pos=1
666         newstring=" "
667         while (pos<len(mystring)):
668                 if (mystring[pos]=="'") and (mystring[pos-1]!="\\"):
669                         if (indoub):
670                                 newstring=newstring+"'"
671                         else:
672                                 newstring += "'" # Quote removal is handled by shlex.
673                                 insing=not insing
674                         pos=pos+1
675                         continue
676                 elif (mystring[pos]=='"') and (mystring[pos-1]!="\\"):
677                         if (insing):
678                                 newstring=newstring+'"'
679                         else:
680                                 newstring += '"' # Quote removal is handled by shlex.
681                                 indoub=not indoub
682                         pos=pos+1
683                         continue
684                 if (not insing): 
685                         #expansion time
686                         if (mystring[pos]=="\n"):
687                                 #convert newlines to spaces
688                                 newstring=newstring+" "
689                                 pos=pos+1
690                         elif (mystring[pos]=="\\"):
691                                 # For backslash expansion, this function used to behave like
692                                 # echo -e, but that's not needed for our purposes. We want to
693                                 # behave like bash does when expanding a variable assignment
694                                 # in a sourced file, in which case it performs backslash
695                                 # removal for \\ and \$ but nothing more. It also removes
696                                 # escaped newline characters. Note that we don't handle
697                                 # escaped quotes here, since getconfig() uses shlex
698                                 # to handle that earlier.
699                                 if (pos+1>=len(mystring)):
700                                         newstring=newstring+mystring[pos]
701                                         break
702                                 else:
703                                         a = mystring[pos + 1]
704                                         pos = pos + 2
705                                         if a in ("\\", "$"):
706                                                 newstring = newstring + a
707                                         elif a == "\n":
708                                                 pass
709                                         else:
710                                                 newstring = newstring + mystring[pos-2:pos]
711                                         continue
712                         elif (mystring[pos]=="$") and (mystring[pos-1]!="\\"):
713                                 pos=pos+1
714                                 if mystring[pos]=="{":
715                                         pos=pos+1
716                                         braced=True
717                                 else:
718                                         braced=False
719                                 myvstart=pos
720                                 validchars=string.ascii_letters+string.digits+"_"
721                                 while mystring[pos] in validchars:
722                                         if (pos+1)>=len(mystring):
723                                                 if braced:
724                                                         cexpand[mystring]=""
725                                                         return ""
726                                                 else:
727                                                         pos=pos+1
728                                                         break
729                                         pos=pos+1
730                                 myvarname=mystring[myvstart:pos]
731                                 if braced:
732                                         if mystring[pos]!="}":
733                                                 cexpand[mystring]=""
734                                                 return ""
735                                         else:
736                                                 pos=pos+1
737                                 if len(myvarname)==0:
738                                         cexpand[mystring]=""
739                                         return ""
740                                 numvars=numvars+1
741                                 if myvarname in mydict:
742                                         newstring=newstring+mydict[myvarname] 
743                         else:
744                                 newstring=newstring+mystring[pos]
745                                 pos=pos+1
746                 else:
747                         newstring=newstring+mystring[pos]
748                         pos=pos+1
749         if numvars==0:
750                 cexpand[mystring]=newstring[1:]
751         return newstring[1:]    
752
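# Illustrative doctest-style examples for varexpand (not part of the original
# module); undefined variables simply expand to nothing:
#
#     >>> varexpand("${ROOT}/etc", {"ROOT": "/usr/local"})
#     '/usr/local/etc'
#     >>> varexpand("$ROOT/etc", {"ROOT": "/usr/local"})
#     '/usr/local/etc'
#     >>> varexpand("${UNDEFINED}/etc", {})
#     '/etc'
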
753 # broken and removed, but can still be imported
754 pickle_write = None
755
756 def pickle_read(filename,default=None,debug=0):
757         if not os.access(filename, os.R_OK):
758                 writemsg(_("pickle_read(): File not readable. '")+filename+"'\n",1)
759                 return default
760         data = None
761         try:
762                 myf = open(_unicode_encode(filename,
763                         encoding=_encodings['fs'], errors='strict'), 'rb')
764                 mypickle = pickle.Unpickler(myf)
765                 data = mypickle.load()
766                 myf.close()
767                 del mypickle,myf
768                 writemsg(_("pickle_read(): Loaded pickle. '")+filename+"'\n",1)
769         except SystemExit as e:
770                 raise
771         except Exception as e:
772                 writemsg(_("!!! Failed to load pickle: ")+str(e)+"\n",1)
773                 data = default
774         return data
775
776 def dump_traceback(msg, noiselevel=1):
777         info = sys.exc_info()
778         if not info[2]:
779                 stack = traceback.extract_stack()[:-1]
780                 error = None
781         else:
782                 stack = traceback.extract_tb(info[2])
783                 error = str(info[1])
784         writemsg("\n====================================\n", noiselevel=noiselevel)
785         writemsg("%s\n\n" % msg, noiselevel=noiselevel)
786         for line in traceback.format_list(stack):
787                 writemsg(line, noiselevel=noiselevel)
788         if error:
789                 writemsg(error+"\n", noiselevel=noiselevel)
790         writemsg("====================================\n\n", noiselevel=noiselevel)
791
792 class cmp_sort_key(object):
793         """
794         In python-3.0 the list.sort() method no longer has a "cmp" keyword
795         argument. This class acts as an adapter which converts a cmp function
796         into one that's suitable for use as the "key" keyword argument to
797         list.sort(), making it easier to port code for python-3.0 compatibility.
798         It works by generating key objects which use the given cmp function to
799         implement their __lt__ method.
800         """
801         __slots__ = ("_cmp_func",)
802
803         def __init__(self, cmp_func):
804                 """
805                 @type cmp_func: callable which takes 2 positional arguments
806                 @param cmp_func: A cmp function.
807                 """
808                 self._cmp_func = cmp_func
809
810         def __call__(self, lhs):
811                 return self._cmp_key(self._cmp_func, lhs)
812
813         class _cmp_key(object):
814                 __slots__ = ("_cmp_func", "_obj")
815
816                 def __init__(self, cmp_func, obj):
817                         self._cmp_func = cmp_func
818                         self._obj = obj
819
820                 def __lt__(self, other):
821                         if other.__class__ is not self.__class__:
822                                 raise TypeError("Expected type %s, got %s" % \
823                                         (self.__class__, other.__class__))
824                         return self._cmp_func(self._obj, other._obj) < 0
825
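# Illustrative example for cmp_sort_key (not part of the original module);
# it adapts an old-style cmp function for use with the sort key protocol:
#
#     >>> sorted([3, 1, 2], key=cmp_sort_key(lambda a, b: a - b))
#     [1, 2, 3]
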
826 def unique_array(s):
827         """lifted from python cookbook, credit: Tim Peters
828         Return a list of the elements in s in arbitrary order, sans duplicates"""
829         n = len(s)
830         # assume all elements are hashable, if so, it's linear
831         try:
832                 return list(set(s))
833         except TypeError:
834                 pass
835
836         # so much for linear.  abuse sort.
837         try:
838                 t = list(s)
839                 t.sort()
840         except TypeError:
841                 pass
842         else:
843                 assert n > 0
844                 last = t[0]
845                 lasti = i = 1
846                 while i < n:
847                         if t[i] != last:
848                                 t[lasti] = last = t[i]
849                                 lasti += 1
850                         i += 1
851                 return t[:lasti]
852
853         # blah.  back to original portage.unique_array
854         u = []
855         for x in s:
856                 if x not in u:
857                         u.append(x)
858         return u
859
860 def unique_everseen(iterable, key=None):
861     """
862     List unique elements, preserving order. Remember all elements ever seen.
863     Taken from itertools documentation.
864     """
865     # unique_everseen('AAAABBBCCDAABBB') --> A B C D
866     # unique_everseen('ABBCcAD', str.lower) --> A B C D
867     seen = set()
868     seen_add = seen.add
869     if key is None:
870         for element in filterfalse(seen.__contains__, iterable):
871             seen_add(element)
872             yield element
873     else:
874         for element in iterable:
875             k = key(element)
876             if k not in seen:
877                 seen_add(k)
878                 yield element
879
880 def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
881         stat_cached=None, follow_links=True):
882         """Apply user, group, and mode bits to a file if the existing bits do not
883         already match.  The default behavior is to force an exact match of mode
884         bits.  When mask=0 is specified, mode bits on the target file are allowed
885         to be a superset of the mode argument (via logical OR).  When mask>0, any
886         bits that are set in mask are cleared from the resulting mode (the existing
887         bits are OR'd with mode first, then the mask bits are stripped).
888         Returns True if the permissions were modified and False otherwise."""
889
890         modified = False
891
892         if stat_cached is None:
893                 try:
894                         if follow_links:
895                                 stat_cached = os.stat(filename)
896                         else:
897                                 stat_cached = os.lstat(filename)
898                 except OSError as oe:
899                         func_call = "stat('%s')" % filename
900                         if oe.errno == errno.EPERM:
901                                 raise OperationNotPermitted(func_call)
902                         elif oe.errno == errno.EACCES:
903                                 raise PermissionDenied(func_call)
904                         elif oe.errno == errno.ENOENT:
905                                 raise FileNotFound(filename)
906                         else:
907                                 raise
908
909         if      (uid != -1 and uid != stat_cached.st_uid) or \
910                 (gid != -1 and gid != stat_cached.st_gid):
911                 try:
912                         if follow_links:
913                                 os.chown(filename, uid, gid)
914                         else:
915                                 portage.data.lchown(filename, uid, gid)
916                         modified = True
917                 except OSError as oe:
918                         func_call = "chown('%s', %i, %i)" % (filename, uid, gid)
919                         if oe.errno == errno.EPERM:
920                                 raise OperationNotPermitted(func_call)
921                         elif oe.errno == errno.EACCES:
922                                 raise PermissionDenied(func_call)
923                         elif oe.errno == errno.EROFS:
924                                 raise ReadOnlyFileSystem(func_call)
925                         elif oe.errno == errno.ENOENT:
926                                 raise FileNotFound(filename)
927                         else:
928                                 raise
929
930         new_mode = -1
931         st_mode = stat_cached.st_mode & 0o7777 # protect from unwanted bits
932         if mask >= 0:
933                 if mode == -1:
934                         mode = 0 # Don't add any mode bits when mode is unspecified.
935                 else:
936                         mode = mode & 0o7777
937                 if      (mode & st_mode != mode) or \
938                         ((mask ^ st_mode) & st_mode != st_mode):
939                         new_mode = mode | st_mode
940                         new_mode = (mask ^ new_mode) & new_mode
941         elif mode != -1:
942                 mode = mode & 0o7777 # protect from unwanted bits
943                 if mode != st_mode:
944                         new_mode = mode
945
946         # The chown system call may clear S_ISUID and S_ISGID
947         # bits, so those bits are restored if necessary.
948         if modified and new_mode == -1 and \
949                 (st_mode & stat.S_ISUID or st_mode & stat.S_ISGID):
950                 if mode == -1:
951                         new_mode = st_mode
952                 else:
953                         mode = mode & 0o7777
954                         if mask >= 0:
955                                 new_mode = mode | st_mode
956                                 new_mode = (mask ^ new_mode) & new_mode
957                         else:
958                                 new_mode = mode
959                         if not (new_mode & stat.S_ISUID or new_mode & stat.S_ISGID):
960                                 new_mode = -1
961
962         if not follow_links and stat.S_ISLNK(stat_cached.st_mode):
963                 # Mode doesn't matter for symlinks.
964                 new_mode = -1
965
966         if new_mode != -1:
967                 try:
968                         os.chmod(filename, new_mode)
969                         modified = True
970                 except OSError as oe:
971                         func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
972                         if oe.errno == errno.EPERM:
973                                 raise OperationNotPermitted(func_call)
974                         elif oe.errno == errno.EACCES:
975                                 raise PermissionDenied(func_call)
976                         elif oe.errno == errno.EROFS:
977                                 raise ReadOnlyFileSystem(func_call)
978                         elif oe.errno == errno.ENOENT:
979                                 raise FileNotFound(filename)
980                         raise
981         return modified
982
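# Illustrative call for apply_permissions (not part of the original module);
# the path and gid shown are hypothetical.  With mask=0 the file is allowed
# to keep extra bits (such as setgid) while any bits missing from mode=0o664
# are added:
#
#     apply_permissions("/var/cache/example", uid=-1, gid=123, mode=0o664, mask=0)
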
983 def apply_stat_permissions(filename, newstat, **kwargs):
984         """A wrapper around apply_secpass_permissions that gets
985         uid, gid, and mode from a stat object"""
986         return apply_secpass_permissions(filename, uid=newstat.st_uid, gid=newstat.st_gid,
987         mode=newstat.st_mode, **kwargs)
988
989 def apply_recursive_permissions(top, uid=-1, gid=-1,
990         dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
991         """A wrapper around apply_secpass_permissions that applies permissions
992         recursively.  If optional argument onerror is specified, it should be a
993         function; it will be called with one argument, a PortageException instance.
994         Returns True if all permissions are applied and False if some are left
995         unapplied."""
996
997         # Avoid issues with circular symbolic links, as in bug #339670.
998         follow_links = False
999
1000         if onerror is None:
1001                 # Default behavior is to dump errors to stderr so they won't
1002                 # go unnoticed.  Callers can pass in a quiet instance.
1003                 def onerror(e):
1004                         if isinstance(e, OperationNotPermitted):
1005                                 writemsg(_("Operation Not Permitted: %s\n") % str(e),
1006                                         noiselevel=-1)
1007                         elif isinstance(e, FileNotFound):
1008                                 writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)
1009                         else:
1010                                 raise
1011
1012         all_applied = True
1013         for dirpath, dirnames, filenames in os.walk(top):
1014                 try:
1015                         applied = apply_secpass_permissions(dirpath,
1016                                 uid=uid, gid=gid, mode=dirmode, mask=dirmask,
1017                                 follow_links=follow_links)
1018                         if not applied:
1019                                 all_applied = False
1020                 except PortageException as e:
1021                         all_applied = False
1022                         onerror(e)
1023
1024                 for name in filenames:
1025                         try:
1026                                 applied = apply_secpass_permissions(os.path.join(dirpath, name),
1027                                         uid=uid, gid=gid, mode=filemode, mask=filemask,
1028                                         follow_links=follow_links)
1029                                 if not applied:
1030                                         all_applied = False
1031                         except PortageException as e:
1032                                 # Ignore InvalidLocation exceptions such as FileNotFound
1033                                 # and DirectoryNotFound since sometimes things disappear,
1034                                 # like when adjusting permissions on DISTCC_DIR.
1035                                 if not isinstance(e, portage.exception.InvalidLocation):
1036                                         all_applied = False
1037                                         onerror(e)
1038         return all_applied
1039
1040 def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
1041         stat_cached=None, follow_links=True):
1042         """A wrapper around apply_permissions that uses secpass and simple
1043         logic to apply as much of the permissions as possible without
1044         generating an obviously avoidable permission exception. Despite
1045         attempts to avoid an exception, it's possible that one will be raised
1046         anyway, so be prepared.
1047         Returns True if all permissions are applied and False if some are left
1048         unapplied."""
1049
1050         if stat_cached is None:
1051                 try:
1052                         if follow_links:
1053                                 stat_cached = os.stat(filename)
1054                         else:
1055                                 stat_cached = os.lstat(filename)
1056                 except OSError as oe:
1057                         func_call = "stat('%s')" % filename
1058                         if oe.errno == errno.EPERM:
1059                                 raise OperationNotPermitted(func_call)
1060                         elif oe.errno == errno.EACCES:
1061                                 raise PermissionDenied(func_call)
1062                         elif oe.errno == errno.ENOENT:
1063                                 raise FileNotFound(filename)
1064                         else:
1065                                 raise
1066
1067         all_applied = True
1068
1069         if portage.data.secpass < 2:
1070
1071                 if uid != -1 and \
1072                 uid != stat_cached.st_uid:
1073                         all_applied = False
1074                         uid = -1
1075
1076                 if gid != -1 and \
1077                 gid != stat_cached.st_gid and \
1078                 gid not in os.getgroups():
1079                         all_applied = False
1080                         gid = -1
1081
1082         apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
1083                 stat_cached=stat_cached, follow_links=follow_links)
1084         return all_applied
1085
1086 class atomic_ofstream(ObjectProxy):
1087         """Write a file atomically via os.rename().  Atomic replacement prevents
1088         interprocess interference and prevents corruption of the target
1089         file when the write is interrupted (for example, when an 'out of space'
1090         error occurs)."""
1091
1092         def __init__(self, filename, mode='w', follow_links=True, **kargs):
1093                 """Opens a temporary filename.pid in the same directory as filename."""
1094                 ObjectProxy.__init__(self)
1095                 object.__setattr__(self, '_aborted', False)
1096                 if 'b' in mode:
1097                         open_func = open
1098                 else:
1099                         open_func = io.open
1100                         kargs.setdefault('encoding', _encodings['content'])
1101                         kargs.setdefault('errors', 'backslashreplace')
1102
1103                 if follow_links:
1104                         canonical_path = os.path.realpath(filename)
1105                         object.__setattr__(self, '_real_name', canonical_path)
1106                         tmp_name = "%s.%i" % (canonical_path, os.getpid())
1107                         try:
1108                                 object.__setattr__(self, '_file',
1109                                         open_func(_unicode_encode(tmp_name,
1110                                                 encoding=_encodings['fs'], errors='strict'),
1111                                                 mode=mode, **kargs))
1112                                 return
1113                         except IOError as e:
1114                                 if canonical_path == filename:
1115                                         raise
1116                                 # Ignore this error, since it's irrelevant
1117                                 # and the below open call will produce a
1118                                 # new error if necessary.
1119
1120                 object.__setattr__(self, '_real_name', filename)
1121                 tmp_name = "%s.%i" % (filename, os.getpid())
1122                 object.__setattr__(self, '_file',
1123                         open_func(_unicode_encode(tmp_name,
1124                                 encoding=_encodings['fs'], errors='strict'),
1125                                 mode=mode, **kargs))
1126
1127         def _get_target(self):
1128                 return object.__getattribute__(self, '_file')
1129
1130         if sys.hexversion >= 0x3000000:
1131
1132                 def __getattribute__(self, attr):
1133                         if attr in ('close', 'abort', '__del__'):
1134                                 return object.__getattribute__(self, attr)
1135                         return getattr(object.__getattribute__(self, '_file'), attr)
1136
1137         else:
1138
1139                 # For TextIOWrapper, automatically coerce write calls to
1140                 # unicode, in order to avoid TypeError when writing raw
1141                 # bytes with python2.
1142
1143                 def __getattribute__(self, attr):
1144                         if attr in ('close', 'abort', 'write', '__del__'):
1145                                 return object.__getattribute__(self, attr)
1146                         return getattr(object.__getattribute__(self, '_file'), attr)
1147
1148                 def write(self, s):
1149                         f = object.__getattribute__(self, '_file')
1150                         if isinstance(f, io.TextIOWrapper):
1151                                 s = _unicode_decode(s)
1152                         return f.write(s)
1153
1154         def close(self):
1155                 """Closes the temporary file, copies permissions (if possible),
1156                 and performs the atomic replacement via os.rename().  If the abort()
1157                 method has been called, then the temp file is closed and removed."""
1158                 f = object.__getattribute__(self, '_file')
1159                 real_name = object.__getattribute__(self, '_real_name')
1160                 if not f.closed:
1161                         try:
1162                                 f.close()
1163                                 if not object.__getattribute__(self, '_aborted'):
1164                                         try:
1165                                                 apply_stat_permissions(f.name, os.stat(real_name))
1166                                         except OperationNotPermitted:
1167                                                 pass
1168                                         except FileNotFound:
1169                                                 pass
1170                                         except OSError as oe: # from the above os.stat call
1171                                                 if oe.errno in (errno.ENOENT, errno.EPERM):
1172                                                         pass
1173                                                 else:
1174                                                         raise
1175                                         os.rename(f.name, real_name)
1176                         finally:
1177                                 # Make sure we cleanup the temp file
1178                                 # even if an exception is raised.
1179                                 try:
1180                                         os.unlink(f.name)
1181                                 except OSError as oe:
1182                                         pass
1183
1184         def abort(self):
1185                 """If an error occurs while writing the file, the user should
1186                 call this method in order to leave the target file unchanged.
1187                 This will call close() automatically."""
1188                 if not object.__getattribute__(self, '_aborted'):
1189                         object.__setattr__(self, '_aborted', True)
1190                         self.close()
1191
1192         def __del__(self):
1193                 """If the user does not explicitly call close(), it is
1194                 assumed that an error has occurred, so we abort()."""
1195                 try:
1196                         f = object.__getattribute__(self, '_file')
1197                 except AttributeError:
1198                         pass
1199                 else:
1200                         if not f.closed:
1201                                 self.abort()
1202                 # ensure destructor from the base class is called
1203                 base_destructor = getattr(ObjectProxy, '__del__', None)
1204                 if base_destructor is not None:
1205                         base_destructor(self)
1206
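# Illustrative usage sketch (not part of the original module; the path is
# hypothetical). The proxy is used like an ordinary file object: close()
# performs the atomic rename, while abort() discards the temp file and
# leaves the target untouched:
#
#     f = atomic_ofstream('/etc/portage/example.conf')
#     try:
#             f.write('KEY="value"\n')
#     except (IOError, OSError):
#             f.abort()
#             raise
#     else:
#             f.close()
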
1207 def write_atomic(file_path, content, **kwargs):
1208         f = None
1209         try:
1210                 f = atomic_ofstream(file_path, **kwargs)
1211                 f.write(content)
1212                 f.close()
1213         except (IOError, OSError) as e:
1214                 if f:
1215                         f.abort()
1216                 func_call = "write_atomic('%s')" % file_path
1217                 if e.errno == errno.EPERM:
1218                         raise OperationNotPermitted(func_call)
1219                 elif e.errno == errno.EACCES:
1220                         raise PermissionDenied(func_call)
1221                 elif e.errno == errno.EROFS:
1222                         raise ReadOnlyFileSystem(func_call)
1223                 elif e.errno == errno.ENOENT:
1224                         raise FileNotFound(file_path)
1225                 else:
1226                         raise
1227
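# Illustrative one-line equivalent of the pattern above (hypothetical path):
#
#     write_atomic('/etc/portage/example.conf', 'KEY="value"\n')
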
1228 def ensure_dirs(dir_path, **kwargs):
1229         """Create a directory and call apply_permissions.
1230         Returns True if a directory is created or the permissions needed to be
1231         modified, and False otherwise.
1232
1233         This function's handling of EEXIST errors makes it useful for atomic
1234         directory creation, in which multiple processes may be competing to
1235         create the same directory.
1236         """
1237
1238         created_dir = False
1239
1240         try:
1241                 os.makedirs(dir_path)
1242                 created_dir = True
1243         except OSError as oe:
1244                 func_call = "makedirs('%s')" % dir_path
1245                 if oe.errno in (errno.EEXIST,):
1246                         pass
1247                 else:
1248                         if os.path.isdir(dir_path):
1249                                 # NOTE: DragonFly raises EPERM for makedirs('/')
1250                                 # and that is supposed to be ignored here.
1251                                 # Also, sometimes mkdir raises EISDIR on FreeBSD
1252                                 # and we want to ignore that too (bug #187518).
1253                                 pass
1254                         elif oe.errno == errno.EPERM:
1255                                 raise OperationNotPermitted(func_call)
1256                         elif oe.errno == errno.EACCES:
1257                                 raise PermissionDenied(func_call)
1258                         elif oe.errno == errno.EROFS:
1259                                 raise ReadOnlyFileSystem(func_call)
1260                         else:
1261                                 raise
1262         if kwargs:
1263                 perms_modified = apply_permissions(dir_path, **kwargs)
1264         else:
1265                 perms_modified = False
1266         return created_dir or perms_modified
1267
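# Illustrative usage sketch (not part of the original module; the path is
# hypothetical). Extra keyword arguments are forwarded to apply_permissions:
#
#     changed = ensure_dirs('/var/tmp/example', mode=0o755)
#     # changed is True if the directory was created or its permissions
#     # had to be adjusted, False otherwise.
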
1268 class LazyItemsDict(UserDict):
1269         """A mapping object that behaves like a standard dict except that it allows
1270         for lazy initialization of values via callable objects.  Lazy items can be
1271         overwritten and deleted just as normal items."""
1272
1273         __slots__ = ('lazy_items',)
1274
1275         def __init__(self, *args, **kwargs):
1276
1277                 self.lazy_items = {}
1278                 UserDict.__init__(self, *args, **kwargs)
1279
1280         def addLazyItem(self, item_key, value_callable, *pargs, **kwargs):
1281                 """Add a lazy item for the given key.  When the item is requested,
1282                 value_callable will be called with *pargs and **kwargs arguments."""
1283                 self.lazy_items[item_key] = \
1284                         self._LazyItem(value_callable, pargs, kwargs, False)
1285                 # make it show up in self.keys(), etc...
1286                 UserDict.__setitem__(self, item_key, None)
1287
1288         def addLazySingleton(self, item_key, value_callable, *pargs, **kwargs):
1289                 """This is like addLazyItem except value_callable will be called at
1290                 most once and the result will be cached for future requests."""
1291                 self.lazy_items[item_key] = \
1292                         self._LazyItem(value_callable, pargs, kwargs, True)
1293                 # make it show up in self.keys(), etc...
1294                 UserDict.__setitem__(self, item_key, None)
1295
1296         def update(self, *args, **kwargs):
1297                 if len(args) > 1:
1298                         raise TypeError(
1299                                 "expected at most 1 positional argument, got " + \
1300                                 repr(len(args)))
1301                 if args:
1302                         map_obj = args[0]
1303                 else:
1304                         map_obj = None
1305                 if map_obj is None:
1306                         pass
1307                 elif isinstance(map_obj, LazyItemsDict):
1308                         for k in map_obj:
1309                                 if k in map_obj.lazy_items:
1310                                         UserDict.__setitem__(self, k, None)
1311                                 else:
1312                                         UserDict.__setitem__(self, k, map_obj[k])
1313                         self.lazy_items.update(map_obj.lazy_items)
1314                 else:
1315                         UserDict.update(self, map_obj)
1316                 if kwargs:
1317                         UserDict.update(self, kwargs)
1318
1319         def __getitem__(self, item_key):
1320                 if item_key in self.lazy_items:
1321                         lazy_item = self.lazy_items[item_key]
1322                         pargs = lazy_item.pargs
1323                         if pargs is None:
1324                                 pargs = ()
1325                         kwargs = lazy_item.kwargs
1326                         if kwargs is None:
1327                                 kwargs = {}
1328                         result = lazy_item.func(*pargs, **kwargs)
1329                         if lazy_item.singleton:
1330                                 self[item_key] = result
1331                         return result
1332
1333                 else:
1334                         return UserDict.__getitem__(self, item_key)
1335
1336         def __setitem__(self, item_key, value):
1337                 if item_key in self.lazy_items:
1338                         del self.lazy_items[item_key]
1339                 UserDict.__setitem__(self, item_key, value)
1340
1341         def __delitem__(self, item_key):
1342                 if item_key in self.lazy_items:
1343                         del self.lazy_items[item_key]
1344                 UserDict.__delitem__(self, item_key)
1345
1346         def clear(self):
1347                 self.lazy_items.clear()
1348                 UserDict.clear(self)
1349
1350         def copy(self):
1351                 return self.__copy__()
1352
1353         def __copy__(self):
1354                 return self.__class__(self)
1355
1356         def __deepcopy__(self, memo=None):
1357                 """
1358                 This forces evaluation of each contained lazy item, and deepcopy of
1359                 the result. A TypeError is raised if any contained lazy item is not
1360                 a singleton, since it is not necessarily possible for the behavior
1361                 of this type of item to be safely preserved.
1362                 """
1363                 if memo is None:
1364                         memo = {}
1365                 result = self.__class__()
1366                 memo[id(self)] = result
1367                 for k in self:
1368                         k_copy = deepcopy(k, memo)
1369                         lazy_item = self.lazy_items.get(k)
1370                         if lazy_item is not None:
1371                                 if not lazy_item.singleton:
1372                                         raise TypeError(_unicode_decode("LazyItemsDict " + \
1373                                                 "deepcopy is unsafe with lazy items that are " + \
1374                                                 "not singletons: key=%s value=%s") % (k, lazy_item,))
1375                         UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
1376                 return result
1377
1378         class _LazyItem(object):
1379
1380                 __slots__ = ('func', 'pargs', 'kwargs', 'singleton')
1381
1382                 def __init__(self, func, pargs, kwargs, singleton):
1383
1384                         if not pargs:
1385                                 pargs = None
1386                         if not kwargs:
1387                                 kwargs = None
1388
1389                         self.func = func
1390                         self.pargs = pargs
1391                         self.kwargs = kwargs
1392                         self.singleton = singleton
1393
1394                 def __copy__(self):
1395                         return self.__class__(self.func, self.pargs,
1396                                 self.kwargs, self.singleton)
1397
1398                 def __deepcopy__(self, memo=None):
1399                         """
1400                         Override this since the default implementation can fail silently,
1401                         leaving some attributes unset.
1402                         """
1403                         if memo is None:
1404                                 memo = {}
1405                         result = self.__copy__()
1406                         memo[id(self)] = result
1407                         result.func = deepcopy(self.func, memo)
1408                         result.pargs = deepcopy(self.pargs, memo)
1409                         result.kwargs = deepcopy(self.kwargs, memo)
1410                         result.singleton = deepcopy(self.singleton, memo)
1411                         return result
1412
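# Illustrative usage sketch (not part of the original module). Lazy values are
# computed on first access; singletons are cached after the first call:
#
#     d = LazyItemsDict()
#     d.addLazyItem('groups', os.getgroups)    # re-evaluated on every access
#     d.addLazySingleton('cwd', os.getcwd)     # evaluated at most once
#     d['cwd']     # calls os.getcwd() and caches the result
#     d['groups']  # calls os.getgroups() again on each lookup
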
1413 class ConfigProtect(object):
1414         def __init__(self, myroot, protect_list, mask_list):
1415                 self.myroot = myroot
1416                 self.protect_list = protect_list
1417                 self.mask_list = mask_list
1418                 self.updateprotect()
1419
1420         def updateprotect(self):
1421                 """Update internal state for isprotected() calls.  Nonexistent paths
1422                 are ignored."""
1423
1424                 os = _os_merge
1425
1426                 self.protect = []
1427                 self._dirs = set()
1428                 for x in self.protect_list:
1429                         ppath = normalize_path(
1430                                 os.path.join(self.myroot, x.lstrip(os.path.sep)))
1431                         try:
1432                                 if stat.S_ISDIR(os.stat(ppath).st_mode):
1433                                         self._dirs.add(ppath)
1434                                 self.protect.append(ppath)
1435                         except OSError:
1436                                 # If it doesn't exist, there's no need to protect it.
1437                                 pass
1438
1439                 self.protectmask = []
1440                 for x in self.mask_list:
1441                         ppath = normalize_path(
1442                                 os.path.join(self.myroot, x.lstrip(os.path.sep)))
1443                         try:
1444                                 # Use lstat so that anything, even a broken symlink, can be
1445                                 # protected.
1446                                 if stat.S_ISDIR(os.lstat(ppath).st_mode):
1447                                         self._dirs.add(ppath)
1448                                 self.protectmask.append(ppath)
1449                                 # Now use stat in case this is a symlink to a directory.
1450                                 if stat.S_ISDIR(os.stat(ppath).st_mode):
1451                                         self._dirs.add(ppath)
1452                         except OSError:
1453                                 # If it doesn't exist, there's no need to mask it.
1454                                 pass
1455
1456         def isprotected(self, obj):
1457                 """Returns True if obj is protected, False otherwise.  The caller must
1458                 ensure that obj is normalized with a single leading slash.  A trailing
1459                 slash is optional for directories."""
1460                 masked = 0
1461                 protected = 0
1462                 sep = os.path.sep
1463                 for ppath in self.protect:
1464                         if len(ppath) > masked and obj.startswith(ppath):
1465                                 if ppath in self._dirs:
1466                                         if obj != ppath and not obj.startswith(ppath + sep):
1467                                                 # /etc/foo does not match /etc/foobaz
1468                                                 continue
1469                                 elif obj != ppath:
1470                                         # force exact match when CONFIG_PROTECT lists a
1471                                         # non-directory
1472                                         continue
1473                                 protected = len(ppath)
1474                                 # check whether this match is overridden by CONFIG_PROTECT_MASK
1475                                 for pmpath in self.protectmask:
1476                                         if len(pmpath) >= protected and obj.startswith(pmpath):
1477                                                 if pmpath in self._dirs:
1478                                                         if obj != pmpath and \
1479                                                                 not obj.startswith(pmpath + sep):
1480                                                                 # /etc/foo does not match /etc/foobaz
1481                                                                 continue
1482                                                 elif obj != pmpath:
1483                                                         # force exact match when CONFIG_PROTECT_MASK lists
1484                                                         # a non-directory
1485                                                         continue
1486                                                 #skip, it's in the mask
1487                                                 masked = len(pmpath)
1488                 return protected > masked
1489
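# Illustrative usage sketch (not part of the original module; assumes /etc and
# /etc/env.d exist). The protect and mask lists normally come from the
# CONFIG_PROTECT and CONFIG_PROTECT_MASK variables:
#
#     cp = ConfigProtect('/', ['/etc'], ['/etc/env.d'])
#     cp.isprotected('/etc/fstab')             # True
#     cp.isprotected('/etc/env.d/02locale')    # False, masked
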
1490 def new_protect_filename(mydest, newmd5=None, force=False):
1491         """Resolves a config-protect filename for merging, optionally
1492         reusing the most recent existing ._cfg file when its md5 matches
1493         newmd5. If force is True, then a new filename will be generated
1494         even if mydest does not exist yet.
1495         (dest, md5) ==> existing ._cfgXXXX_ file whose md5 matches newmd5
1496         (dest)      ==> next available ._cfgXXXX_ filename for dest
1497         """
1498
1499         # config protection filename format:
1500         # ._cfg0000_foo
1501         # 0123456789012
1502
1503         os = _os_merge
1504
1505         prot_num = -1
1506         last_pfile = ""
1507
1508         if not force and \
1509                 not os.path.exists(mydest):
1510                 return mydest
1511
1512         real_filename = os.path.basename(mydest)
1513         real_dirname  = os.path.dirname(mydest)
1514         for pfile in os.listdir(real_dirname):
1515                 if pfile[0:5] != "._cfg":
1516                         continue
1517                 if pfile[10:] != real_filename:
1518                         continue
1519                 try:
1520                         new_prot_num = int(pfile[5:9])
1521                         if new_prot_num > prot_num:
1522                                 prot_num = new_prot_num
1523                                 last_pfile = pfile
1524                 except ValueError:
1525                         continue
1526         prot_num = prot_num + 1
1527
1528         new_pfile = normalize_path(os.path.join(real_dirname,
1529                 "._cfg" + str(prot_num).zfill(4) + "_" + real_filename))
1530         old_pfile = normalize_path(os.path.join(real_dirname, last_pfile))
1531         if last_pfile and newmd5:
1532                 try:
1533                         last_pfile_md5 = portage.checksum._perform_md5_merge(old_pfile)
1534                 except FileNotFound:
1535                         # The file suddenly disappeared or it's a broken symlink.
1536                         pass
1537                 else:
1538                         if last_pfile_md5 == newmd5:
1539                                 return old_pfile
1540         return new_pfile
1541
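# Illustrative usage sketch (not part of the original module; the path is
# hypothetical). If /etc/._cfg0000_example.conf already exists, the next
# available name is returned:
#
#     new_protect_filename('/etc/example.conf')
#     # => '/etc/._cfg0001_example.conf' (or the existing ._cfg file when
#     #    newmd5 matches its checksum)
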
1542 def find_updated_config_files(target_root, config_protect):
1543         """
1544         Yield tuples of configuration files that need to be updated.
1545         Each tuple is organized like this:
1546         ( protected_dir, file_list )
1547         If the protected config isn't a protected_dir but a protected_file, the tuple is:
1548         ( protected_file, None )
1549         If no configuration files need to be updated, nothing is yielded.
1550         """
1551
1552         os = _os_merge
1553
1554         if config_protect:
1555                 # directories with some protect files in them
1556                 for x in config_protect:
1557                         files = []
1558
1559                         x = os.path.join(target_root, x.lstrip(os.path.sep))
1560                         if not os.access(x, os.W_OK):
1561                                 continue
1562                         try:
1563                                 mymode = os.lstat(x).st_mode
1564                         except OSError:
1565                                 continue
1566
1567                         if stat.S_ISLNK(mymode):
1568                                 # We want to treat it like a directory if it
1569                                 # is a symlink to an existing directory.
1570                                 try:
1571                                         real_mode = os.stat(x).st_mode
1572                                         if stat.S_ISDIR(real_mode):
1573                                                 mymode = real_mode
1574                                 except OSError:
1575                                         pass
1576
1577                         if stat.S_ISDIR(mymode):
1578                                 mycommand = \
1579                                         "find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
1580                         else:
1581                                 mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
1582                                                 os.path.split(x.rstrip(os.path.sep))
1583                         mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
1584                         a = subprocess_getstatusoutput(mycommand)
1585
1586                         if a[0] == 0:
1587                                 files = a[1].split('\0')
1588                                 # split always produces an empty string as the last element
1589                                 if files and not files[-1]:
1590                                         del files[-1]
1591                                 if files:
1592                                         if stat.S_ISDIR(mymode):
1593                                                 yield (x, files)
1594                                         else:
1595                                                 yield (x, None)
1596
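# Illustrative usage sketch (not part of the original module; CONFIG_PROTECT
# is read from the environment here purely for illustration):
#
#     protect = os.environ.get('CONFIG_PROTECT', '').split()
#     for path, files in find_updated_config_files('/', protect):
#             if files is None:
#                     writemsg('pending update: %s\n' % path, noiselevel=-1)
#             else:
#                     writemsg('%d pending updates under %s\n' %
#                             (len(files), path), noiselevel=-1)
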
1597 def getlibpaths(root, env=None):
1598         """ Return a list of paths that are used for library lookups """
1599         if env is None:
1600                 env = os.environ
1601         # the following is based on the information from ld.so(8)
1602         rval = env.get("LD_LIBRARY_PATH", "").split(":")
1603         rval.extend(grabfile(os.path.join(root, "etc", "ld.so.conf")))
1604         rval.append("/usr/lib")
1605         rval.append("/lib")
1606
1607         return [normalize_path(x) for x in rval if x]
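
# Illustrative usage sketch (not part of the original module):
#
#     for libdir in getlibpaths('/'):
#             writemsg_stdout('%s\n' % libdir)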