Support include directives in ld.so.conf.
pym/portage/util/__init__.py (portage.git)
1 # Copyright 2004-2011 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 __all__ = ['apply_permissions', 'apply_recursive_permissions',
5         'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream',
6         'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs',
7         'find_updated_config_files', 'getconfig', 'getlibpaths', 'grabdict',
8         'grabdict_package', 'grabfile', 'grabfile_package', 'grablines',
9         'initialize_logger', 'LazyItemsDict', 'map_dictlist_vals',
10         'new_protect_filename', 'normalize_path', 'pickle_read', 'stack_dictlist',
11         'stack_dicts', 'stack_lists', 'unique_array', 'unique_everseen', 'varexpand',
12         'write_atomic', 'writedict', 'writemsg', 'writemsg_level', 'writemsg_stdout']
13
14 from copy import deepcopy
15 import errno
16 import io
17 try:
18         from itertools import filterfalse
19 except ImportError:
20         from itertools import ifilterfalse as filterfalse
21 import logging
22 import re
23 import shlex
24 import stat
25 import string
26 import sys
27 import traceback
28 import glob
29
30 import portage
31 portage.proxy.lazyimport.lazyimport(globals(),
32         'pickle',
33         'portage.dep:Atom',
34         'portage.util.listdir:_ignorecvs_dirs'
35 )
36
37 from portage import os
38 from portage import subprocess_getstatusoutput
39 from portage import _encodings
40 from portage import _os_merge
41 from portage import _unicode_encode
42 from portage import _unicode_decode
43 from portage.exception import InvalidAtom, PortageException, FileNotFound, \
44        OperationNotPermitted, PermissionDenied, ReadOnlyFileSystem
45 from portage.localization import _
46 from portage.proxy.objectproxy import ObjectProxy
47 from portage.cache.mappings import UserDict
48
49 noiselimit = 0
50
51 def initialize_logger(level=logging.WARN):
52         """Sets up basic logging of portage activities
53         Args:
54                 level: the numeric logging level to emit messages at (e.g. logging.INFO, logging.DEBUG)
55         Returns:
56                 None
57         """
58         logging.basicConfig(level=level, format='[%(levelname)-4s] %(message)s')
59
60 def writemsg(mystr,noiselevel=0,fd=None):
61         """Prints out warning and debug messages based on the noiselimit setting"""
62         global noiselimit
63         if fd is None:
64                 fd = sys.stderr
65         if noiselevel <= noiselimit:
66                 # avoid potential UnicodeEncodeError
67                 if isinstance(fd, io.StringIO):
68                         mystr = _unicode_decode(mystr,
69                                 encoding=_encodings['content'], errors='replace')
70                 else:
71                         mystr = _unicode_encode(mystr,
72                                 encoding=_encodings['stdio'], errors='backslashreplace')
73                         if sys.hexversion >= 0x3000000 and fd in (sys.stdout, sys.stderr):
74                                 fd = fd.buffer
75                 fd.write(mystr)
76                 fd.flush()
77
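# Editorial note (illustrative, not part of the original module): a message is
# shown only when its noiselevel is <= the global noiselimit (0 by default),
# so noiselevel=-1 is always printed while positive noiselevels stay silent
# unless the caller has raised noiselimit.
#
#     >>> writemsg("always shown\n", noiselevel=-1)   # written to stderr
#     >>> writemsg("debug detail\n", noiselevel=1)    # suppressed by default
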
78 def writemsg_stdout(mystr,noiselevel=0):
79         """Prints messages to stdout based on the noiselimit setting"""
80         writemsg(mystr, noiselevel=noiselevel, fd=sys.stdout)
81
82 def writemsg_level(msg, level=0, noiselevel=0):
83         """
84         Show a message for the given level as defined by the logging module
85         (default is 0). When level >= logging.WARNING then the message is
86         sent to stderr, otherwise it is sent to stdout. The noiselevel is
87         passed directly to writemsg().
88
89         @type msg: str
90         @param msg: a message string, including newline if appropriate
91         @type level: int
92         @param level: a numeric logging level (see the logging module)
93         @type noiselevel: int
94         @param noiselevel: passed directly to writemsg
95         """
96         if level >= logging.WARNING:
97                 fd = sys.stderr
98         else:
99                 fd = sys.stdout
100         writemsg(msg, noiselevel=noiselevel, fd=fd)
101
102 def normalize_path(mypath):
103         """ 
104         os.path.normpath("//foo") returns "//foo" instead of "/foo"
105         We dislike this behavior so we create our own normpath func
106         to fix it.
107         """
108         if sys.hexversion >= 0x3000000 and isinstance(mypath, bytes):
109                 path_sep = os.path.sep.encode()
110         else:
111                 path_sep = os.path.sep
112
113         if mypath.startswith(path_sep):
114                 # posixpath.normpath collapses 3 or more leading slashes to just 1.
115                 return os.path.normpath(2*path_sep + mypath)
116         else:
117                 return os.path.normpath(mypath)
118
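# Editorial example (illustrative, not part of the original module): unlike
# os.path.normpath(), which preserves exactly two leading slashes, this
# helper collapses them to a single slash.
#
#     >>> os.path.normpath("//usr//lib/")
#     '//usr/lib'
#     >>> normalize_path("//usr//lib/")
#     '/usr/lib'
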
119 def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False):
120         """This function grabs the lines in a file, normalizes whitespace and returns lines in a list; if a line
121         begins with a #, it is ignored, as are empty lines"""
122
123         mylines=grablines(myfilename, recursive, remember_source_file=True)
124         newlines=[]
125
126         for x, source_file in mylines:
127                 #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
128                 #into single spaces.
129                 myline = x.split()
130                 if x and x[0] != "#":
131                         mylinetemp = []
132                         for item in myline:
133                                 if item[:1] != "#":
134                                         mylinetemp.append(item)
135                                 else:
136                                         break
137                         myline = mylinetemp
138
139                 myline = " ".join(myline)
140                 if not myline:
141                         continue
142                 if myline[0]=="#":
143                         # Check if we have a compat-level string. BC-integration data.
144                         # '##COMPAT==>N<==' 'some string attached to it'
145                         mylinetest = myline.split("<==",1)
146                         if len(mylinetest) == 2:
147                                 myline_potential = mylinetest[1]
148                                 mylinetest = mylinetest[0].split("##COMPAT==>")
149                                 if len(mylinetest) == 2:
150                                         if compat_level >= int(mylinetest[1]):
151                                                 # It's a compat line, and the key matches.
152                                                 newlines.append(myline_potential)
153                                 continue
154                         else:
155                                 continue
156                 if remember_source_file:
157                         newlines.append((myline, source_file))
158                 else:
159                         newlines.append(myline)
160         return newlines
161
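# Editorial example (illustrative, not part of the original module; the path
# and file contents are hypothetical). For a file containing:
#
#     # a full-line comment is skipped
#     sys-apps/portage    x86  amd64   # trailing comments are stripped
#
# grabfile() returns the whitespace-normalized payload lines:
#
#     >>> grabfile("/etc/portage/example.list")
#     ['sys-apps/portage x86 amd64']
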
162 def map_dictlist_vals(func,myDict):
163         """Performs a function on each value of each key in a dictlist.
164         Returns a new dictlist."""
165         new_dl = {}
166         for key in myDict:
167                 new_dl[key] = []
168                 new_dl[key] = [func(x) for x in myDict[key]]
169         return new_dl
170
171 def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0):
172         """
173         Stacks a list of dicts into a single dict, optionally merging or
174         overwriting matching key/value pairs for the dict[key]->list.
175         Returns a single dict. Entries at higher indices take precedence.
176         
177         Example usage:
178                 >>> from portage.util import stack_dictlist
179                 >>> print stack_dictlist( [{'a':'b'},{'x':'y'}])
180                 >>> {'a':'b','x':'y'}
181                 >>> print stack_dictlist( [{'a':'b'},{'a':'c'}], incremental = True )
182                 >>> {'a':['b','c'] }
183                 >>> a = {'KEYWORDS':['x86','alpha']}
184                 >>> b = {'KEYWORDS':['-x86']}
185                 >>> print stack_dictlist( [a,b] )
186                 >>> { 'KEYWORDS':['x86','alpha','-x86']}
187                 >>> print stack_dictlist( [a,b], incremental=True)
188                 >>> { 'KEYWORDS':['alpha'] }
189                 >>> print stack_dictlist( [a,b], incrementals=['KEYWORDS'])
190                 >>> { 'KEYWORDS':['alpha'] }
191         
192         @param original_dicts a list of (dictionary objects or None)
193         @type list
194         @param incremental True or false depending on whether new keys should overwrite
195            keys which already exist.
196         @type boolean
197         @param incrementals A list of items that should be incremental (-foo removes foo from
198            the returned dict).
199         @type list
200         @param ignore_none Appears to be ignored, but probably was used long long ago.
201         @type boolean
202         
203         """
204         final_dict = {}
205         for mydict in original_dicts:
206                 if mydict is None:
207                         continue
208                 for y in mydict:
209                         if y not in final_dict:
210                                 final_dict[y] = []
211                         
212                         for thing in mydict[y]:
213                                 if thing:
214                                         if incremental or y in incrementals:
215                                                 if thing == "-*":
216                                                         final_dict[y] = []
217                                                         continue
218                                                 elif thing[:1] == '-':
219                                                         try:
220                                                                 final_dict[y].remove(thing[1:])
221                                                         except ValueError:
222                                                                 pass
223                                                         continue
224                                         if thing not in final_dict[y]:
225                                                 final_dict[y].append(thing)
226                         if y in final_dict and not final_dict[y]:
227                                 del final_dict[y]
228         return final_dict
229
230 def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
231         """Stacks a list of dicts into a single dict, optionally merging or
232         overwriting matching key/value pairs for the dict[key]->string.
233         Returns a single dict."""
234         final_dict = {}
235         for mydict in dicts:
236                 if not mydict:
237                         continue
238                 for k, v in mydict.items():
239                         if k in final_dict and (incremental or (k in incrementals)):
240                                 final_dict[k] += " " + v
241                         else:
242                                 final_dict[k]  = v
243         return final_dict
244
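# Editorial example (illustrative, not part of the original module): with
# incremental stacking the string values of matching keys are concatenated,
# otherwise later dicts simply overwrite earlier ones.
#
#     >>> stack_dicts([{'USE': 'gtk'}, {'USE': '-qt4'}], incremental=1)
#     {'USE': 'gtk -qt4'}
#     >>> stack_dicts([{'USE': 'gtk'}, {'USE': '-qt4'}])
#     {'USE': '-qt4'}
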
245 def append_repo(atom_list, repo_name, remember_source_file=False):
246         """
247         Takes a list of valid atoms without repo spec and appends ::repo_name.
248         """
249         if remember_source_file:
250                 return [(Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True), source) \
251                         for atom, source in atom_list]
252         else:
253                 return [Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True) \
254                         for atom in atom_list]
255
256 def stack_lists(lists, incremental=1, remember_source_file=False,
257         warn_for_unmatched_removal=False, strict_warn_for_unmatched_removal=False, ignore_repo=False):
258         """Stacks a list of lists into a single list, optionally removing
259         values using '-value' notation. Entries at higher indices take precedence.
260
261         All elements must be hashable."""
262         matched_removals = set()
263         unmatched_removals = {}
264         new_list = {}
265         for sub_list in lists:
266                 for token in sub_list:
267                         token_key = token
268                         if remember_source_file:
269                                 token, source_file = token
270                         else:
271                                 source_file = False
272
273                         if token is None:
274                                 continue
275
276                         if incremental:
277                                 if token == "-*":
278                                         new_list.clear()
279                                 elif token[:1] == '-':
280                                         matched = False
281                                         if ignore_repo and "::" not in token:
282                                                 #Let -cat/pkg remove cat/pkg::repo.
283                                                 to_be_removed = []
284                                                 token_slice = token[1:]
285                                                 for atom in new_list:
286                                                         atom_without_repo = atom
287                                                         if atom.repo is not None:
288                                                                 # Atom.without_repo instantiates a new Atom,
289                                                                 # which is unnecessary here, so use string
290                                                                 # replacement instead.
291                                                                 atom_without_repo = \
292                                                                         atom.replace("::" + atom.repo, "", 1)
293                                                         if atom_without_repo == token_slice:
294                                                                 to_be_removed.append(atom)
295                                                 if to_be_removed:
296                                                         matched = True
297                                                         for atom in to_be_removed:
298                                                                 new_list.pop(atom)
299                                         else:
300                                                 try:
301                                                         new_list.pop(token[1:])
302                                                         matched = True
303                                                 except KeyError:
304                                                         pass
305
306                                         if not matched:
307                                                 if source_file and \
308                                                         (strict_warn_for_unmatched_removal or \
309                                                         token_key not in matched_removals):
310                                                         unmatched_removals.setdefault(source_file, set()).add(token)
311                                         else:
312                                                 matched_removals.add(token_key)
313                                 else:
314                                         new_list[token] = source_file
315                         else:
316                                 new_list[token] = source_file
317
318         if warn_for_unmatched_removal:
319                 for source_file, tokens in unmatched_removals.items():
320                         if len(tokens) > 3:
321                                 selected = [tokens.pop(), tokens.pop(), tokens.pop()]
322                                 writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \
323                                         (source_file, ", ".join(selected), len(tokens)),
324                                         noiselevel=-1)
325                         else:
326                                 writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") % (source_file, ", ".join(tokens)),
327                                         noiselevel=-1)
328
329         if remember_source_file:
330                 return list(new_list.items())
331         else:
332                 return list(new_list)
333
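# Editorial example (illustrative, not part of the original module): '-value'
# tokens in later lists remove matching values accumulated so far, and '-*'
# clears everything. The result order is arbitrary for this dict-backed
# implementation, so it is sorted here for display.
#
#     >>> sorted(stack_lists([['x86', 'amd64'], ['-x86', 'arm']]))
#     ['amd64', 'arm']
#     >>> stack_lists([['x86', 'amd64'], ['-*', 'arm']])
#     ['arm']
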
334 def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
335         """
336         This function grabs the lines in a file, normalizes whitespace and returns lines in a dictionary
337         
338         @param myfilename: file to process
339         @type myfilename: string (path)
340         @param juststrings: only return strings
341         @type juststrings: Boolean (integer)
342         @param empty: Allow keys that have no values (i.e. single-token lines)
343         @type empty: Boolean (integer)
344         @param recursive: Recursively grab ( support for /etc/portage/package.keywords/* and friends )
345         @type recursive: Boolean (integer)
346         @param incremental: Append to the return list, don't overwrite
347         @type incremental: Boolean (integer)
348         @rtype: Dictionary
349         @returns:
350         Returns the lines of the file as a dictionary; for example, the line
351                 'sys-apps/portage x86 amd64 ppc'
352                 would return
353                 { "sys-apps/portage" : [ 'x86', 'amd64', 'ppc' ] }
354                 i.e. the line syntax is key : [list of values]
355         """
356         newdict={}
357         for x in grablines(myfilename, recursive):
358                 #split() removes leading and trailing whitespace and breaks the
359                 #line into whitespace-separated tokens.
360                 if x[0] == "#":
361                         continue
362                 myline=x.split()
363                 mylinetemp = []
364                 for item in myline:
365                         if item[:1] != "#":
366                                 mylinetemp.append(item)
367                         else:
368                                 break
369                 myline = mylinetemp
370                 if len(myline) < 2 and empty == 0:
371                         continue
372                 if len(myline) < 1 and empty == 1:
373                         continue
374                 if incremental:
375                         newdict.setdefault(myline[0], []).extend(myline[1:])
376                 else:
377                         newdict[myline[0]] = myline[1:]
378         if juststrings:
379                 for k, v in newdict.items():
380                         newdict[k] = " ".join(v)
381         return newdict
382
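# Editorial example (illustrative, not part of the original module; the path
# and file contents are hypothetical). For a package.keywords style file
# containing:
#
#     sys-apps/portage x86 amd64
#     sys-apps/portage ppc
#
# the default incremental mode appends to the existing key:
#
#     >>> grabdict("/etc/portage/package.keywords.example")
#     {'sys-apps/portage': ['x86', 'amd64', 'ppc']}
#     >>> grabdict("/etc/portage/package.keywords.example", juststrings=1)
#     {'sys-apps/portage': 'x86 amd64 ppc'}
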
383 def read_corresponding_eapi_file(filename):
384         """
385         Read the 'eapi' file from the directory 'filename' is in.
386         Returns "0" if the file is not present or invalid.
387         """
388         default = "0"
389         eapi_file = os.path.join(os.path.dirname(filename), "eapi")
390         try:
391                 f = open(eapi_file, "r")
392                 lines = f.readlines()
393                 if len(lines) == 1:
394                         eapi = lines[0].rstrip("\n")
395                 else:
396                         writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
397                                 noiselevel=-1)
398                         eapi = default
399                 f.close()
400         except IOError:
401                 eapi = default
402
403         return eapi
404
405 def grabdict_package(myfilename, juststrings=0, recursive=0, allow_wildcard=False, allow_repo=False,
406         verify_eapi=False, eapi=None):
407         """ Does the same thing as grabdict except it validates keys
408             with isvalidatom()"""
409         pkgs=grabdict(myfilename, juststrings, empty=1, recursive=recursive)
410         if not pkgs:
411                 return pkgs
412         if verify_eapi and eapi is None:
413                 eapi = read_corresponding_eapi_file(myfilename)
414
415         # Collect the results into a new dict so that invalid atoms can
416         # simply be skipped, rather than deleting entries from the dict
417         # that is being iterated over.
418         atoms = {}
419         for k, v in pkgs.items():
420                 try:
421                         k = Atom(k, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
422                 except InvalidAtom as e:
423                         writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
424                                 noiselevel=-1)
425                 else:
426                         atoms[k] = v
427         return atoms
428
429 def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=False, allow_repo=False,
430         remember_source_file=False, verify_eapi=False, eapi=None):
431
432         pkgs=grabfile(myfilename, compatlevel, recursive=recursive, remember_source_file=True)
433         if not pkgs:
434                 return pkgs
435         if verify_eapi and eapi is None:
436                 eapi = read_corresponding_eapi_file(myfilename)
437         mybasename = os.path.basename(myfilename)
438         atoms = []
439         for pkg, source_file in pkgs:
440                 pkg_orig = pkg
441                 # for packages and package.mask files
442                 if pkg[:1] == "-":
443                         pkg = pkg[1:]
444                 if pkg[:1] == '*' and mybasename == 'packages':
445                         pkg = pkg[1:]
446                 try:
447                         pkg = Atom(pkg, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
448                 except InvalidAtom as e:
449                         writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
450                                 noiselevel=-1)
451                 else:
452                         if pkg_orig == str(pkg):
453                                 # normal atom, so return as Atom instance
454                                 if remember_source_file:
455                                         atoms.append((pkg, source_file))
456                                 else:
457                                         atoms.append(pkg)
458                         else:
459                                 # atom has special prefix, so return as string
460                                 if remember_source_file:
461                                         atoms.append((pkg_orig, source_file))
462                                 else:
463                                         atoms.append(pkg_orig)
464         return atoms
465
466 def grablines(myfilename, recursive=0, remember_source_file=False):
467         mylines=[]
468         if recursive and os.path.isdir(myfilename):
469                 if os.path.basename(myfilename) in _ignorecvs_dirs:
470                         return mylines
471                 dirlist = os.listdir(myfilename)
472                 dirlist.sort()
473                 for f in dirlist:
474                         if not f.startswith(".") and not f.endswith("~"):
475                                 mylines.extend(grablines(
476                                         os.path.join(myfilename, f), recursive, remember_source_file))
477         else:
478                 try:
479                         myfile = io.open(_unicode_encode(myfilename,
480                                 encoding=_encodings['fs'], errors='strict'),
481                                 mode='r', encoding=_encodings['content'], errors='replace')
482                         if remember_source_file:
483                                 mylines = [(line, myfilename) for line in myfile.readlines()]
484                         else:
485                                 mylines = myfile.readlines()
486                         myfile.close()
487                 except IOError as e:
488                         if e.errno == PermissionDenied.errno:
489                                 raise PermissionDenied(myfilename)
490                         pass
491         return mylines
492
493 def writedict(mydict,myfilename,writekey=True):
494         """Writes out a dict to a file; writekey=0 mode doesn't write out
495         the key and assumes all values are strings, not lists."""
496         lines = []
497         if not writekey:
498                 for v in mydict.values():
499                         lines.append(v + "\n")
500         else:
501                 for k, v in mydict.items():
502                         lines.append("%s %s\n" % (k, " ".join(v)))
503         write_atomic(myfilename, "".join(lines))
504
505 def shlex_split(s):
506         """
507         This is equivalent to shlex.split, but if the current interpreter is
508         python2, it temporarily encodes unicode strings to bytes since python2's
509         shlex.split() doesn't handle unicode strings.
510         """
511         convert_to_bytes = sys.hexversion < 0x3000000 and not isinstance(s, bytes)
512         if convert_to_bytes:
513                 s = _unicode_encode(s)
514         rval = shlex.split(s)
515         if convert_to_bytes:
516                 rval = [_unicode_decode(x) for x in rval]
517         return rval
518
519 class _tolerant_shlex(shlex.shlex):
520         def sourcehook(self, newfile):
521                 try:
522                         return shlex.shlex.sourcehook(self, newfile)
523                 except EnvironmentError as e:
524                         writemsg(_("!!! Parse error in '%s': source command failed: %s\n") % \
525                                 (self.infile, str(e)), noiselevel=-1)
526                         return (newfile, io.StringIO())
527
528 _invalid_var_name_re = re.compile(r'^\d|\W')
529
530 def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
531         if isinstance(expand, dict):
532                 # Some existing variable definitions have been
533                 # passed in, for use in substitutions.
534                 expand_map = expand
535                 expand = True
536         else:
537                 expand_map = {}
538         mykeys = {}
539         f = None
540         try:
541                 # NOTE: shlex doesn't support unicode objects with Python 2
542                 # (produces spurious \0 characters).
543                 if sys.hexversion < 0x3000000:
544                         f = open(_unicode_encode(mycfg,
545                                 encoding=_encodings['fs'], errors='strict'), 'rb')
546                 else:
547                         f = open(_unicode_encode(mycfg,
548                                 encoding=_encodings['fs'], errors='strict'), mode='r',
549                                 encoding=_encodings['content'], errors='replace')
550                 content = f.read()
551         except IOError as e:
552                 if e.errno == PermissionDenied.errno:
553                         raise PermissionDenied(mycfg)
554                 if e.errno != errno.ENOENT:
555                         writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
556                         if e.errno not in (errno.EISDIR,):
557                                 raise
558                 return None
559         finally:
560                 if f is not None:
561                         f.close()
562
563         # Workaround for avoiding a silent error in shlex that is
564         # triggered by a source statement at the end of the file
565         # without a trailing newline after the source statement.
566         if content and content[-1] != '\n':
567                 content += '\n'
568
569         # Warn about dos-style line endings since that prevents
570         # people from being able to source them with bash.
571         if '\r' in content:
572                 writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
573                         "in config file: '%s'") + "\n") % mycfg, noiselevel=-1)
574
575         try:
576                 if tolerant:
577                         shlex_class = _tolerant_shlex
578                 else:
579                         shlex_class = shlex.shlex
580                 # The default shlex.sourcehook() implementation
581                 # only joins relative paths when the infile
582                 # attribute is properly set.
583                 lex = shlex_class(content, infile=mycfg, posix=True)
584                 lex.wordchars = string.digits + string.ascii_letters + \
585                         "~!@#$%*_\:;?,./-+{}"
586                 lex.quotes="\"'"
587                 if allow_sourcing:
588                         lex.source="source"
589                 while 1:
590                         key=lex.get_token()
591                         if key == "export":
592                                 key = lex.get_token()
593                         if key is None:
594                                 #normal end of file
595                                         break
596                         equ=lex.get_token()
597                         if (equ==''):
598                                 #unexpected end of file
599                                 #lex.error_leader(self.filename,lex.lineno)
600                                 if not tolerant:
601                                         writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
602                                                 noiselevel=-1)
603                                         raise Exception(_("ParseError: Unexpected EOF: %s: on/before line %s") % (mycfg, lex.lineno))
604                                 else:
605                                         return mykeys
606                         elif (equ!='='):
607                                 #invalid token
608                                 #lex.error_leader(self.filename,lex.lineno)
609                                 if not tolerant:
610                                         raise Exception(_("ParseError: Invalid token "
611                                                 "'%s' (not '='): %s: line %s") % \
612                                                 (equ, mycfg, lex.lineno))
613                                 else:
614                                         return mykeys
615                         val=lex.get_token()
616                         if val is None:
617                                 #unexpected end of file
618                                 #lex.error_leader(self.filename,lex.lineno)
619                                 if not tolerant:
620                                         writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
621                                                 noiselevel=-1)
622                                         raise portage.exception.CorruptionError(_("ParseError: Unexpected EOF: %s: line %s") % (mycfg, lex.lineno))
623                                 else:
624                                         return mykeys
625                         key = _unicode_decode(key)
626                         val = _unicode_decode(val)
627
628                         if _invalid_var_name_re.search(key) is not None:
629                                 if not tolerant:
630                                         raise Exception(_(
631                                                 "ParseError: Invalid variable name '%s': line %s") % \
632                                                 (key, lex.lineno - 1))
633                                 writemsg(_("!!! Invalid variable name '%s': line %s in %s\n") \
634                                         % (key, lex.lineno - 1, mycfg), noiselevel=-1)
635                                 continue
636
637                         if expand:
638                                 mykeys[key] = varexpand(val, expand_map)
639                                 expand_map[key] = mykeys[key]
640                         else:
641                                 mykeys[key] = val
642         except SystemExit as e:
643                 raise
644         except Exception as e:
645                 raise portage.exception.ParseError(str(e)+" in "+mycfg)
646         return mykeys
647         
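# Editorial example (illustrative, not part of the original module; the path
# and file contents are hypothetical). For a make.conf style file containing:
#
#     CHOST="x86_64-pc-linux-gnu"
#     CFLAGS="-O2 -pipe"
#     LDFLAGS="${CFLAGS} -Wl,-O1"
#
# getconfig() returns the assignments as a dict, expanding ${VAR} references
# against earlier assignments in the same file (and against any mapping that
# is passed in via the 'expand' parameter):
#
#     >>> d = getconfig("/etc/portage/make.conf.example")
#     >>> d['LDFLAGS']
#     '-O2 -pipe -Wl,-O1'
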
648 #cache expansions of constant strings
649 cexpand={}
650 def varexpand(mystring, mydict=None):
651         if mydict is None:
652                 mydict = {}
653         newstring = cexpand.get(" "+mystring, None)
654         if newstring is not None:
655                 return newstring
656
657         """
658         new variable expansion code.  Preserves quotes, handles \n, etc.
659         This code is used by the configfile code, as well as others (parser)
660         This would be a good bunch of code to port to C.
661         """
662         numvars=0
663         mystring=" "+mystring
664         #in single, double quotes
665         insing=0
666         indoub=0
667         pos=1
668         newstring=" "
669         while (pos<len(mystring)):
670                 if (mystring[pos]=="'") and (mystring[pos-1]!="\\"):
671                         if (indoub):
672                                 newstring=newstring+"'"
673                         else:
674                                 newstring += "'" # Quote removal is handled by shlex.
675                                 insing=not insing
676                         pos=pos+1
677                         continue
678                 elif (mystring[pos]=='"') and (mystring[pos-1]!="\\"):
679                         if (insing):
680                                 newstring=newstring+'"'
681                         else:
682                                 newstring += '"' # Quote removal is handled by shlex.
683                                 indoub=not indoub
684                         pos=pos+1
685                         continue
686                 if (not insing): 
687                         #expansion time
688                         if (mystring[pos]=="\n"):
689                                 #convert newlines to spaces
690                                 newstring=newstring+" "
691                                 pos=pos+1
692                         elif (mystring[pos]=="\\"):
693                                 # For backslash expansion, this function used to behave like
694                                 # echo -e, but that's not needed for our purposes. We want to
695                                 # behave like bash does when expanding a variable assignment
696                                 # in a sourced file, in which case it performs backslash
697                                 # removal for \\ and \$ but nothing more. It also removes
698                                 # escaped newline characters. Note that we don't handle
699                                 # escaped quotes here, since getconfig() uses shlex
700                                 # to handle that earlier.
701                                 if (pos+1>=len(mystring)):
702                                         newstring=newstring+mystring[pos]
703                                         break
704                                 else:
705                                         a = mystring[pos + 1]
706                                         pos = pos + 2
707                                         if a in ("\\", "$"):
708                                                 newstring = newstring + a
709                                         elif a == "\n":
710                                                 pass
711                                         else:
712                                                 newstring = newstring + mystring[pos-2:pos]
713                                         continue
714                         elif (mystring[pos]=="$") and (mystring[pos-1]!="\\"):
715                                 pos=pos+1
716                                 if mystring[pos]=="{":
717                                         pos=pos+1
718                                         braced=True
719                                 else:
720                                         braced=False
721                                 myvstart=pos
722                                 validchars=string.ascii_letters+string.digits+"_"
723                                 while mystring[pos] in validchars:
724                                         if (pos+1)>=len(mystring):
725                                                 if braced:
726                                                         cexpand[mystring]=""
727                                                         return ""
728                                                 else:
729                                                         pos=pos+1
730                                                         break
731                                         pos=pos+1
732                                 myvarname=mystring[myvstart:pos]
733                                 if braced:
734                                         if mystring[pos]!="}":
735                                                 cexpand[mystring]=""
736                                                 return ""
737                                         else:
738                                                 pos=pos+1
739                                 if len(myvarname)==0:
740                                         cexpand[mystring]=""
741                                         return ""
742                                 numvars=numvars+1
743                                 if myvarname in mydict:
744                                         newstring=newstring+mydict[myvarname] 
745                         else:
746                                 newstring=newstring+mystring[pos]
747                                 pos=pos+1
748                 else:
749                         newstring=newstring+mystring[pos]
750                         pos=pos+1
751         if numvars==0:
752                 cexpand[mystring]=newstring[1:]
753         return newstring[1:]    
754
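# Editorial example (illustrative, not part of the original module):
# varexpand() performs bash-like $VAR / ${VAR} substitution against the
# supplied dict, converts newlines outside of single quotes to spaces, and
# leaves quotes in place (quote removal is handled by shlex in getconfig()).
#
#     >>> varexpand("${CFLAGS} -pipe", {"CFLAGS": "-O2"})
#     '-O2 -pipe'
#     >>> varexpand("'$CFLAGS stays literal in single quotes'", {"CFLAGS": "-O2"})
#     "'$CFLAGS stays literal in single quotes'"
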
755 # broken and removed, but can still be imported
756 pickle_write = None
757
758 def pickle_read(filename,default=None,debug=0):
759         if not os.access(filename, os.R_OK):
760                 writemsg(_("pickle_read(): File not readable. '")+filename+"'\n",1)
761                 return default
762         data = None
763         try:
764                 myf = open(_unicode_encode(filename,
765                         encoding=_encodings['fs'], errors='strict'), 'rb')
766                 mypickle = pickle.Unpickler(myf)
767                 data = mypickle.load()
768                 myf.close()
769                 del mypickle,myf
770                 writemsg(_("pickle_read(): Loaded pickle. '")+filename+"'\n",1)
771         except SystemExit as e:
772                 raise
773         except Exception as e:
774                 writemsg(_("!!! Failed to load pickle: ")+str(e)+"\n",1)
775                 data = default
776         return data
777
778 def dump_traceback(msg, noiselevel=1):
779         info = sys.exc_info()
780         if not info[2]:
781                 stack = traceback.extract_stack()[:-1]
782                 error = None
783         else:
784                 stack = traceback.extract_tb(info[2])
785                 error = str(info[1])
786         writemsg("\n====================================\n", noiselevel=noiselevel)
787         writemsg("%s\n\n" % msg, noiselevel=noiselevel)
788         for line in traceback.format_list(stack):
789                 writemsg(line, noiselevel=noiselevel)
790         if error:
791                 writemsg(error+"\n", noiselevel=noiselevel)
792         writemsg("====================================\n\n", noiselevel=noiselevel)
793
794 class cmp_sort_key(object):
795         """
796         In python-3.0 the list.sort() method no longer has a "cmp" keyword
797         argument. This class acts as an adapter which converts a cmp function
798         into one that's suitable for use as the "key" keyword argument to
799         list.sort(), making it easier to port code for python-3.0 compatibility.
800         It works by generating key objects which use the given cmp function to
801         implement their __lt__ method.
802         """
803         __slots__ = ("_cmp_func",)
804
805         def __init__(self, cmp_func):
806                 """
807                 @type cmp_func: callable which takes 2 positional arguments
808                 @param cmp_func: A cmp function.
809                 """
810                 self._cmp_func = cmp_func
811
812         def __call__(self, lhs):
813                 return self._cmp_key(self._cmp_func, lhs)
814
815         class _cmp_key(object):
816                 __slots__ = ("_cmp_func", "_obj")
817
818                 def __init__(self, cmp_func, obj):
819                         self._cmp_func = cmp_func
820                         self._obj = obj
821
822                 def __lt__(self, other):
823                         if other.__class__ is not self.__class__:
824                                 raise TypeError("Expected type %s, got %s" % \
825                                         (self.__class__, other.__class__))
826                         return self._cmp_func(self._obj, other._obj) < 0
827
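
# Editorial example (illustrative, not part of the original module): wrapping
# an old-style cmp function so it can be passed as the "key" argument of
# list.sort() under python3.
#
#     >>> def cmp_len(a, b):
#     ...     return len(a) - len(b)
#     >>> names = ["cc", "a", "bbb"]
#     >>> names.sort(key=cmp_sort_key(cmp_len))
#     >>> names
#     ['a', 'cc', 'bbb']
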
828 def unique_array(s):
829         """lifted from python cookbook, credit: Tim Peters
830         Return a list of the elements in s in arbitrary order, sans duplicates"""
831         n = len(s)
832         # assume all elements are hashable, if so, it's linear
833         try:
834                 return list(set(s))
835         except TypeError:
836                 pass
837
838         # so much for linear.  abuse sort.
839         try:
840                 t = list(s)
841                 t.sort()
842         except TypeError:
843                 pass
844         else:
845                 assert n > 0
846                 last = t[0]
847                 lasti = i = 1
848                 while i < n:
849                         if t[i] != last:
850                                 t[lasti] = last = t[i]
851                                 lasti += 1
852                         i += 1
853                 return t[:lasti]
854
855         # blah.  back to original portage.unique_array
856         u = []
857         for x in s:
858                 if x not in u:
859                         u.append(x)
860         return u
861
862 def unique_everseen(iterable, key=None):
863         """
864         List unique elements, preserving order. Remember all elements ever seen.
865         Taken from itertools documentation.
866         """
867         # unique_everseen('AAAABBBCCDAABBB') --> A B C D
868         # unique_everseen('ABBCcAD', str.lower) --> A B C D
869         seen = set()
870         seen_add = seen.add
871         if key is None:
872                 for element in filterfalse(seen.__contains__, iterable):
873                         seen_add(element)
874                         yield element
875         else:
876                 for element in iterable:
877                         k = key(element)
878                         if k not in seen:
879                                 seen_add(k)
880                                 yield element
881
882 def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
883         stat_cached=None, follow_links=True):
884         """Apply user, group, and mode bits to a file if the existing bits do not
885         already match.  The default behavior is to force an exact match of mode
886         bits.  When mask=0 is specified, mode bits on the target file are allowed
887         to be a superset of the mode argument (via logical OR).  When mask>0, the
888         mode bits that the target file is allowed to have are restricted via
889         logical XOR.
890         Returns True if the permissions were modified and False otherwise."""
891
892         modified = False
893
894         if stat_cached is None:
895                 try:
896                         if follow_links:
897                                 stat_cached = os.stat(filename)
898                         else:
899                                 stat_cached = os.lstat(filename)
900                 except OSError as oe:
901                         func_call = "stat('%s')" % filename
902                         if oe.errno == errno.EPERM:
903                                 raise OperationNotPermitted(func_call)
904                         elif oe.errno == errno.EACCES:
905                                 raise PermissionDenied(func_call)
906                         elif oe.errno == errno.ENOENT:
907                                 raise FileNotFound(filename)
908                         else:
909                                 raise
910
911         if      (uid != -1 and uid != stat_cached.st_uid) or \
912                 (gid != -1 and gid != stat_cached.st_gid):
913                 try:
914                         if follow_links:
915                                 os.chown(filename, uid, gid)
916                         else:
917                                 portage.data.lchown(filename, uid, gid)
918                         modified = True
919                 except OSError as oe:
920                         func_call = "chown('%s', %i, %i)" % (filename, uid, gid)
921                         if oe.errno == errno.EPERM:
922                                 raise OperationNotPermitted(func_call)
923                         elif oe.errno == errno.EACCES:
924                                 raise PermissionDenied(func_call)
925                         elif oe.errno == errno.EROFS:
926                                 raise ReadOnlyFileSystem(func_call)
927                         elif oe.errno == errno.ENOENT:
928                                 raise FileNotFound(filename)
929                         else:
930                                 raise
931
932         new_mode = -1
933         st_mode = stat_cached.st_mode & 0o7777 # protect from unwanted bits
934         if mask >= 0:
935                 if mode == -1:
936                         mode = 0 # Don't add any mode bits when mode is unspecified.
937                 else:
938                         mode = mode & 0o7777
939                 if      (mode & st_mode != mode) or \
940                         ((mask ^ st_mode) & st_mode != st_mode):
941                         new_mode = mode | st_mode
942                         new_mode = (mask ^ new_mode) & new_mode
943         elif mode != -1:
944                 mode = mode & 0o7777 # protect from unwanted bits
945                 if mode != st_mode:
946                         new_mode = mode
947
948         # The chown system call may clear S_ISUID and S_ISGID
949         # bits, so those bits are restored if necessary.
950         if modified and new_mode == -1 and \
951                 (st_mode & stat.S_ISUID or st_mode & stat.S_ISGID):
952                 if mode == -1:
953                         new_mode = st_mode
954                 else:
955                         mode = mode & 0o7777
956                         if mask >= 0:
957                                 new_mode = mode | st_mode
958                                 new_mode = (mask ^ new_mode) & new_mode
959                         else:
960                                 new_mode = mode
961                         if not (new_mode & stat.S_ISUID or new_mode & stat.S_ISGID):
962                                 new_mode = -1
963
964         if not follow_links and stat.S_ISLNK(stat_cached.st_mode):
965                 # Mode doesn't matter for symlinks.
966                 new_mode = -1
967
968         if new_mode != -1:
969                 try:
970                         os.chmod(filename, new_mode)
971                         modified = True
972                 except OSError as oe:
973                         func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
974                         if oe.errno == errno.EPERM:
975                                 raise OperationNotPermitted(func_call)
976                         elif oe.errno == errno.EACCES:
977                                 raise PermissionDenied(func_call)
978                         elif oe.errno == errno.EROFS:
979                                 raise ReadOnlyFileSystem(func_call)
980                         elif oe.errno == errno.ENOENT:
981                                 raise FileNotFound(filename)
982                         raise
983         return modified
984
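# Editorial note (illustrative, not part of the original module): with
# mask >= 0 the bits in 'mode' are forced on and the bits in 'mask' are
# forced off, while any other bits already present on the file are kept.
# For example, apply_permissions(path, mode=0o644, mask=0o22) would ensure
# the file is at least rw-r--r-- and never group- or other-writable, while
# leaving unrelated bits (such as execute bits) untouched.
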
985 def apply_stat_permissions(filename, newstat, **kwargs):
986         """A wrapper around apply_secpass_permissions that gets
987         uid, gid, and mode from a stat object"""
988         return apply_secpass_permissions(filename, uid=newstat.st_uid, gid=newstat.st_gid,
989                 mode=newstat.st_mode, **kwargs)
990
991 def apply_recursive_permissions(top, uid=-1, gid=-1,
992         dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
993         """A wrapper around apply_secpass_permissions that applies permissions
994         recursively.  If optional argument onerror is specified, it should be a
995         function; it will be called with one argument, a PortageException instance.
996         Returns True if all permissions are applied and False if some are left
997         unapplied."""
998
999         # Avoid issues with circular symbolic links, as in bug #339670.
1000         follow_links = False
1001
1002         if onerror is None:
1003                 # Default behavior is to dump errors to stderr so they won't
1004                 # go unnoticed.  Callers can pass in a quiet instance.
1005                 def onerror(e):
1006                         if isinstance(e, OperationNotPermitted):
1007                                 writemsg(_("Operation Not Permitted: %s\n") % str(e),
1008                                         noiselevel=-1)
1009                         elif isinstance(e, FileNotFound):
1010                                 writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)
1011                         else:
1012                                 raise
1013
1014         all_applied = True
1015         for dirpath, dirnames, filenames in os.walk(top):
1016                 try:
1017                         applied = apply_secpass_permissions(dirpath,
1018                                 uid=uid, gid=gid, mode=dirmode, mask=dirmask,
1019                                 follow_links=follow_links)
1020                         if not applied:
1021                                 all_applied = False
1022                 except PortageException as e:
1023                         all_applied = False
1024                         onerror(e)
1025
1026                 for name in filenames:
1027                         try:
1028                                 applied = apply_secpass_permissions(os.path.join(dirpath, name),
1029                                         uid=uid, gid=gid, mode=filemode, mask=filemask,
1030                                         follow_links=follow_links)
1031                                 if not applied:
1032                                         all_applied = False
1033                         except PortageException as e:
1034                                 # Ignore InvalidLocation exceptions such as FileNotFound
1035                                 # and DirectoryNotFound since sometimes things disappear,
1036                                 # like when adjusting permissions on DISTCC_DIR.
1037                                 if not isinstance(e, portage.exception.InvalidLocation):
1038                                         all_applied = False
1039                                         onerror(e)
1040         return all_applied
1041
1042 def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
1043         stat_cached=None, follow_links=True):
1044         """A wrapper around apply_permissions that uses secpass and simple
1045         logic to apply as much of the permissions as possible without
1046         generating an obviously avoidable permission exception. Despite
1047         attempts to avoid an exception, it's possible that one will be raised
1048         anyway, so be prepared.
1049         Returns True if all permissions are applied and False if some are left
1050         unapplied."""
1051
1052         if stat_cached is None:
1053                 try:
1054                         if follow_links:
1055                                 stat_cached = os.stat(filename)
1056                         else:
1057                                 stat_cached = os.lstat(filename)
1058                 except OSError as oe:
1059                         func_call = "stat('%s')" % filename
1060                         if oe.errno == errno.EPERM:
1061                                 raise OperationNotPermitted(func_call)
1062                         elif oe.errno == errno.EACCES:
1063                                 raise PermissionDenied(func_call)
1064                         elif oe.errno == errno.ENOENT:
1065                                 raise FileNotFound(filename)
1066                         else:
1067                                 raise
1068
1069         all_applied = True
1070
1071         if portage.data.secpass < 2:
1072
1073                 if uid != -1 and \
1074                 uid != stat_cached.st_uid:
1075                         all_applied = False
1076                         uid = -1
1077
1078                 if gid != -1 and \
1079                 gid != stat_cached.st_gid and \
1080                 gid not in os.getgroups():
1081                         all_applied = False
1082                         gid = -1
1083
1084         apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
1085                 stat_cached=stat_cached, follow_links=follow_links)
1086         return all_applied
1087
1088 class atomic_ofstream(ObjectProxy):
1089         """Write a file atomically via os.rename().  Atomic replacement prevents
1090         interprocess interference and prevents corruption of the target
1091         file when the write is interrupted (for example, when an 'out of space'
1092         error occurs)."""
1093
1094         def __init__(self, filename, mode='w', follow_links=True, **kargs):
1095                 """Opens a temporary filename.pid in the same directory as filename."""
1096                 ObjectProxy.__init__(self)
1097                 object.__setattr__(self, '_aborted', False)
1098                 if 'b' in mode:
1099                         open_func = open
1100                 else:
1101                         open_func = io.open
1102                         kargs.setdefault('encoding', _encodings['content'])
1103                         kargs.setdefault('errors', 'backslashreplace')
1104
1105                 if follow_links:
1106                         canonical_path = os.path.realpath(filename)
1107                         object.__setattr__(self, '_real_name', canonical_path)
1108                         tmp_name = "%s.%i" % (canonical_path, os.getpid())
1109                         try:
1110                                 object.__setattr__(self, '_file',
1111                                         open_func(_unicode_encode(tmp_name,
1112                                                 encoding=_encodings['fs'], errors='strict'),
1113                                                 mode=mode, **kargs))
1114                                 return
1115                         except IOError as e:
1116                                 if canonical_path == filename:
1117                                         raise
1118                                 # Ignore this error, since it's irrelevant
1119                                 # and the below open call will produce a
1120                                 # new error if necessary.
1121
1122                 object.__setattr__(self, '_real_name', filename)
1123                 tmp_name = "%s.%i" % (filename, os.getpid())
1124                 object.__setattr__(self, '_file',
1125                         open_func(_unicode_encode(tmp_name,
1126                                 encoding=_encodings['fs'], errors='strict'),
1127                                 mode=mode, **kargs))
1128
1129         def _get_target(self):
1130                 return object.__getattribute__(self, '_file')
1131
1132         if sys.hexversion >= 0x3000000:
1133
1134                 def __getattribute__(self, attr):
1135                         if attr in ('close', 'abort', '__del__'):
1136                                 return object.__getattribute__(self, attr)
1137                         return getattr(object.__getattribute__(self, '_file'), attr)
1138
1139         else:
1140
1141                 # For TextIOWrapper, automatically coerce write calls to
1142                 # unicode, in order to avoid TypeError when writing raw
1143                 # bytes with python2.
1144
1145                 def __getattribute__(self, attr):
1146                         if attr in ('close', 'abort', 'write', '__del__'):
1147                                 return object.__getattribute__(self, attr)
1148                         return getattr(object.__getattribute__(self, '_file'), attr)
1149
1150                 def write(self, s):
1151                         f = object.__getattribute__(self, '_file')
1152                         if isinstance(f, io.TextIOWrapper):
1153                                 s = _unicode_decode(s)
1154                         return f.write(s)
1155
1156         def close(self):
1157                 """Closes the temporary file, copies permissions (if possible),
1158                 and performs the atomic replacement via os.rename().  If the abort()
1159                 method has been called, then the temp file is closed and removed."""
1160                 f = object.__getattribute__(self, '_file')
1161                 real_name = object.__getattribute__(self, '_real_name')
1162                 if not f.closed:
1163                         try:
1164                                 f.close()
1165                                 if not object.__getattribute__(self, '_aborted'):
1166                                         try:
1167                                                 apply_stat_permissions(f.name, os.stat(real_name))
1168                                         except OperationNotPermitted:
1169                                                 pass
1170                                         except FileNotFound:
1171                                                 pass
1172                                         except OSError as oe: # from the above os.stat call
1173                                                 if oe.errno in (errno.ENOENT, errno.EPERM):
1174                                                         pass
1175                                                 else:
1176                                                         raise
1177                                         os.rename(f.name, real_name)
1178                         finally:
1179                                 # Make sure we cleanup the temp file
1180                                 # even if an exception is raised.
1181                                 try:
1182                                         os.unlink(f.name)
1183                                 except OSError:
1184                                         pass
1185
1186         def abort(self):
1187                 """If an error occurs while writing the file, the user should
1188                 call this method in order to leave the target file unchanged.
1189                 This will call close() automatically."""
1190                 if not object.__getattribute__(self, '_aborted'):
1191                         object.__setattr__(self, '_aborted', True)
1192                         self.close()
1193
1194         def __del__(self):
1195                 """If the user does not explicitly call close(), it is
1196                 assumed that an error has occurred, so we abort()."""
1197                 try:
1198                         f = object.__getattribute__(self, '_file')
1199                 except AttributeError:
1200                         pass
1201                 else:
1202                         if not f.closed:
1203                                 self.abort()
1204                 # ensure destructor from the base class is called
1205                 base_destructor = getattr(ObjectProxy, '__del__', None)
1206                 if base_destructor is not None:
1207                         base_destructor(self)
1208
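# A usage sketch for the atomic_ofstream class above (the path is
# hypothetical): the content is written to a temporary name and only renamed
# over the target on close(); abort() on failure leaves the original target
# untouched.
def _example_atomic_ofstream():
        f = atomic_ofstream("/etc/portage/example.conf")  # hypothetical path
        try:
                f.write("KEY=\"value\"\n")
        except Exception:
                f.abort()
                raise
        else:
                f.close()
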
1209 def write_atomic(file_path, content, **kwargs):
1210         f = None
1211         try:
1212                 f = atomic_ofstream(file_path, **kwargs)
1213                 f.write(content)
1214                 f.close()
1215         except (IOError, OSError) as e:
1216                 if f:
1217                         f.abort()
1218                 func_call = "write_atomic('%s')" % file_path
1219                 if e.errno == errno.EPERM:
1220                         raise OperationNotPermitted(func_call)
1221                 elif e.errno == errno.EACCES:
1222                         raise PermissionDenied(func_call)
1223                 elif e.errno == errno.EROFS:
1224                         raise ReadOnlyFileSystem(func_call)
1225                 elif e.errno == errno.ENOENT:
1226                         raise FileNotFound(file_path)
1227                 else:
1228                         raise
1229
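# write_atomic wraps the pattern above and translates common IOError/OSError
# values into portage exception types.  A sketch, with a hypothetical path:
def _example_write_atomic():
        try:
                write_atomic("/etc/portage/example.conf", "KEY=\"value\"\n")
        except ReadOnlyFileSystem:
                writemsg("!!! Read-only filesystem\n", noiselevel=-1)
        except (OperationNotPermitted, PermissionDenied):
                writemsg("!!! Insufficient permissions\n", noiselevel=-1)
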
1230 def ensure_dirs(dir_path, **kwargs):
1231         """Create a directory and call apply_permissions.
1232         Returns True if a directory was created or its permissions needed to
1233         be modified, and False otherwise.
1234
1235         This function's handling of EEXIST errors makes it useful for atomic
1236         directory creation, in which multiple processes may be competing to
1237         create the same directory.
1238         """
1239
1240         created_dir = False
1241
1242         try:
1243                 os.makedirs(dir_path)
1244                 created_dir = True
1245         except OSError as oe:
1246                 func_call = "makedirs('%s')" % dir_path
1247                 if oe.errno in (errno.EEXIST,):
1248                         pass
1249                 else:
1250                         if os.path.isdir(dir_path):
1251                                 # NOTE: DragonFly raises EPERM for mkdir('/')
1252                                 # and that is supposed to be ignored here.
1253                                 # Also, sometimes mkdir raises EISDIR on FreeBSD
1254                                 # and we want to ignore that too (bug #187518).
1255                                 pass
1256                         elif oe.errno == errno.EPERM:
1257                                 raise OperationNotPermitted(func_call)
1258                         elif oe.errno == errno.EACCES:
1259                                 raise PermissionDenied(func_call)
1260                         elif oe.errno == errno.EROFS:
1261                                 raise ReadOnlyFileSystem(func_call)
1262                         else:
1263                                 raise
1264         if kwargs:
1265                 perms_modified = apply_permissions(dir_path, **kwargs)
1266         else:
1267                 perms_modified = False
1268         return created_dir or perms_modified
1269
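# A sketch of the atomic directory creation described above (hypothetical path
# and mode): concurrent callers may race to create the same directory, and
# EEXIST is treated as success rather than an error.
def _example_ensure_dirs():
        created_or_modified = ensure_dirs("/var/tmp/portage-example-dir",
                mode=0o755)
        return created_or_modified
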
1270 class LazyItemsDict(UserDict):
1271         """A mapping object that behaves like a standard dict except that it allows
1272         for lazy initialization of values via callable objects.  Lazy items can be
1273         overwritten and deleted just as normal items."""
1274
1275         __slots__ = ('lazy_items',)
1276
1277         def __init__(self, *args, **kwargs):
1278
1279                 self.lazy_items = {}
1280                 UserDict.__init__(self, *args, **kwargs)
1281
1282         def addLazyItem(self, item_key, value_callable, *pargs, **kwargs):
1283                 """Add a lazy item for the given key.  When the item is requested,
1284                 value_callable will be called with *pargs and **kwargs arguments."""
1285                 self.lazy_items[item_key] = \
1286                         self._LazyItem(value_callable, pargs, kwargs, False)
1287                 # make it show up in self.keys(), etc...
1288                 UserDict.__setitem__(self, item_key, None)
1289
1290         def addLazySingleton(self, item_key, value_callable, *pargs, **kwargs):
1291                 """This is like addLazyItem except that value_callable will be called
1292                 at most once and the result will be cached for future requests."""
1293                 self.lazy_items[item_key] = \
1294                         self._LazyItem(value_callable, pargs, kwargs, True)
1295                 # make it show up in self.keys(), etc...
1296                 UserDict.__setitem__(self, item_key, None)
1297
1298         def update(self, *args, **kwargs):
1299                 if len(args) > 1:
1300                         raise TypeError(
1301                                 "expected at most 1 positional argument, got " + \
1302                                 repr(len(args)))
1303                 if args:
1304                         map_obj = args[0]
1305                 else:
1306                         map_obj = None
1307                 if map_obj is None:
1308                         pass
1309                 elif isinstance(map_obj, LazyItemsDict):
1310                         for k in map_obj:
1311                                 if k in map_obj.lazy_items:
1312                                         UserDict.__setitem__(self, k, None)
1313                                 else:
1314                                         UserDict.__setitem__(self, k, map_obj[k])
1315                         self.lazy_items.update(map_obj.lazy_items)
1316                 else:
1317                         UserDict.update(self, map_obj)
1318                 if kwargs:
1319                         UserDict.update(self, kwargs)
1320
1321         def __getitem__(self, item_key):
1322                 if item_key in self.lazy_items:
1323                         lazy_item = self.lazy_items[item_key]
1324                         pargs = lazy_item.pargs
1325                         if pargs is None:
1326                                 pargs = ()
1327                         kwargs = lazy_item.kwargs
1328                         if kwargs is None:
1329                                 kwargs = {}
1330                         result = lazy_item.func(*pargs, **kwargs)
1331                         if lazy_item.singleton:
1332                                 self[item_key] = result
1333                         return result
1334
1335                 else:
1336                         return UserDict.__getitem__(self, item_key)
1337
1338         def __setitem__(self, item_key, value):
1339                 if item_key in self.lazy_items:
1340                         del self.lazy_items[item_key]
1341                 UserDict.__setitem__(self, item_key, value)
1342
1343         def __delitem__(self, item_key):
1344                 if item_key in self.lazy_items:
1345                         del self.lazy_items[item_key]
1346                 UserDict.__delitem__(self, item_key)
1347
1348         def clear(self):
1349                 self.lazy_items.clear()
1350                 UserDict.clear(self)
1351
1352         def copy(self):
1353                 return self.__copy__()
1354
1355         def __copy__(self):
1356                 return self.__class__(self)
1357
1358         def __deepcopy__(self, memo=None):
1359                 """
1360                 This forces evaluation of each contained lazy item, and deepcopy of
1361                 the result. A TypeError is raised if any contained lazy item is not
1362                 a singleton, since it is not necessarily possible for the behavior
1363                 of this type of item to be safely preserved.
1364                 """
1365                 if memo is None:
1366                         memo = {}
1367                 result = self.__class__()
1368                 memo[id(self)] = result
1369                 for k in self:
1370                         k_copy = deepcopy(k, memo)
1371                         lazy_item = self.lazy_items.get(k)
1372                         if lazy_item is not None:
1373                                 if not lazy_item.singleton:
1374                                         raise TypeError(_unicode_decode("LazyItemsDict " + \
1375                                                 "deepcopy is unsafe with lazy items that are " + \
1376                                                 "not singletons: key=%s value=%s") % (k, lazy_item,))
1377                         UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
1378                 return result
1379
1380         class _LazyItem(object):
1381
1382                 __slots__ = ('func', 'pargs', 'kwargs', 'singleton')
1383
1384                 def __init__(self, func, pargs, kwargs, singleton):
1385
1386                         if not pargs:
1387                                 pargs = None
1388                         if not kwargs:
1389                                 kwargs = None
1390
1391                         self.func = func
1392                         self.pargs = pargs
1393                         self.kwargs = kwargs
1394                         self.singleton = singleton
1395
1396                 def __copy__(self):
1397                         return self.__class__(self.func, self.pargs,
1398                                 self.kwargs, self.singleton)
1399
1400                 def __deepcopy__(self, memo=None):
1401                         """
1402                         Override this since the default implementation can fail silently,
1403                         leaving some attributes unset.
1404                         """
1405                         if memo is None:
1406                                 memo = {}
1407                         result = self.__copy__()
1408                         memo[id(self)] = result
1409                         result.func = deepcopy(self.func, memo)
1410                         result.pargs = deepcopy(self.pargs, memo)
1411                         result.kwargs = deepcopy(self.kwargs, memo)
1412                         result.singleton = deepcopy(self.singleton, memo)
1413                         return result
1414
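# A sketch of how the LazyItemsDict class above defers expensive values (the
# keys and callables here are hypothetical): a plain lazy item is re-evaluated
# on every access, while a lazy singleton is evaluated once and then cached.
def _example_lazy_items_dict():
        import socket
        import time
        d = LazyItemsDict()
        # Re-evaluated on every lookup:
        d.addLazyItem("timestamp", time.time)
        # Evaluated at most once, then cached:
        d.addLazySingleton("hostname", socket.gethostname)
        return d["timestamp"], d["hostname"]
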
1415 class ConfigProtect(object):
1416         def __init__(self, myroot, protect_list, mask_list):
1417                 self.myroot = myroot
1418                 self.protect_list = protect_list
1419                 self.mask_list = mask_list
1420                 self.updateprotect()
1421
1422         def updateprotect(self):
1423                 """Update internal state for isprotected() calls.  Nonexistent paths
1424                 are ignored."""
1425
1426                 os = _os_merge
1427
1428                 self.protect = []
1429                 self._dirs = set()
1430                 for x in self.protect_list:
1431                         ppath = normalize_path(
1432                                 os.path.join(self.myroot, x.lstrip(os.path.sep)))
1433                         try:
1434                                 if stat.S_ISDIR(os.stat(ppath).st_mode):
1435                                         self._dirs.add(ppath)
1436                                 self.protect.append(ppath)
1437                         except OSError:
1438                                 # If it doesn't exist, there's no need to protect it.
1439                                 pass
1440
1441                 self.protectmask = []
1442                 for x in self.mask_list:
1443                         ppath = normalize_path(
1444                                 os.path.join(self.myroot, x.lstrip(os.path.sep)))
1445                         try:
1446                                 # Use lstat so that anything, even a broken
1447                                 # symlink, can be protected.
1448                                 if stat.S_ISDIR(os.lstat(ppath).st_mode):
1449                                         self._dirs.add(ppath)
1450                                 self.protectmask.append(ppath)
1451                                 # Now use stat in case this is a symlink to a directory.
1452                                 if stat.S_ISDIR(os.stat(ppath).st_mode):
1453                                         self._dirs.add(ppath)
1454                         except OSError:
1455                                 # If it doesn't exist, there's no need to mask it.
1456                                 pass
1457
1458         def isprotected(self, obj):
1459                 """Returns True if obj is protected, False otherwise.  The caller must
1460                 ensure that obj is normalized with a single leading slash.  A trailing
1461                 slash is optional for directories."""
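                # Find the longest matching CONFIG_PROTECT entry and the longest
                # matching CONFIG_PROTECT_MASK entry; the path is protected only
                # if the protect match is more specific than the mask match.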
1462                 masked = 0
1463                 protected = 0
1464                 sep = os.path.sep
1465                 for ppath in self.protect:
1466                         if len(ppath) > masked and obj.startswith(ppath):
1467                                 if ppath in self._dirs:
1468                                         if obj != ppath and not obj.startswith(ppath + sep):
1469                                                 # /etc/foo does not match /etc/foobaz
1470                                                 continue
1471                                 elif obj != ppath:
1472                                         # force exact match when CONFIG_PROTECT lists a
1473                                         # non-directory
1474                                         continue
1475                                 protected = len(ppath)
1476                                 # Check for a more specific CONFIG_PROTECT_MASK match.
1477                                 for pmpath in self.protectmask:
1478                                         if len(pmpath) >= protected and obj.startswith(pmpath):
1479                                                 if pmpath in self._dirs:
1480                                                         if obj != pmpath and \
1481                                                                 not obj.startswith(pmpath + sep):
1482                                                                 # /etc/foo does not match /etc/foobaz
1483                                                                 continue
1484                                                 elif obj != pmpath:
1485                                                         # force exact match when CONFIG_PROTECT_MASK lists
1486                                                         # a non-directory
1487                                                         continue
1488                                                 #skip, it's in the mask
1489                                                 masked = len(pmpath)
1490                 return protected > masked
1491
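# A sketch of the ConfigProtect class above with hypothetical CONFIG_PROTECT
# and CONFIG_PROTECT_MASK values: paths under a protected directory are
# protected unless a more specific mask entry covers them.  The commented
# results assume the listed paths exist on the system.
def _example_config_protect():
        cp = ConfigProtect("/",
                ["/etc"],                     # CONFIG_PROTECT
                ["/etc/env.d"])               # CONFIG_PROTECT_MASK
        cp.isprotected("/etc/fstab")          # True: under /etc
        cp.isprotected("/etc/env.d/00basic")  # False: masked by /etc/env.d
        return cp
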
1492 def new_protect_filename(mydest, newmd5=None, force=False):
1493         """Resolves a config-protect filename for merging: returns the path
1494         to write the new config file to.  If the md5 of the most recent
1495         existing ._cfg file matches newmd5, that file's path is returned so
1496         it can be reused; otherwise the next unused ._cfg####_<name> path is
1497         returned.  If force is True, a new filename is generated even if
1498         mydest does not exist yet; otherwise a nonexistent mydest is returned as-is.
1499         """
1500
1501         # config protection filename format:
1502         # ._cfg0000_foo
1503         # 0123456789012
1504
1505         os = _os_merge
1506
1507         prot_num = -1
1508         last_pfile = ""
1509
1510         if not force and \
1511                 not os.path.exists(mydest):
1512                 return mydest
1513
1514         real_filename = os.path.basename(mydest)
1515         real_dirname  = os.path.dirname(mydest)
1516         for pfile in os.listdir(real_dirname):
1517                 if pfile[0:5] != "._cfg":
1518                         continue
1519                 if pfile[10:] != real_filename:
1520                         continue
1521                 try:
1522                         new_prot_num = int(pfile[5:9])
1523                         if new_prot_num > prot_num:
1524                                 prot_num = new_prot_num
1525                                 last_pfile = pfile
1526                 except ValueError:
1527                         continue
1528         prot_num = prot_num + 1
1529
1530         new_pfile = normalize_path(os.path.join(real_dirname,
1531                 "._cfg" + str(prot_num).zfill(4) + "_" + real_filename))
1532         old_pfile = normalize_path(os.path.join(real_dirname, last_pfile))
1533         if last_pfile and newmd5:
1534                 try:
1535                         last_pfile_md5 = portage.checksum._perform_md5_merge(old_pfile)
1536                 except FileNotFound:
1537                         # The file suddenly disappeared or it's a broken symlink.
1538                         pass
1539                 else:
1540                         if last_pfile_md5 == newmd5:
1541                                 return old_pfile
1542         return new_pfile
1543
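# A sketch of the naming scheme used by new_protect_filename above (paths are
# hypothetical): when a protected file already exists, the merge writes to the
# next free ._cfg counter in the same directory.
def _example_new_protect_filename():
        target = "/etc/foo.conf"  # hypothetical protected file
        update_path = new_protect_filename(target)
        # e.g. "/etc/._cfg0000_foo.conf" for the first pending update,
        # "/etc/._cfg0001_foo.conf" for the next, and so on.
        return update_path
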
1544 def find_updated_config_files(target_root, config_protect):
1545         """
1546         Generate tuples of configuration files that need to be updated.
1547         Each tuple is organized like this:
1548         ( protected_dir, file_list )
1549         If the protected config is a protected_file rather than a protected_dir:
1550         ( protected_file, None )
1551         If no configuration files need to be updated, nothing is yielded.
1552         """
1553
1554         os = _os_merge
1555
1556         if config_protect:
1557                 # Each CONFIG_PROTECT entry may be a directory or a single file.
1558                 for x in config_protect:
1559                         files = []
1560
1561                         x = os.path.join(target_root, x.lstrip(os.path.sep))
1562                         if not os.access(x, os.W_OK):
1563                                 continue
1564                         try:
1565                                 mymode = os.lstat(x).st_mode
1566                         except OSError:
1567                                 continue
1568
1569                         if stat.S_ISLNK(mymode):
1570                                 # We want to treat it like a directory if it
1571                                 # is a symlink to an existing directory.
1572                                 try:
1573                                         real_mode = os.stat(x).st_mode
1574                                         if stat.S_ISDIR(real_mode):
1575                                                 mymode = real_mode
1576                                 except OSError:
1577                                         pass
1578
1579                         if stat.S_ISDIR(mymode):
1580                                 mycommand = \
1581                                         "find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
1582                         else:
1583                                 mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
1584                                                 os.path.split(x.rstrip(os.path.sep))
1585                         mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
1586                         a = subprocess_getstatusoutput(mycommand)
1587
1588                         if a[0] == 0:
1589                                 files = a[1].split('\0')
1590                                 # split always produces an empty string as the last element
1591                                 if files and not files[-1]:
1592                                         del files[-1]
1593                                 if files:
1594                                         if stat.S_ISDIR(mymode):
1595                                                 yield (x, files)
1596                                         else:
1597                                                 yield (x, None)
1598
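# A sketch of consuming the generator above (hypothetical root and
# CONFIG_PROTECT value), in the spirit of tools that report pending
# configuration updates:
def _example_find_updated_config_files():
        for path, files in find_updated_config_files("/", ["/etc"]):
                if files is None:
                        writemsg_stdout("config file needs updating: %s\n" % path)
                else:
                        for f in files:
                                writemsg_stdout("config file needs updating: %s\n" % f)
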
1599 def getlibpaths(root, env=None):
1600         """Return a list of paths that are used for library lookups."""
1601         def read_ld_so_conf(path):
1602                 # Recursively expand 'include' directives (which may contain globs).
1603                 for l in grabfile(path):
1604                         if l.startswith('include '):
1605                                 subpath = os.path.join(os.path.dirname(path), l[8:].strip())
1606                                 for p in glob.glob(subpath):
1607                                         for r in read_ld_so_conf(p):
1608                                                 yield r
1609                         else:
1610                                 yield l
1611         if env is None:
1612                 env = os.environ
1613         # the following is based on the information from ld.so(8)
1614         rval = env.get("LD_LIBRARY_PATH", "").split(":")
1615         rval.extend(read_ld_so_conf(os.path.join(root, "etc", "ld.so.conf")))
1616         rval.append("/usr/lib")
1617         rval.append("/lib")
1618
1619         return [normalize_path(x) for x in rval if x]
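# A sketch of how the include handling above resolves a typical layout
# (paths and file contents are hypothetical):
#
#   /etc/ld.so.conf:            include ld.so.conf.d/*.conf
#   /etc/ld.so.conf.d/gcc.conf: /usr/lib/gcc/x86_64-pc-linux-gnu/4.5.3
#
# read_ld_so_conf("/etc/ld.so.conf") follows the include, globs the
# ld.so.conf.d pattern relative to /etc, and yields the paths from every
# matching file, so getlibpaths() returns LD_LIBRARY_PATH entries followed
# by the ld.so.conf paths plus /usr/lib and /lib.
def _example_getlibpaths():
        paths = getlibpaths("/", env={"LD_LIBRARY_PATH": "/opt/example/lib"})
        # e.g. ['/opt/example/lib', '/usr/lib/gcc/x86_64-pc-linux-gnu/4.5.3',
        #       '/usr/lib', '/lib']
        return paths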