Use bytes instead of unicode with isinstance.
pym/portage/util/__init__.py (portage.git)
1 # Copyright 2004-2011 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3
4 __all__ = ['apply_permissions', 'apply_recursive_permissions',
5         'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream',
6         'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs',
7         'find_updated_config_files', 'getconfig', 'getlibpaths', 'grabdict',
8         'grabdict_package', 'grabfile', 'grabfile_package', 'grablines',
9         'initialize_logger', 'LazyItemsDict', 'map_dictlist_vals',
10         'new_protect_filename', 'normalize_path', 'pickle_read', 'stack_dictlist',
11         'stack_dicts', 'stack_lists', 'unique_array', 'unique_everseen', 'varexpand',
12         'write_atomic', 'writedict', 'writemsg', 'writemsg_level', 'writemsg_stdout']
13
14 from copy import deepcopy
15 import errno
16 import io
17 try:
18         from itertools import filterfalse
19 except ImportError:
20         from itertools import ifilterfalse as filterfalse
21 import logging
22 import re
23 import shlex
24 import stat
25 import string
26 import sys
27 import traceback
28
29 import portage
30 portage.proxy.lazyimport.lazyimport(globals(),
31         'pickle',
32         'portage.dep:Atom',
33         'portage.util.listdir:_ignorecvs_dirs'
34 )
35
36 from portage import os
37 from portage import subprocess_getstatusoutput
38 from portage import _encodings
39 from portage import _os_merge
40 from portage import _unicode_encode
41 from portage import _unicode_decode
42 from portage.exception import InvalidAtom, PortageException, FileNotFound, \
43        OperationNotPermitted, PermissionDenied, ReadOnlyFileSystem
44 from portage.localization import _
45 from portage.proxy.objectproxy import ObjectProxy
46 from portage.cache.mappings import UserDict
47
48 noiselimit = 0
49
50 def initialize_logger(level=logging.WARN):
51         """Sets up basic logging of portage activities
52         Args:
53                 level: the logging level to emit messages at (e.g. logging.INFO, logging.DEBUG, logging.WARNING)
54         Returns:
55                 None
56         """
57         logging.basicConfig(level=level, format='[%(levelname)-4s] %(message)s')
58
59 def writemsg(mystr,noiselevel=0,fd=None):
60         """Prints out warning and debug messages based on the noiselimit setting"""
61         global noiselimit
62         if fd is None:
63                 fd = sys.stderr
64         if noiselevel <= noiselimit:
65                 # avoid potential UnicodeEncodeError
66                 if isinstance(fd, io.StringIO):
67                         mystr = _unicode_decode(mystr,
68                                 encoding=_encodings['content'], errors='replace')
69                 else:
70                         mystr = _unicode_encode(mystr,
71                                 encoding=_encodings['stdio'], errors='backslashreplace')
72                         if sys.hexversion >= 0x3000000 and fd in (sys.stdout, sys.stderr):
73                                 fd = fd.buffer
74                 fd.write(mystr)
75                 fd.flush()
76
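# Illustrative usage sketch: with the default noiselimit of 0, messages whose
# noiselevel is <= 0 are written (to stderr unless fd is given) and higher
# noiselevels are suppressed until noiselimit is raised.
#
#     writemsg("always visible\n", noiselevel=-1)  # written to stderr
#     writemsg("debug detail\n", noiselevel=1)     # dropped while noiselimit == 0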
77 def writemsg_stdout(mystr,noiselevel=0):
78         """Prints messages to stdout based on the noiselimit setting"""
79         writemsg(mystr, noiselevel=noiselevel, fd=sys.stdout)
80
81 def writemsg_level(msg, level=0, noiselevel=0):
82         """
83         Show a message for the given level as defined by the logging module
84         (default is 0). When level >= logging.WARNING then the message is
85         sent to stderr, otherwise it is sent to stdout. The noiselevel is
86         passed directly to writemsg().
87
88         @type msg: str
89         @param msg: a message string, including newline if appropriate
90         @type level: int
91         @param level: a numeric logging level (see the logging module)
92         @type noiselevel: int
93         @param noiselevel: passed directly to writemsg
94         """
95         if level >= logging.WARNING:
96                 fd = sys.stderr
97         else:
98                 fd = sys.stdout
99         writemsg(msg, noiselevel=noiselevel, fd=fd)
100
101 def normalize_path(mypath):
102         """ 
103         os.path.normpath("//foo") returns "//foo" instead of "/foo"
104         We dislike this behavior so we create our own normpath func
105         to fix it.
106         """
107         if sys.hexversion >= 0x3000000 and isinstance(mypath, bytes):
108                 path_sep = os.path.sep.encode()
109         else:
110                 path_sep = os.path.sep
111
112         if mypath.startswith(path_sep):
113                 # posixpath.normpath collapses 3 or more leading slashes to just 1.
114                 return os.path.normpath(2*path_sep + mypath)
115         else:
116                 return os.path.normpath(mypath)
117
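# Illustrative doctest-style sketch of how this differs from os.path.normpath,
# which preserves exactly two leading slashes on POSIX:
#
#     >>> import os.path
#     >>> os.path.normpath("//foo/bar")
#     '//foo/bar'
#     >>> normalize_path("//foo/bar")
#     '/foo/bar'
#     >>> normalize_path("/foo//bar/../baz/")
#     '/foo/baz'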
118 def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False):
119         """This function grabs the lines in a file, normalizes whitespace and returns lines in a list; if a line
120         begins with a #, it is ignored, as are empty lines"""
121
122         mylines=grablines(myfilename, recursive, remember_source_file=True)
123         newlines=[]
124
125         for x, source_file in mylines:
126                 #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
127                 #into single spaces.
128                 myline = x.split()
129                 if x and x[0] != "#":
130                         mylinetemp = []
131                         for item in myline:
132                                 if item[:1] != "#":
133                                         mylinetemp.append(item)
134                                 else:
135                                         break
136                         myline = mylinetemp
137
138                 myline = " ".join(myline)
139                 if not myline:
140                         continue
141                 if myline[0]=="#":
142                         # Check if we have a compat-level string. BC-integration data.
143                         # '##COMPAT==>N<==' 'some string attached to it'
144                         mylinetest = myline.split("<==",1)
145                         if len(mylinetest) == 2:
146                                 myline_potential = mylinetest[1]
147                                 mylinetest = mylinetest[0].split("##COMPAT==>")
148                                 if len(mylinetest) == 2:
149                                         if compat_level >= int(mylinetest[1]):
150                                                 # It's a compat line, and the key matches.
151                                                 newlines.append(myline_potential)
152                                 continue
153                         else:
154                                 continue
155                 if remember_source_file:
156                         newlines.append((myline, source_file))
157                 else:
158                         newlines.append(myline)
159         return newlines
160
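# Illustrative sketch, assuming a file containing these three lines (leading
# whitespace, an inline comment, and a comment-only line):
#
#     "  sys-apps/portage   x86  # stable"
#     "# a full-line comment"
#     "dev-lang/python"
#
# grabfile() on that file would return ['sys-apps/portage x86',
# 'dev-lang/python']: whitespace is collapsed to single spaces, the trailing
# "# stable" comment is dropped, and comment-only and empty lines are skipped.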
161 def map_dictlist_vals(func,myDict):
162         """Performs a function on each value of each key in a dictlist.
163         Returns a new dictlist."""
164         new_dl = {}
165         for key in myDict:
166                 new_dl[key] = []
167                 new_dl[key] = [func(x) for x in myDict[key]]
168         return new_dl
169
170 def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0):
171         """
172         Stacks an array of dict-types into one array. Optionally merging or
173         overwriting matching key/value pairs for the dict[key]->list.
174         Returns a single dict. Higher index in lists is preferenced.
175         
176         Example usage:
177            >>> from portage.util import stack_dictlist
178            >>> stack_dictlist([{'a': ['b']}, {'x': ['y']}])
179            {'a': ['b'], 'x': ['y']}
180            >>> stack_dictlist([{'a': ['b']}, {'a': ['c']}], incremental=True)
181            {'a': ['b', 'c']}
182            >>> a = {'KEYWORDS': ['x86', 'alpha']}
183            >>> b = {'KEYWORDS': ['-x86']}
184            >>> stack_dictlist([a, b])
185            {'KEYWORDS': ['x86', 'alpha', '-x86']}
186            >>> stack_dictlist([a, b], incremental=True)
187            {'KEYWORDS': ['alpha']}
188            >>> stack_dictlist([a, b], incrementals=['KEYWORDS'])
189            {'KEYWORDS': ['alpha']}
190         
191         @param original_dicts: a list of (dictionary objects or None)
192         @type original_dicts: list
193         @param incremental: True or False depending on whether new keys should overwrite
194            keys which already exist.
195         @type incremental: boolean
196         @param incrementals: a list of items that should be incremental (-foo removes foo from
197            the returned dict).
198         @type incrementals: list
199         @param ignore_none: appears to be ignored, but probably was used long, long ago.
200         @type ignore_none: boolean
201         
202         """
203         final_dict = {}
204         for mydict in original_dicts:
205                 if mydict is None:
206                         continue
207                 for y in mydict:
208                         if not y in final_dict:
209                                 final_dict[y] = []
210                         
211                         for thing in mydict[y]:
212                                 if thing:
213                                         if incremental or y in incrementals:
214                                                 if thing == "-*":
215                                                         final_dict[y] = []
216                                                         continue
217                                                 elif thing[:1] == '-':
218                                                         try:
219                                                                 final_dict[y].remove(thing[1:])
220                                                         except ValueError:
221                                                                 pass
222                                                         continue
223                                         if thing not in final_dict[y]:
224                                                 final_dict[y].append(thing)
225                         if y in final_dict and not final_dict[y]:
226                                 del final_dict[y]
227         return final_dict
228
229 def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
230         """Stacks an array of dict-types into one array. Optionally merging or
231         overwriting matching key/value pairs for the dict[key]->string.
232         Returns a single dict."""
233         final_dict = {}
234         for mydict in dicts:
235                 if not mydict:
236                         continue
237                 for k, v in mydict.items():
238                         if k in final_dict and (incremental or (k in incrementals)):
239                                 final_dict[k] += " " + v
240                         else:
241                                 final_dict[k]  = v
242         return final_dict
243
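# Illustrative doctest-style sketch: string values are concatenated for
# incremental keys, otherwise later dicts simply win.
#
#     >>> stack_dicts([{'USE': 'foo'}, {'USE': 'bar'}], incrementals=['USE'])
#     {'USE': 'foo bar'}
#     >>> stack_dicts([{'CHOST': 'a'}, {'CHOST': 'b'}])
#     {'CHOST': 'b'}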
244 def append_repo(atom_list, repo_name, remember_source_file=False):
245         """
246         Takes a list of valid atoms without repo spec and appends ::repo_name.
247         """
248         if remember_source_file:
249                 return [(Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True), source) \
250                         for atom, source in atom_list]
251         else:
252                 return [Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True) \
253                         for atom in atom_list]
254
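# Illustrative sketch: given atoms without a repo part, append_repo() returns
# the same atoms rewritten with an explicit ::repo_name suffix, e.g. the atoms
# "dev-libs/foo" and ">=sys-apps/bar-1" with repo_name "gentoo" become
# Atom("dev-libs/foo::gentoo") and Atom(">=sys-apps/bar-1::gentoo").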
255 def stack_lists(lists, incremental=1, remember_source_file=False,
256         warn_for_unmatched_removal=False, strict_warn_for_unmatched_removal=False, ignore_repo=False):
257         """Stacks an array of list-types into one array. Optionally removing
258         distinct values using '-value' notation. Higher index is preferenced.
259
260         all elements must be hashable."""
261         matched_removals = set()
262         unmatched_removals = {}
263         new_list = {}
264         for sub_list in lists:
265                 for token in sub_list:
266                         token_key = token
267                         if remember_source_file:
268                                 token, source_file = token
269                         else:
270                                 source_file = False
271
272                         if token is None:
273                                 continue
274
275                         if incremental:
276                                 if token == "-*":
277                                         new_list.clear()
278                                 elif token[:1] == '-':
279                                         matched = False
280                                         if ignore_repo and not "::" in token:
281                                                 #Let -cat/pkg remove cat/pkg::repo.
282                                                 to_be_removed = []
283                                                 token_slice = token[1:]
284                                                 for atom in new_list:
285                                                         atom_without_repo = atom
286                                                         if atom.repo is not None:
287                                                                 # Atom.without_repo instantiates a new Atom,
288                                                                 # which is unnecessary here, so use string
289                                                                 # replacement instead.
290                                                                 atom_without_repo = \
291                                                                         atom.replace("::" + atom.repo, "", 1)
292                                                         if atom_without_repo == token_slice:
293                                                                 to_be_removed.append(atom)
294                                                 if to_be_removed:
295                                                         matched = True
296                                                         for atom in to_be_removed:
297                                                                 new_list.pop(atom)
298                                         else:
299                                                 try:
300                                                         new_list.pop(token[1:])
301                                                         matched = True
302                                                 except KeyError:
303                                                         pass
304
305                                         if not matched:
306                                                 if source_file and \
307                                                         (strict_warn_for_unmatched_removal or \
308                                                         token_key not in matched_removals):
309                                                         unmatched_removals.setdefault(source_file, set()).add(token)
310                                         else:
311                                                 matched_removals.add(token_key)
312                                 else:
313                                         new_list[token] = source_file
314                         else:
315                                 new_list[token] = source_file
316
317         if warn_for_unmatched_removal:
318                 for source_file, tokens in unmatched_removals.items():
319                         if len(tokens) > 3:
320                                 selected = [tokens.pop(), tokens.pop(), tokens.pop()]
321                                 writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \
322                                         (source_file, ", ".join(selected), len(tokens)),
323                                         noiselevel=-1)
324                         else:
325                                 writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") % (source_file, ", ".join(tokens)),
326                                         noiselevel=-1)
327
328         if remember_source_file:
329                 return list(new_list.items())
330         else:
331                 return list(new_list)
332
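# Illustrative sketch of the incremental '-value' semantics: stacking
# [['x86', 'alpha'], ['-x86', 'ppc']] yields ['alpha', 'ppc'], since the
# '-x86' token removes the previously accumulated 'x86', while a '-*' token
# clears everything accumulated so far, so [['x86', 'alpha'], ['-*', 'ppc']]
# yields just ['ppc'].  Unmatched removals can be reported by passing
# warn_for_unmatched_removal=True together with remember_source_file=True.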
333 def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
334         """
335         This function grabs the lines in a file, normalizes whitespace and returns lines in a dictionary
336         
337         @param myfilename: file to process
338         @type myfilename: string (path)
339         @param juststrings: only return strings
340         @type juststrings: Boolean (integer)
341         @param empty: Ignore certain lines
342         @type empty: Boolean (integer)
343         @param recursive: Recursively grab ( support for /etc/portage/package.keywords/* and friends )
344         @type recursive: Boolean (integer)
345         @param incremental: Append to the return list, don't overwrite
346         @type incremental: Boolean (integer)
347         @rtype: Dictionary
348         @returns:
349         1.  Returns the lines in a file in a dictionary, for example:
350                 'sys-apps/portage x86 amd64 ppc'
351                 would return
352                 { "sys-apps/portage" : [ 'x86', 'amd64', 'ppc' ] }
353                 the line syntax is key : [list of values]
354         """
355         newdict={}
356         for x in grablines(myfilename, recursive):
357                 #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
358                 #into single spaces.
359                 if x[0] == "#":
360                         continue
361                 myline=x.split()
362                 mylinetemp = []
363                 for item in myline:
364                         if item[:1] != "#":
365                                 mylinetemp.append(item)
366                         else:
367                                 break
368                 myline = mylinetemp
369                 if len(myline) < 2 and empty == 0:
370                         continue
371                 if len(myline) < 1 and empty == 1:
372                         continue
373                 if incremental:
374                         newdict.setdefault(myline[0], []).extend(myline[1:])
375                 else:
376                         newdict[myline[0]] = myline[1:]
377         if juststrings:
378                 for k, v in newdict.items():
379                         newdict[k] = " ".join(v)
380         return newdict
381
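# Illustrative sketch, assuming a file containing these two lines:
#
#     sys-apps/portage x86 amd64
#     sys-apps/portage ppc
#
# grabdict() would return {'sys-apps/portage': ['x86', 'amd64', 'ppc']}, since
# incremental=1 appends to an existing key; juststrings=1 would instead join
# each value list into a single string, i.e. 'x86 amd64 ppc'.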
382 def read_corresponding_eapi_file(filename):
383         """
384         Read the 'eapi' file from the directory 'filename' is in.
385         Returns "0" if the file is not present or invalid.
386         """
387         default = "0"
388         eapi_file = os.path.join(os.path.dirname(filename), "eapi")
389         try:
390                 f = open(eapi_file, "r")
391                 lines = f.readlines()
392                 if len(lines) == 1:
393                         eapi = lines[0].rstrip("\n")
394                 else:
395                         writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
396                                 noiselevel=-1)
397                         eapi = default
398                 f.close()
399         except IOError:
400                 eapi = default
401
402         return eapi
403
404 def grabdict_package(myfilename, juststrings=0, recursive=0, allow_wildcard=False, allow_repo=False,
405         verify_eapi=False, eapi=None):
406         """ Does the same thing as grabdict except it validates keys
407             with isvalidatom()"""
408         pkgs=grabdict(myfilename, juststrings, empty=1, recursive=recursive)
409         if not pkgs:
410                 return pkgs
411         if verify_eapi and eapi is None:
412                 eapi = read_corresponding_eapi_file(myfilename)
413
414         # Build a new dict rather than deleting invalid atoms from pkgs while
415         # iterating over it, which would risk "RuntimeError: dictionary changed
416         # size during iteration".
417         atoms = {}
418         for k, v in pkgs.items():
419                 try:
420                         k = Atom(k, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
421                 except InvalidAtom as e:
422                         writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
423                                 noiselevel=-1)
424                 else:
425                         atoms[k] = v
426         return atoms
427
428 def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=False, allow_repo=False,
429         remember_source_file=False, verify_eapi=False, eapi=None):
430
431         pkgs=grabfile(myfilename, compatlevel, recursive=recursive, remember_source_file=True)
432         if not pkgs:
433                 return pkgs
434         if verify_eapi and eapi is None:
435                 eapi = read_corresponding_eapi_file(myfilename)
436         mybasename = os.path.basename(myfilename)
437         atoms = []
438         for pkg, source_file in pkgs:
439                 pkg_orig = pkg
440                 # for packages and package.mask files
441                 if pkg[:1] == "-":
442                         pkg = pkg[1:]
443                 if pkg[:1] == '*' and mybasename == 'packages':
444                         pkg = pkg[1:]
445                 try:
446                         pkg = Atom(pkg, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
447                 except InvalidAtom as e:
448                         writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
449                                 noiselevel=-1)
450                 else:
451                         if pkg_orig == str(pkg):
452                                 # normal atom, so return as Atom instance
453                                 if remember_source_file:
454                                         atoms.append((pkg, source_file))
455                                 else:
456                                         atoms.append(pkg)
457                         else:
458                                 # atom has special prefix, so return as string
459                                 if remember_source_file:
460                                         atoms.append((pkg_orig, source_file))
461                                 else:
462                                         atoms.append(pkg_orig)
463         return atoms
464
465 def grablines(myfilename, recursive=0, remember_source_file=False):
466         mylines=[]
467         if recursive and os.path.isdir(myfilename):
468                 if os.path.basename(myfilename) in _ignorecvs_dirs:
469                         return mylines
470                 dirlist = os.listdir(myfilename)
471                 dirlist.sort()
472                 for f in dirlist:
473                         if not f.startswith(".") and not f.endswith("~"):
474                                 mylines.extend(grablines(
475                                         os.path.join(myfilename, f), recursive, remember_source_file))
476         else:
477                 try:
478                         myfile = io.open(_unicode_encode(myfilename,
479                                 encoding=_encodings['fs'], errors='strict'),
480                                 mode='r', encoding=_encodings['content'], errors='replace')
481                         if remember_source_file:
482                                 mylines = [(line, myfilename) for line in myfile.readlines()]
483                         else:
484                                 mylines = myfile.readlines()
485                         myfile.close()
486                 except IOError as e:
487                         if e.errno == PermissionDenied.errno:
488                                 raise PermissionDenied(myfilename)
489                         pass
490         return mylines
491
492 def writedict(mydict,myfilename,writekey=True):
493         """Writes out a dict to a file; writekey=0 mode doesn't write out
494         the key and assumes all values are strings, not lists."""
495         lines = []
496         if not writekey:
497                 for v in mydict.values():
498                         lines.append(v + "\n")
499         else:
500                 for k, v in mydict.items():
501                         lines.append("%s %s\n" % (k, " ".join(v)))
502         write_atomic(myfilename, "".join(lines))
503
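# Illustrative sketch (path is a hypothetical filename):
# writedict({'sys-apps/portage': ['x86', 'amd64']}, path) writes the single
# line "sys-apps/portage x86 amd64" atomically via write_atomic(), while
# writekey=False writes only the values and expects them to be plain strings
# rather than lists.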
504 def shlex_split(s):
505         """
506         This is equivalent to shlex.split, but if the current interpreter is
507         python2, it temporarily encodes unicode strings to bytes since python2's
508         shlex.split() doesn't handle unicode strings.
509         """
510         convert_to_bytes = sys.hexversion < 0x3000000 and not isinstance(s, bytes)
511         if convert_to_bytes:
512                 s = _unicode_encode(s)
513         rval = shlex.split(s)
514         if convert_to_bytes:
515                 rval = [_unicode_decode(x) for x in rval]
516         return rval
517
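# Illustrative doctest-style sketch: behaves like shlex.split() while always
# accepting and returning unicode, on both Python 2 and Python 3.
#
#     >>> shlex_split('CFLAGS="-O2 -pipe" USE="foo bar"')
#     ['CFLAGS=-O2 -pipe', 'USE=foo bar']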
518 class _tolerant_shlex(shlex.shlex):
519         def sourcehook(self, newfile):
520                 try:
521                         return shlex.shlex.sourcehook(self, newfile)
522                 except EnvironmentError as e:
523                         writemsg(_("!!! Parse error in '%s': source command failed: %s\n") % \
524                                 (self.infile, str(e)), noiselevel=-1)
525                         return (newfile, io.StringIO())
526
527 _invalid_var_name_re = re.compile(r'^\d|\W')
528
529 def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
530         if isinstance(expand, dict):
531                 # Some existing variable definitions have been
532                 # passed in, for use in substitutions.
533                 expand_map = expand
534                 expand = True
535         else:
536                 expand_map = {}
537         mykeys = {}
538         f = None
539         try:
540                 # NOTE: shlex doesn't support unicode objects with Python 2
541                 # (produces spurious \0 characters).
542                 if sys.hexversion < 0x3000000:
543                         f = open(_unicode_encode(mycfg,
544                                 encoding=_encodings['fs'], errors='strict'), 'rb')
545                 else:
546                         f = open(_unicode_encode(mycfg,
547                                 encoding=_encodings['fs'], errors='strict'), mode='r',
548                                 encoding=_encodings['content'], errors='replace')
549                 content = f.read()
550         except IOError as e:
551                 if e.errno == PermissionDenied.errno:
552                         raise PermissionDenied(mycfg)
553                 if e.errno != errno.ENOENT:
554                         writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
555                         if e.errno not in (errno.EISDIR,):
556                                 raise
557                 return None
558         finally:
559                 if f is not None:
560                         f.close()
561
562         # Workaround for avoiding a silent error in shlex that is
563         # triggered by a source statement at the end of the file
564         # without a trailing newline after the source statement.
565         if content and content[-1] != '\n':
566                 content += '\n'
567
568         # Warn about dos-style line endings since that prevents
569         # people from being able to source them with bash.
570         if '\r' in content:
571                 writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
572                         "in config file: '%s'") + "\n") % mycfg, noiselevel=-1)
573
574         try:
575                 if tolerant:
576                         shlex_class = _tolerant_shlex
577                 else:
578                         shlex_class = shlex.shlex
579                 # The default shlex.sourcehook() implementation
580                 # only joins relative paths when the infile
581                 # attribute is properly set.
582                 lex = shlex_class(content, infile=mycfg, posix=True)
583                 lex.wordchars = string.digits + string.ascii_letters + \
584                         "~!@#$%*_\:;?,./-+{}"
585                 lex.quotes="\"'"
586                 if allow_sourcing:
587                         lex.source="source"
588                 while 1:
589                         key=lex.get_token()
590                         if key == "export":
591                                 key = lex.get_token()
592                         if key is None:
593                                 #normal end of file
594                                         break
595                         equ=lex.get_token()
596                         if (equ==''):
597                                 #unexpected end of file
598                                 #lex.error_leader(self.filename,lex.lineno)
599                                 if not tolerant:
600                                         writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
601                                                 noiselevel=-1)
602                                         raise Exception(_("ParseError: Unexpected EOF: %s: on/before line %s") % (mycfg, lex.lineno))
603                                 else:
604                                         return mykeys
605                         elif (equ!='='):
606                                 #invalid token
607                                 #lex.error_leader(self.filename,lex.lineno)
608                                 if not tolerant:
609                                         raise Exception(_("ParseError: Invalid token "
610                                                 "'%s' (not '='): %s: line %s") % \
611                                                 (equ, mycfg, lex.lineno))
612                                 else:
613                                         return mykeys
614                         val=lex.get_token()
615                         if val is None:
616                                 #unexpected end of file
617                                 #lex.error_leader(self.filename,lex.lineno)
618                                 if not tolerant:
619                                         writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
620                                                 noiselevel=-1)
621                                         raise portage.exception.CorruptionError(_("ParseError: Unexpected EOF: %s: line %s") % (mycfg, lex.lineno))
622                                 else:
623                                         return mykeys
624                         key = _unicode_decode(key)
625                         val = _unicode_decode(val)
626
627                         if _invalid_var_name_re.search(key) is not None:
628                                 if not tolerant:
629                                         raise Exception(_(
630                                                 "ParseError: Invalid variable name '%s': line %s") % \
631                                                 (key, lex.lineno - 1))
632                                 writemsg(_("!!! Invalid variable name '%s': line %s in %s\n") \
633                                         % (key, lex.lineno - 1, mycfg), noiselevel=-1)
634                                 continue
635
636                         if expand:
637                                 mykeys[key] = varexpand(val, expand_map)
638                                 expand_map[key] = mykeys[key]
639                         else:
640                                 mykeys[key] = val
641         except SystemExit as e:
642                 raise
643         except Exception as e:
644                 raise portage.exception.ParseError(str(e)+" in "+mycfg)
645         return mykeys
646         
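# Illustrative sketch, assuming a bash-like config file (e.g. a make.conf)
# containing the two lines:
#
#     FOO="bar"
#     BAR="${FOO} baz"
#
# getconfig() on that path would return {'FOO': 'bar', 'BAR': 'bar baz'}:
# quotes are stripped by shlex, and with expand=True each value is passed
# through varexpand() using the assignments seen so far (or a pre-populated
# dict passed as the expand argument).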
647 #cache expansions of constant strings
648 cexpand={}
649 def varexpand(mystring, mydict=None):
650         if mydict is None:
651                 mydict = {}
652         newstring = cexpand.get(" "+mystring, None)
653         if newstring is not None:
654                 return newstring
655
656         """
657         new variable expansion code.  Preserves quotes, handles \n, etc.
658         This code is used by the configfile code, as well as others (parser)
659         This would be a good bunch of code to port to C.
660         """
661         numvars=0
662         mystring=" "+mystring
663         #in single, double quotes
664         insing=0
665         indoub=0
666         pos=1
667         newstring=" "
668         while (pos<len(mystring)):
669                 if (mystring[pos]=="'") and (mystring[pos-1]!="\\"):
670                         if (indoub):
671                                 newstring=newstring+"'"
672                         else:
673                                 newstring += "'" # Quote removal is handled by shlex.
674                                 insing=not insing
675                         pos=pos+1
676                         continue
677                 elif (mystring[pos]=='"') and (mystring[pos-1]!="\\"):
678                         if (insing):
679                                 newstring=newstring+'"'
680                         else:
681                                 newstring += '"' # Quote removal is handled by shlex.
682                                 indoub=not indoub
683                         pos=pos+1
684                         continue
685                 if (not insing): 
686                         #expansion time
687                         if (mystring[pos]=="\n"):
688                                 #convert newlines to spaces
689                                 newstring=newstring+" "
690                                 pos=pos+1
691                         elif (mystring[pos]=="\\"):
692                                 # For backslash expansion, this function used to behave like
693                                 # echo -e, but that's not needed for our purposes. We want to
694                                 # behave like bash does when expanding a variable assignment
695                                 # in a sourced file, in which case it performs backslash
696                                 # removal for \\ and \$ but nothing more. It also removes
697                                 # escaped newline characters. Note that we don't handle
698                                 # escaped quotes here, since getconfig() uses shlex
699                                 # to handle that earlier.
700                                 if (pos+1>=len(mystring)):
701                                         newstring=newstring+mystring[pos]
702                                         break
703                                 else:
704                                         a = mystring[pos + 1]
705                                         pos = pos + 2
706                                         if a in ("\\", "$"):
707                                                 newstring = newstring + a
708                                         elif a == "\n":
709                                                 pass
710                                         else:
711                                                 newstring = newstring + mystring[pos-2:pos]
712                                         continue
713                         elif (mystring[pos]=="$") and (mystring[pos-1]!="\\"):
714                                 pos=pos+1
715                                 if mystring[pos]=="{":
716                                         pos=pos+1
717                                         braced=True
718                                 else:
719                                         braced=False
720                                 myvstart=pos
721                                 validchars=string.ascii_letters+string.digits+"_"
722                                 while mystring[pos] in validchars:
723                                         if (pos+1)>=len(mystring):
724                                                 if braced:
725                                                         cexpand[mystring]=""
726                                                         return ""
727                                                 else:
728                                                         pos=pos+1
729                                                         break
730                                         pos=pos+1
731                                 myvarname=mystring[myvstart:pos]
732                                 if braced:
733                                         if mystring[pos]!="}":
734                                                 cexpand[mystring]=""
735                                                 return ""
736                                         else:
737                                                 pos=pos+1
738                                 if len(myvarname)==0:
739                                         cexpand[mystring]=""
740                                         return ""
741                                 numvars=numvars+1
742                                 if myvarname in mydict:
743                                         newstring=newstring+mydict[myvarname] 
744                         else:
745                                 newstring=newstring+mystring[pos]
746                                 pos=pos+1
747                 else:
748                         newstring=newstring+mystring[pos]
749                         pos=pos+1
750         if numvars==0:
751                 cexpand[mystring]=newstring[1:]
752         return newstring[1:]    
753
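# Illustrative doctest-style sketch: varexpand() substitutes $VAR and ${VAR}
# references from the supplied dict, performs backslash removal for \\ and \$,
# and leaves quote removal to shlex; undefined variables expand to nothing.
#
#     >>> varexpand("${FOO} and $BAR", {"FOO": "a", "BAR": "b"})
#     'a and b'
#     >>> varexpand("$MISSING!", {})
#     '!'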
754 # broken and removed, but can still be imported
755 pickle_write = None
756
757 def pickle_read(filename,default=None,debug=0):
758         if not os.access(filename, os.R_OK):
759                 writemsg(_("pickle_read(): File not readable. '")+filename+"'\n",1)
760                 return default
761         data = None
762         try:
763                 myf = open(_unicode_encode(filename,
764                         encoding=_encodings['fs'], errors='strict'), 'rb')
765                 mypickle = pickle.Unpickler(myf)
766                 data = mypickle.load()
767                 myf.close()
768                 del mypickle,myf
769                 writemsg(_("pickle_read(): Loaded pickle. '")+filename+"'\n",1)
770         except SystemExit as e:
771                 raise
772         except Exception as e:
773                 writemsg(_("!!! Failed to load pickle: ")+str(e)+"\n",1)
774                 data = default
775         return data
776
777 def dump_traceback(msg, noiselevel=1):
778         info = sys.exc_info()
779         if not info[2]:
780                 stack = traceback.extract_stack()[:-1]
781                 error = None
782         else:
783                 stack = traceback.extract_tb(info[2])
784                 error = str(info[1])
785         writemsg("\n====================================\n", noiselevel=noiselevel)
786         writemsg("%s\n\n" % msg, noiselevel=noiselevel)
787         for line in traceback.format_list(stack):
788                 writemsg(line, noiselevel=noiselevel)
789         if error:
790                 writemsg(error+"\n", noiselevel=noiselevel)
791         writemsg("====================================\n\n", noiselevel=noiselevel)
792
793 class cmp_sort_key(object):
794         """
795         In python-3.0 the list.sort() method no longer has a "cmp" keyword
796         argument. This class acts as an adapter which converts a cmp function
797         into one that's suitable for use as the "key" keyword argument to
798         list.sort(), making it easier to port code for python-3.0 compatibility.
799         It works by generating key objects which use the given cmp function to
800         implement their __lt__ method.
801         """
802         __slots__ = ("_cmp_func",)
803
804         def __init__(self, cmp_func):
805                 """
806                 @type cmp_func: callable which takes 2 positional arguments
807                 @param cmp_func: A cmp function.
808                 """
809                 self._cmp_func = cmp_func
810
811         def __call__(self, lhs):
812                 return self._cmp_key(self._cmp_func, lhs)
813
814         class _cmp_key(object):
815                 __slots__ = ("_cmp_func", "_obj")
816
817                 def __init__(self, cmp_func, obj):
818                         self._cmp_func = cmp_func
819                         self._obj = obj
820
821                 def __lt__(self, other):
822                         if other.__class__ is not self.__class__:
823                                 raise TypeError("Expected type %s, got %s" % \
824                                         (self.__class__, other.__class__))
825                         return self._cmp_func(self._obj, other._obj) < 0
826
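# Illustrative sketch: wrapping a legacy cmp-style function (version_cmp below
# is a hypothetical example) so it can be used as a sort key on both Python 2
# and Python 3.
#
#     def version_cmp(a, b):
#         # returns negative/zero/positive like the old cmp()
#         return (a > b) - (a < b)
#
#     names = ["portage", "bash", "gcc"]
#     names.sort(key=cmp_sort_key(version_cmp))
#     # names is now ['bash', 'gcc', 'portage']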
827 def unique_array(s):
828         """lifted from python cookbook, credit: Tim Peters
829         Return a list of the elements in s in arbitrary order, sans duplicates"""
830         n = len(s)
831         # assume all elements are hashable, if so, it's linear
832         try:
833                 return list(set(s))
834         except TypeError:
835                 pass
836
837         # so much for linear.  abuse sort.
838         try:
839                 t = list(s)
840                 t.sort()
841         except TypeError:
842                 pass
843         else:
844                 assert n > 0
845                 last = t[0]
846                 lasti = i = 1
847                 while i < n:
848                         if t[i] != last:
849                                 t[lasti] = last = t[i]
850                                 lasti += 1
851                         i += 1
852                 return t[:lasti]
853
854         # blah.  back to original portage.unique_array
855         u = []
856         for x in s:
857                 if x not in u:
858                         u.append(x)
859         return u
860
861 def unique_everseen(iterable, key=None):
862     """
863     List unique elements, preserving order. Remember all elements ever seen.
864     Taken from itertools documentation.
865     """
866     # unique_everseen('AAAABBBCCDAABBB') --> A B C D
867     # unique_everseen('ABBCcAD', str.lower) --> A B C D
868     seen = set()
869     seen_add = seen.add
870     if key is None:
871         for element in filterfalse(seen.__contains__, iterable):
872             seen_add(element)
873             yield element
874     else:
875         for element in iterable:
876             k = key(element)
877             if k not in seen:
878                 seen_add(k)
879                 yield element
880
881 def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
882         stat_cached=None, follow_links=True):
883         """Apply user, group, and mode bits to a file if the existing bits do not
884         already match.  The default behavior is to force an exact match of mode
885         bits.  When mask=0 is specified, mode bits on the target file are allowed
886         to be a superset of the mode argument (via logical OR).  When mask>0, the
887         mode bits that the target file is allowed to have are restricted via
888         logical XOR.
889         Returns True if the permissions were modified and False otherwise."""
890
891         modified = False
892
893         if stat_cached is None:
894                 try:
895                         if follow_links:
896                                 stat_cached = os.stat(filename)
897                         else:
898                                 stat_cached = os.lstat(filename)
899                 except OSError as oe:
900                         func_call = "stat('%s')" % filename
901                         if oe.errno == errno.EPERM:
902                                 raise OperationNotPermitted(func_call)
903                         elif oe.errno == errno.EACCES:
904                                 raise PermissionDenied(func_call)
905                         elif oe.errno == errno.ENOENT:
906                                 raise FileNotFound(filename)
907                         else:
908                                 raise
909
910         if      (uid != -1 and uid != stat_cached.st_uid) or \
911                 (gid != -1 and gid != stat_cached.st_gid):
912                 try:
913                         if follow_links:
914                                 os.chown(filename, uid, gid)
915                         else:
916                                 portage.data.lchown(filename, uid, gid)
917                         modified = True
918                 except OSError as oe:
919                         func_call = "chown('%s', %i, %i)" % (filename, uid, gid)
920                         if oe.errno == errno.EPERM:
921                                 raise OperationNotPermitted(func_call)
922                         elif oe.errno == errno.EACCES:
923                                 raise PermissionDenied(func_call)
924                         elif oe.errno == errno.EROFS:
925                                 raise ReadOnlyFileSystem(func_call)
926                         elif oe.errno == errno.ENOENT:
927                                 raise FileNotFound(filename)
928                         else:
929                                 raise
930
931         new_mode = -1
932         st_mode = stat_cached.st_mode & 0o7777 # protect from unwanted bits
933         if mask >= 0:
934                 if mode == -1:
935                         mode = 0 # Don't add any mode bits when mode is unspecified.
936                 else:
937                         mode = mode & 0o7777
938                 if      (mode & st_mode != mode) or \
939                         ((mask ^ st_mode) & st_mode != st_mode):
940                         new_mode = mode | st_mode
941                         new_mode = (mask ^ new_mode) & new_mode
942         elif mode != -1:
943                 mode = mode & 0o7777 # protect from unwanted bits
944                 if mode != st_mode:
945                         new_mode = mode
946
947         # The chown system call may clear S_ISUID and S_ISGID
948         # bits, so those bits are restored if necessary.
949         if modified and new_mode == -1 and \
950                 (st_mode & stat.S_ISUID or st_mode & stat.S_ISGID):
951                 if mode == -1:
952                         new_mode = st_mode
953                 else:
954                         mode = mode & 0o7777
955                         if mask >= 0:
956                                 new_mode = mode | st_mode
957                                 new_mode = (mask ^ new_mode) & new_mode
958                         else:
959                                 new_mode = mode
960                         if not (new_mode & stat.S_ISUID or new_mode & stat.S_ISGID):
961                                 new_mode = -1
962
963         if not follow_links and stat.S_ISLNK(stat_cached.st_mode):
964                 # Mode doesn't matter for symlinks.
965                 new_mode = -1
966
967         if new_mode != -1:
968                 try:
969                         os.chmod(filename, new_mode)
970                         modified = True
971                 except OSError as oe:
972                         func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
973                         if oe.errno == errno.EPERM:
974                                 raise OperationNotPermitted(func_call)
975                         elif oe.errno == errno.EACCES:
976                                 raise PermissionDenied(func_call)
977                         elif oe.errno == errno.EROFS:
978                                 raise ReadOnlyFileSystem(func_call)
979                         elif oe.errno == errno.ENOENT:
980                                 raise FileNotFound(filename)
981                         raise
982         return modified
983
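# Illustrative worked example of the mode/mask arithmetic above: with
# mode=0o644 and mask=0, an existing 0o755 file already satisfies the request
# (its bits are a superset of 0o644), so no chmod is issued.  With mode=0o644
# and mask=0o022, an existing 0o666 file fails the mask test, so new_mode
# becomes mode | st_mode = 0o666 with the masked bits cleared, i.e. 0o644.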
984 def apply_stat_permissions(filename, newstat, **kwargs):
985         """A wrapper around apply_secpass_permissions that gets
986         uid, gid, and mode from a stat object"""
987         return apply_secpass_permissions(filename, uid=newstat.st_uid, gid=newstat.st_gid,
988         mode=newstat.st_mode, **kwargs)
989
990 def apply_recursive_permissions(top, uid=-1, gid=-1,
991         dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
992         """A wrapper around apply_secpass_permissions that applies permissions
993         recursively.  If optional argument onerror is specified, it should be a
994         function; it will be called with one argument, a PortageException instance.
995         Returns True if all permissions are applied and False if some are left
996         unapplied."""
997
998         # Avoid issues with circular symbolic links, as in bug #339670.
999         follow_links = False
1000
1001         if onerror is None:
1002                 # Default behavior is to dump errors to stderr so they won't
1003                 # go unnoticed.  Callers can pass in a quiet instance.
1004                 def onerror(e):
1005                         if isinstance(e, OperationNotPermitted):
1006                                 writemsg(_("Operation Not Permitted: %s\n") % str(e),
1007                                         noiselevel=-1)
1008                         elif isinstance(e, FileNotFound):
1009                                 writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)
1010                         else:
1011                                 raise
1012
1013         all_applied = True
1014         for dirpath, dirnames, filenames in os.walk(top):
1015                 try:
1016                         applied = apply_secpass_permissions(dirpath,
1017                                 uid=uid, gid=gid, mode=dirmode, mask=dirmask,
1018                                 follow_links=follow_links)
1019                         if not applied:
1020                                 all_applied = False
1021                 except PortageException as e:
1022                         all_applied = False
1023                         onerror(e)
1024
1025                 for name in filenames:
1026                         try:
1027                                 applied = apply_secpass_permissions(os.path.join(dirpath, name),
1028                                         uid=uid, gid=gid, mode=filemode, mask=filemask,
1029                                         follow_links=follow_links)
1030                                 if not applied:
1031                                         all_applied = False
1032                         except PortageException as e:
1033                                 # Ignore InvalidLocation exceptions such as FileNotFound
1034                                 # and DirectoryNotFound since sometimes things disappear,
1035                                 # like when adjusting permissions on DISTCC_DIR.
1036                                 if not isinstance(e, portage.exception.InvalidLocation):
1037                                         all_applied = False
1038                                         onerror(e)
1039         return all_applied
1040
1041 def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
1042         stat_cached=None, follow_links=True):
1043         """A wrapper around apply_permissions that uses secpass and simple
1044         logic to apply as much of the permissions as possible without
1045         generating an obviously avoidable permission exception. Despite
1046         attempts to avoid an exception, it's possible that one will be raised
1047         anyway, so be prepared.
1048         Returns True if all permissions are applied and False if some are left
1049         unapplied."""
1050
1051         if stat_cached is None:
1052                 try:
1053                         if follow_links:
1054                                 stat_cached = os.stat(filename)
1055                         else:
1056                                 stat_cached = os.lstat(filename)
1057                 except OSError as oe:
1058                         func_call = "stat('%s')" % filename
1059                         if oe.errno == errno.EPERM:
1060                                 raise OperationNotPermitted(func_call)
1061                         elif oe.errno == errno.EACCES:
1062                                 raise PermissionDenied(func_call)
1063                         elif oe.errno == errno.ENOENT:
1064                                 raise FileNotFound(filename)
1065                         else:
1066                                 raise
1067
1068         all_applied = True
1069
1070         if portage.data.secpass < 2:
1071
1072                 if uid != -1 and \
1073                 uid != stat_cached.st_uid:
1074                         all_applied = False
1075                         uid = -1
1076
1077                 if gid != -1 and \
1078                 gid != stat_cached.st_gid and \
1079                 gid not in os.getgroups():
1080                         all_applied = False
1081                         gid = -1
1082
1083         apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
1084                 stat_cached=stat_cached, follow_links=follow_links)
1085         return all_applied
1086
1087 class atomic_ofstream(ObjectProxy):
1088         """Write a file atomically via os.rename().  Atomic replacement prevents
1089         interprocess interference and prevents corruption of the target
1090         file when the write is interrupted (for example, when an 'out of space'
1091         error occurs)."""
1092
1093         def __init__(self, filename, mode='w', follow_links=True, **kargs):
1094                 """Opens a temporary filename.pid in the same directory as filename."""
1095                 ObjectProxy.__init__(self)
1096                 object.__setattr__(self, '_aborted', False)
1097                 if 'b' in mode:
1098                         open_func = open
1099                 else:
1100                         open_func = io.open
1101                         kargs.setdefault('encoding', _encodings['content'])
1102                         kargs.setdefault('errors', 'backslashreplace')
1103
1104                 if follow_links:
1105                         canonical_path = os.path.realpath(filename)
1106                         object.__setattr__(self, '_real_name', canonical_path)
1107                         tmp_name = "%s.%i" % (canonical_path, os.getpid())
1108                         try:
1109                                 object.__setattr__(self, '_file',
1110                                         open_func(_unicode_encode(tmp_name,
1111                                                 encoding=_encodings['fs'], errors='strict'),
1112                                                 mode=mode, **kargs))
1113                                 return
1114                         except IOError as e:
1115                                 if canonical_path == filename:
1116                                         raise
1117                                 # Ignore this error, since it's irrelevant
1118                                 # and the below open call will produce a
1119                                 # new error if necessary.
1120
1121                 object.__setattr__(self, '_real_name', filename)
1122                 tmp_name = "%s.%i" % (filename, os.getpid())
1123                 object.__setattr__(self, '_file',
1124                         open_func(_unicode_encode(tmp_name,
1125                                 encoding=_encodings['fs'], errors='strict'),
1126                                 mode=mode, **kargs))
1127
1128         def _get_target(self):
1129                 return object.__getattribute__(self, '_file')
1130
1131         if sys.hexversion >= 0x3000000:
1132
1133                 def __getattribute__(self, attr):
1134                         if attr in ('close', 'abort', '__del__'):
1135                                 return object.__getattribute__(self, attr)
1136                         return getattr(object.__getattribute__(self, '_file'), attr)
1137
1138         else:
1139
1140                 # For TextIOWrapper, automatically coerce write calls to
1141                 # unicode, in order to avoid TypeError when writing raw
1142                 # bytes with python2.
1143
1144                 def __getattribute__(self, attr):
1145                         if attr in ('close', 'abort', 'write', '__del__'):
1146                                 return object.__getattribute__(self, attr)
1147                         return getattr(object.__getattribute__(self, '_file'), attr)
1148
1149                 def write(self, s):
1150                         f = object.__getattribute__(self, '_file')
1151                         if isinstance(f, io.TextIOWrapper):
1152                                 s = _unicode_decode(s)
1153                         return f.write(s)
1154
1155         def close(self):
1156                 """Closes the temporary file, copies permissions (if possible),
1157                 and performs the atomic replacement via os.rename().  If the abort()
1158                 method has been called, then the temp file is closed and removed."""
1159                 f = object.__getattribute__(self, '_file')
1160                 real_name = object.__getattribute__(self, '_real_name')
1161                 if not f.closed:
1162                         try:
1163                                 f.close()
1164                                 if not object.__getattribute__(self, '_aborted'):
1165                                         try:
1166                                                 apply_stat_permissions(f.name, os.stat(real_name))
1167                                         except OperationNotPermitted:
1168                                                 pass
1169                                         except FileNotFound:
1170                                                 pass
1171                                         except OSError as oe: # from the above os.stat call
1172                                                 if oe.errno in (errno.ENOENT, errno.EPERM):
1173                                                         pass
1174                                                 else:
1175                                                         raise
1176                                         os.rename(f.name, real_name)
1177                         finally:
1178                                 # Make sure we cleanup the temp file
1179                                 # even if an exception is raised.
1180                                 try:
1181                                         os.unlink(f.name)
1182                                 except OSError:
1183                                         pass
1184
1185         def abort(self):
1186                 """If an error occurs while writing the file, the user should
1187                 call this method in order to leave the target file unchanged.
1188                 This will call close() automatically."""
1189                 if not object.__getattribute__(self, '_aborted'):
1190                         object.__setattr__(self, '_aborted', True)
1191                         self.close()
1192
1193         def __del__(self):
1194                 """If the user does not explicitly call close(), it is
1195                 assumed that an error has occurred, so we abort()."""
1196                 try:
1197                         f = object.__getattribute__(self, '_file')
1198                 except AttributeError:
1199                         pass
1200                 else:
1201                         if not f.closed:
1202                                 self.abort()
1203                 # ensure destructor from the base class is called
1204                 base_destructor = getattr(ObjectProxy, '__del__', None)
1205                 if base_destructor is not None:
1206                         base_destructor(self)
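
# Illustrative sketch of typical atomic_ofstream usage (the target path is
# hypothetical).  The temporary file only replaces the target when close()
# succeeds; abort() discards it and leaves the target untouched:
#
#     f = atomic_ofstream("/etc/example.conf")
#     try:
#         f.write("key = value\n")
#     except (IOError, OSError):
#         f.abort()
#         raise
#     else:
#         f.close()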
1207
1208 def write_atomic(file_path, content, **kwargs):
1209         f = None
1210         try:
1211                 f = atomic_ofstream(file_path, **kwargs)
1212                 f.write(content)
1213                 f.close()
1214         except (IOError, OSError) as e:
1215                 if f:
1216                         f.abort()
1217                 func_call = "write_atomic('%s')" % file_path
1218                 if e.errno == errno.EPERM:
1219                         raise OperationNotPermitted(func_call)
1220                 elif e.errno == errno.EACCES:
1221                         raise PermissionDenied(func_call)
1222                 elif e.errno == errno.EROFS:
1223                         raise ReadOnlyFileSystem(func_call)
1224                 elif e.errno == errno.ENOENT:
1225                         raise FileNotFound(file_path)
1226                 else:
1227                         raise
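
# Illustrative sketch of write_atomic() usage (path and content are
# hypothetical).  Low-level IOError/OSError values are translated into
# portage exception types, so callers can catch PortageException instead
# of inspecting errno:
#
#     try:
#         write_atomic("/etc/example.conf", "key = value\n")
#     except PortageException as e:
#         writemsg("!!! %s\n" % (e,), noiselevel=-1)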
1228
1229 def ensure_dirs(dir_path, **kwargs):
1230         """Create a directory and call apply_permissions.
1231         Returns True if a directory was created or the permissions needed to be
1232         modified, and False otherwise.
1233
1234         This function's handling of EEXIST errors makes it useful for atomic
1235         directory creation, in which multiple processes may be competing to
1236         create the same directory.
1237         """
1238
1239         created_dir = False
1240
1241         try:
1242                 os.makedirs(dir_path)
1243                 created_dir = True
1244         except OSError as oe:
1245                 func_call = "makedirs('%s')" % dir_path
1246                 if oe.errno in (errno.EEXIST,):
1247                         pass
1248                 else:
1249                         if os.path.isdir(dir_path):
1250                                 # NOTE: DragonFly raises EPERM for makedir('/')
1251                                 # and that is supposed to be ignored here.
1252                                 # Also, sometimes mkdir raises EISDIR on FreeBSD
1253                                 # and we want to ignore that too (bug #187518).
1254                                 pass
1255                         elif oe.errno == errno.EPERM:
1256                                 raise OperationNotPermitted(func_call)
1257                         elif oe.errno == errno.EACCES:
1258                                 raise PermissionDenied(func_call)
1259                         elif oe.errno == errno.EROFS:
1260                                 raise ReadOnlyFileSystem(func_call)
1261                         else:
1262                                 raise
1263         if kwargs:
1264                 perms_modified = apply_permissions(dir_path, **kwargs)
1265         else:
1266                 perms_modified = False
1267         return created_dir or perms_modified
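
# Illustrative sketch of ensure_dirs() usage (the path is hypothetical).
# Keyword arguments are forwarded to apply_permissions(), and the return
# value is True when the directory was created or its permissions were
# adjusted:
#
#     if ensure_dirs("/var/tmp/example", mode=0o755):
#         writemsg("created or adjusted /var/tmp/example\n", noiselevel=1)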
1268
1269 class LazyItemsDict(UserDict):
1270         """A mapping object that behaves like a standard dict except that it allows
1271         for lazy initialization of values via callable objects.  Lazy items can be
1272         overwritten and deleted just as normal items."""
1273
1274         __slots__ = ('lazy_items',)
1275
1276         def __init__(self, *args, **kwargs):
1277
1278                 self.lazy_items = {}
1279                 UserDict.__init__(self, *args, **kwargs)
1280
1281         def addLazyItem(self, item_key, value_callable, *pargs, **kwargs):
1282                 """Add a lazy item for the given key.  When the item is requested,
1283                 value_callable will be called with *pargs and **kwargs arguments."""
1284                 self.lazy_items[item_key] = \
1285                         self._LazyItem(value_callable, pargs, kwargs, False)
1286                 # make it show up in self.keys(), etc...
1287                 UserDict.__setitem__(self, item_key, None)
1288
1289         def addLazySingleton(self, item_key, value_callable, *pargs, **kwargs):
1290                 """This is like addLazyItem except value_callable will be called
1291                 at most once, and the result will be cached for future requests."""
1292                 self.lazy_items[item_key] = \
1293                         self._LazyItem(value_callable, pargs, kwargs, True)
1294                 # make it show up in self.keys(), etc...
1295                 UserDict.__setitem__(self, item_key, None)
1296
1297         def update(self, *args, **kwargs):
1298                 if len(args) > 1:
1299                         raise TypeError(
1300                                 "expected at most 1 positional argument, got " + \
1301                                 repr(len(args)))
1302                 if args:
1303                         map_obj = args[0]
1304                 else:
1305                         map_obj = None
1306                 if map_obj is None:
1307                         pass
1308                 elif isinstance(map_obj, LazyItemsDict):
1309                         for k in map_obj:
1310                                 if k in map_obj.lazy_items:
1311                                         UserDict.__setitem__(self, k, None)
1312                                 else:
1313                                         UserDict.__setitem__(self, k, map_obj[k])
1314                         self.lazy_items.update(map_obj.lazy_items)
1315                 else:
1316                         UserDict.update(self, map_obj)
1317                 if kwargs:
1318                         UserDict.update(self, kwargs)
1319
1320         def __getitem__(self, item_key):
1321                 if item_key in self.lazy_items:
1322                         lazy_item = self.lazy_items[item_key]
1323                         pargs = lazy_item.pargs
1324                         if pargs is None:
1325                                 pargs = ()
1326                         kwargs = lazy_item.kwargs
1327                         if kwargs is None:
1328                                 kwargs = {}
1329                         result = lazy_item.func(*pargs, **kwargs)
1330                         if lazy_item.singleton:
1331                                 self[item_key] = result
1332                         return result
1333
1334                 else:
1335                         return UserDict.__getitem__(self, item_key)
1336
1337         def __setitem__(self, item_key, value):
1338                 if item_key in self.lazy_items:
1339                         del self.lazy_items[item_key]
1340                 UserDict.__setitem__(self, item_key, value)
1341
1342         def __delitem__(self, item_key):
1343                 if item_key in self.lazy_items:
1344                         del self.lazy_items[item_key]
1345                 UserDict.__delitem__(self, item_key)
1346
1347         def clear(self):
1348                 self.lazy_items.clear()
1349                 UserDict.clear(self)
1350
1351         def copy(self):
1352                 return self.__copy__()
1353
1354         def __copy__(self):
1355                 return self.__class__(self)
1356
1357         def __deepcopy__(self, memo=None):
1358                 """
1359                 This forces evaluation of each contained lazy item, and deepcopy of
1360                 the result. A TypeError is raised if any contained lazy item is not
1361                 a singleton, since it is not necessarily possible for the behavior
1362                 of this type of item to be safely preserved.
1363                 """
1364                 if memo is None:
1365                         memo = {}
1366                 result = self.__class__()
1367                 memo[id(self)] = result
1368                 for k in self:
1369                         k_copy = deepcopy(k, memo)
1370                         lazy_item = self.lazy_items.get(k)
1371                         if lazy_item is not None:
1372                                 if not lazy_item.singleton:
1373                                         raise TypeError(_unicode_decode("LazyItemsDict " + \
1374                                                 "deepcopy is unsafe with lazy items that are " + \
1375                                                 "not singletons: key=%s value=%s") % (k, lazy_item,))
1376                         UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
1377                 return result
1378
1379         class _LazyItem(object):
1380
1381                 __slots__ = ('func', 'pargs', 'kwargs', 'singleton')
1382
1383                 def __init__(self, func, pargs, kwargs, singleton):
1384
1385                         if not pargs:
1386                                 pargs = None
1387                         if not kwargs:
1388                                 kwargs = None
1389
1390                         self.func = func
1391                         self.pargs = pargs
1392                         self.kwargs = kwargs
1393                         self.singleton = singleton
1394
1395                 def __copy__(self):
1396                         return self.__class__(self.func, self.pargs,
1397                                 self.kwargs, self.singleton)
1398
1399                 def __deepcopy__(self, memo=None):
1400                         """
1401                         Override this since the default implementation can fail silently,
1402                         leaving some attributes unset.
1403                         """
1404                         if memo is None:
1405                                 memo = {}
1406                         result = self.__copy__()
1407                         memo[id(self)] = result
1408                         result.func = deepcopy(self.func, memo)
1409                         result.pargs = deepcopy(self.pargs, memo)
1410                         result.kwargs = deepcopy(self.kwargs, memo)
1411                         result.singleton = deepcopy(self.singleton, memo)
1412                         return result
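
# Illustrative sketch of LazyItemsDict usage (load_settings is a
# hypothetical callable).  The callable is not invoked until the key is
# first accessed; addLazySingleton() additionally caches the result so the
# callable runs at most once:
#
#     d = LazyItemsDict()
#     d.addLazySingleton("settings", load_settings)
#     value = d["settings"]    # load_settings() is called here
#     value = d["settings"]    # cached result, no second call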
1413
1414 class ConfigProtect(object):
1415         def __init__(self, myroot, protect_list, mask_list):
1416                 self.myroot = myroot
1417                 self.protect_list = protect_list
1418                 self.mask_list = mask_list
1419                 self.updateprotect()
1420
1421         def updateprotect(self):
1422                 """Update internal state for isprotected() calls.  Nonexistent paths
1423                 are ignored."""
1424
1425                 os = _os_merge
1426
1427                 self.protect = []
1428                 self._dirs = set()
1429                 for x in self.protect_list:
1430                         ppath = normalize_path(
1431                                 os.path.join(self.myroot, x.lstrip(os.path.sep)))
1432                         try:
1433                                 if stat.S_ISDIR(os.stat(ppath).st_mode):
1434                                         self._dirs.add(ppath)
1435                                 self.protect.append(ppath)
1436                         except OSError:
1437                                 # If it doesn't exist, there's no need to protect it.
1438                                 pass
1439
1440                 self.protectmask = []
1441                 for x in self.mask_list:
1442                         ppath = normalize_path(
1443                                 os.path.join(self.myroot, x.lstrip(os.path.sep)))
1444                         try:
1445                                 # Use lstat so that anything, even a broken
1446                                 # symlink, can be protected.
1447                                 if stat.S_ISDIR(os.lstat(ppath).st_mode):
1448                                         self._dirs.add(ppath)
1449                                 self.protectmask.append(ppath)
1450                                 # Now use stat in case this is a symlink to a directory.
1451                                 if stat.S_ISDIR(os.stat(ppath).st_mode):
1452                                         self._dirs.add(ppath)
1453                         except OSError:
1454                                 # If it doesn't exist, there's no need to mask it.
1455                                 pass
1456
1457         def isprotected(self, obj):
1458                 """Returns True if obj is protected, False otherwise.  The caller must
1459                 ensure that obj is normalized with a single leading slash.  A trailing
1460                 slash is optional for directories."""
1461                 masked = 0
1462                 protected = 0
1463                 sep = os.path.sep
1464                 for ppath in self.protect:
1465                         if len(ppath) > masked and obj.startswith(ppath):
1466                                 if ppath in self._dirs:
1467                                         if obj != ppath and not obj.startswith(ppath + sep):
1468                                                 # /etc/foo does not match /etc/foobaz
1469                                                 continue
1470                                 elif obj != ppath:
1471                                         # force exact match when CONFIG_PROTECT lists a
1472                                         # non-directory
1473                                         continue
1474                                 protected = len(ppath)
1475                                 #config file management
1476                                 for pmpath in self.protectmask:
1477                                         if len(pmpath) >= protected and obj.startswith(pmpath):
1478                                                 if pmpath in self._dirs:
1479                                                         if obj != pmpath and \
1480                                                                 not obj.startswith(pmpath + sep):
1481                                                                 # /etc/foo does not match /etc/foobaz
1482                                                                 continue
1483                                                 elif obj != pmpath:
1484                                                         # force exact match when CONFIG_PROTECT_MASK lists
1485                                                         # a non-directory
1486                                                         continue
1487                                                 #skip, it's in the mask
1488                                                 masked = len(pmpath)
1489                 return protected > masked
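
# Illustrative sketch of ConfigProtect usage (paths are hypothetical).
# isprotected() uses longest-prefix matching: a path is protected when the
# longest matching CONFIG_PROTECT entry is longer than any matching
# CONFIG_PROTECT_MASK entry:
#
#     cp = ConfigProtect("/", ["/etc"], ["/etc/env.d"])
#     cp.isprotected("/etc/fstab")             # True  (under /etc)
#     cp.isprotected("/etc/env.d/02locale")    # False (masked)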
1490
1491 def new_protect_filename(mydest, newmd5=None, force=False):
1492         """Resolves a config-protect filename for merging, optionally
1493         using the last filename if the md5 matches. If force is True,
1494         then a new filename will be generated even if mydest does not
1495         exist yet.
1496         (dest, md5) ==> 'string' --- the last ._cfg file if its md5 matches, otherwise the next ._cfg filename
1497         (dest)      ==> 'string' --- the next available ._cfg target filename
1498         """
1499
1500         # config protection filename format:
1501         # ._cfg0000_foo
1502         # 0123456789012
1503
1504         os = _os_merge
1505
1506         prot_num = -1
1507         last_pfile = ""
1508
1509         if not force and \
1510                 not os.path.exists(mydest):
1511                 return mydest
1512
1513         real_filename = os.path.basename(mydest)
1514         real_dirname  = os.path.dirname(mydest)
1515         for pfile in os.listdir(real_dirname):
1516                 if pfile[0:5] != "._cfg":
1517                         continue
1518                 if pfile[10:] != real_filename:
1519                         continue
1520                 try:
1521                         new_prot_num = int(pfile[5:9])
1522                         if new_prot_num > prot_num:
1523                                 prot_num = new_prot_num
1524                                 last_pfile = pfile
1525                 except ValueError:
1526                         continue
1527         prot_num = prot_num + 1
1528
1529         new_pfile = normalize_path(os.path.join(real_dirname,
1530                 "._cfg" + str(prot_num).zfill(4) + "_" + real_filename))
1531         old_pfile = normalize_path(os.path.join(real_dirname, last_pfile))
1532         if last_pfile and newmd5:
1533                 try:
1534                         last_pfile_md5 = portage.checksum._perform_md5_merge(old_pfile)
1535                 except FileNotFound:
1536                         # The file suddenly disappeared or it's a broken symlink.
1537                         pass
1538                 else:
1539                         if last_pfile_md5 == newmd5:
1540                                 return old_pfile
1541         return new_pfile
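
# Illustrative sketch of new_protect_filename() behavior (the path is
# hypothetical).  When /etc/example.conf already exists, the next free
# ._cfg????_ name in the same directory is returned:
#
#     new_protect_filename("/etc/example.conf")
#     # -> "/etc/._cfg0000_example.conf", or a higher number if earlier
#     #    ._cfgNNNN_example.conf entries already exist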
1542
1543 def find_updated_config_files(target_root, config_protect):
1544         """
1545         Generator that yields a tuple for each configuration file that
1546         needs to be updated. Each tuple is organized like this:
1547         ( protected_dir, file_list )
1548         If the protected config is a file rather than a directory, the tuple is:
1549         ( protected_file, None )
1550         If no configuration files need to be updated, nothing is yielded.
1551         """
1552
1553         os = _os_merge
1554
1555         if config_protect:
1556                 # directories with some protect files in them
1557                 for x in config_protect:
1558                         files = []
1559
1560                         x = os.path.join(target_root, x.lstrip(os.path.sep))
1561                         if not os.access(x, os.W_OK):
1562                                 continue
1563                         try:
1564                                 mymode = os.lstat(x).st_mode
1565                         except OSError:
1566                                 continue
1567
1568                         if stat.S_ISLNK(mymode):
1569                                 # We want to treat it like a directory if it
1570                                 # is a symlink to an existing directory.
1571                                 try:
1572                                         real_mode = os.stat(x).st_mode
1573                                         if stat.S_ISDIR(real_mode):
1574                                                 mymode = real_mode
1575                                 except OSError:
1576                                         pass
1577
1578                         if stat.S_ISDIR(mymode):
1579                                 mycommand = \
1580                                         "find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
1581                         else:
1582                                 mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
1583                                                 os.path.split(x.rstrip(os.path.sep))
1584                         mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
1585                         a = subprocess_getstatusoutput(mycommand)
1586
1587                         if a[0] == 0:
1588                                 files = a[1].split('\0')
1589                                 # split always produces an empty string as the last element
1590                                 if files and not files[-1]:
1591                                         del files[-1]
1592                                 if files:
1593                                         if stat.S_ISDIR(mymode):
1594                                                 yield (x, files)
1595                                         else:
1596                                                 yield (x, None)
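
# Illustrative sketch of consuming the find_updated_config_files()
# generator (the root and CONFIG_PROTECT values are hypothetical):
#
#     for x, files in find_updated_config_files("/", ["/etc"]):
#         if files is None:
#             writemsg("protected file needs update: %s\n" % x, noiselevel=1)
#         else:
#             writemsg("%d update(s) pending under %s\n" % (len(files), x),
#                 noiselevel=1)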
1597
1598 def getlibpaths(root, env=None):
1599         """ Return a list of paths that are used for library lookups """
1600         if env is None:
1601                 env = os.environ
1602         # the following is based on the information from ld.so(8)
1603         rval = env.get("LD_LIBRARY_PATH", "").split(":")
1604         rval.extend(grabfile(os.path.join(root, "etc", "ld.so.conf")))
1605         rval.append("/usr/lib")
1606         rval.append("/lib")
1607
1608         return [normalize_path(x) for x in rval if x]
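
# Illustrative sketch of getlibpaths() usage (environment value is
# hypothetical).  LD_LIBRARY_PATH entries come first, followed by the
# paths listed in etc/ld.so.conf under the given root, then /usr/lib
# and /lib:
#
#     paths = getlibpaths("/", env={"LD_LIBRARY_PATH": "/opt/example/lib"})
#     # -> ["/opt/example/lib", ..., "/usr/lib", "/lib"]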