1 # Copyright 2004-2011 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
4 __all__ = ['apply_permissions', 'apply_recursive_permissions',
5 'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream',
6 'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs',
7 'find_updated_config_files', 'getconfig', 'getlibpaths', 'grabdict',
8 'grabdict_package', 'grabfile', 'grabfile_package', 'grablines',
9 'initialize_logger', 'LazyItemsDict', 'map_dictlist_vals',
10 'new_protect_filename', 'normalize_path', 'pickle_read', 'stack_dictlist',
11 'stack_dicts', 'stack_lists', 'unique_array', 'unique_everseen', 'varexpand',
12 'write_atomic', 'writedict', 'writemsg', 'writemsg_level', 'writemsg_stdout']
14 from copy import deepcopy
18 from itertools import filterfalse
20 from itertools import ifilterfalse as filterfalse
30 portage.proxy.lazyimport.lazyimport(globals(),
33 'portage.util.listdir:_ignorecvs_dirs'
36 from portage import os
37 from portage import subprocess_getstatusoutput
38 from portage import _encodings
39 from portage import _os_merge
40 from portage import _unicode_encode
41 from portage import _unicode_decode
42 from portage.exception import InvalidAtom, PortageException, FileNotFound, \
43 OperationNotPermitted, PermissionDenied, ReadOnlyFileSystem
44 from portage.localization import _
45 from portage.proxy.objectproxy import ObjectProxy
46 from portage.cache.mappings import UserDict
def initialize_logger(level=logging.WARN):
    """Sets up basic logging of portage activities.

    @param level: the numeric logging level to emit messages at
        (e.g. logging.INFO, logging.DEBUG, logging.WARNING)
    @return: None
    """
    # Bug fix: the 'level' argument was previously ignored — logging.WARN
    # was hard-coded in the basicConfig() call, so callers could never
    # raise or lower verbosity.
    logging.basicConfig(level=level, format='[%(levelname)-4s] %(message)s')
def writemsg(mystr,noiselevel=0,fd=None):
    """Prints out warning and debug messages based on the noiselimit setting"""
    # Messages whose noiselevel exceeds the module-wide 'noiselimit' are
    # suppressed entirely.
    if noiselevel <= noiselimit:
        # avoid potential UnicodeEncodeError
        if isinstance(fd, io.StringIO):
            # In-memory text targets need unicode text, not bytes.
            mystr = _unicode_decode(mystr,
                encoding=_encodings['content'], errors='replace')
        # Real byte streams get encoded output; backslashreplace keeps
        # unencodable characters visible instead of raising.
        mystr = _unicode_encode(mystr,
            encoding=_encodings['stdio'], errors='backslashreplace')
        if sys.hexversion >= 0x3000000 and fd in (sys.stdout, sys.stderr):
            # NOTE(review): presumably switches to the stream's underlying
            # binary buffer so encoded bytes can be written on Python 3 —
            # confirm.
def writemsg_stdout(mystr,noiselevel=0):
    """Prints messages stdout based on the noiselimit setting"""
    # Thin convenience wrapper: identical to writemsg() except the output
    # target is forced to stdout instead of the default stream.
    writemsg(mystr, fd=sys.stdout, noiselevel=noiselevel)
def writemsg_level(msg, level=0, noiselevel=0):
    """
    Show a message for the given level as defined by the logging module
    (default is 0). When level >= logging.WARNING then the message is
    sent to stderr, otherwise it is sent to stdout. The noiselevel is
    passed directly to writemsg().

    @param msg: a message string, including newline if appropriate
    @param level: a numeric logging level (see the logging module)
    @param noiselevel: passed directly to writemsg
    """
    # Warnings and above go to stderr; everything else to stdout.
    fd = sys.stderr if level >= logging.WARNING else sys.stdout
    writemsg(msg, noiselevel=noiselevel, fd=fd)
def normalize_path(mypath):
    """
    os.path.normpath("//foo") returns "//foo" instead of "/foo"
    We dislike this behavior so we create our own normpath func
    """
    # Pick a separator of the same type (bytes vs text) as the input.
    if sys.hexversion >= 0x3000000 and isinstance(mypath, bytes):
        sep = os.path.sep.encode()
    else:
        sep = os.path.sep

    if not mypath.startswith(sep):
        return os.path.normpath(mypath)
    # posixpath.normpath collapses 3 or more leading slashes to just 1,
    # while preserving exactly 2 — so prefixing two extra separators
    # guarantees the result has a single leading slash.
    return os.path.normpath(sep + sep + mypath)
def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False):
    """This function grabs the lines in a file, normalizes whitespace and returns lines in a list; if a line
    begins with a #, it is ignored, as are empty lines"""

    # Source files are always tracked internally; they are dropped from the
    # result below unless remember_source_file is set.
    mylines=grablines(myfilename, recursive, remember_source_file=True)
    for x, source_file in mylines:
        #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
        if x and x[0] != "#":
                # Collect tokens up to any inline comment marker.
                mylinetemp.append(item)
        # Collapse internal whitespace runs to single spaces.
        myline = " ".join(myline)
            # Check if we have a compat-level string. BC-integration data.
            # '##COMPAT==>N<==' 'some string attached to it'
            mylinetest = myline.split("<==",1)
            if len(mylinetest) == 2:
                myline_potential = mylinetest[1]
                mylinetest = mylinetest[0].split("##COMPAT==>")
                if len(mylinetest) == 2:
                    if compat_level >= int(mylinetest[1]):
                        # It's a compat line, and the key matches.
                        newlines.append(myline_potential)
        if remember_source_file:
            newlines.append((myline, source_file))
            # NOTE(review): the bare append below appears to be the else
            # branch (plain lines without source annotation) — confirm.
            newlines.append(myline)
def map_dictlist_vals(func,myDict):
    """Performs a function on each value of each key in a dictlist.
    Returns a new dictlist."""
    # Build a fresh dict; the input dictlist is left untouched.
    return dict((key, [func(item) for item in values])
        for key, values in myDict.items())
def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0):
    """
    Stacks an array of dict-types into one array. Optionally merging or
    overwriting matching key/value pairs for the dict[key]->list.
    Returns a single dict. Higher index in lists is preferenced.

    Example usage:
    >>> from portage.util import stack_dictlist
    >>> print stack_dictlist( [{'a':'b'},{'x':'y'}])
    >>> {'a':'b','x':'y'}
    >>> print stack_dictlist( [{'a':'b'},{'a':'c'}], incremental = True )
    >>> a = {'KEYWORDS':['x86','alpha']}
    >>> b = {'KEYWORDS':['-x86']}
    >>> print stack_dictlist( [a,b] )
    >>> { 'KEYWORDS':['x86','alpha','-x86']}
    >>> print stack_dictlist( [a,b], incremental=True)
    >>> { 'KEYWORDS':['alpha'] }
    >>> print stack_dictlist( [a,b], incrementals=['KEYWORDS'])
    >>> { 'KEYWORDS':['alpha'] }

    @param original_dicts a list of (dictionary objects or None)
    @param incremental True or false depending on whether new keys should overwrite
       keys which already exist.
    @param incrementals A list of items that should be incremental (-foo removes foo from
       the returned dict).
    @param ignore_none Appears to be ignored, but probably was used long long ago.
    """
    # NOTE(review): incrementals=[] is a mutable default argument — shared
    # across calls; safe only while nothing ever mutates it.
    for mydict in original_dicts:
        # Later dicts in the list take precedence over earlier ones.
        if not y in final_dict:
        for thing in mydict[y]:
            # Incremental keys honor '-item' (remove) notation.
            if incremental or y in incrementals:
            elif thing[:1] == '-':
                final_dict[y].remove(thing[1:])
            if thing not in final_dict[y]:
                final_dict[y].append(thing)
        # Keys whose value list ended up empty are dropped from the result.
        if y in final_dict and not final_dict[y]:
def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
    """Stacks an array of dict-types into one array. Optionally merging or
    overwriting matching key/value pairs for the dict[key]->string.
    Returns a single dict."""
    # NOTE(review): incrementals=[] is a mutable default argument — shared
    # across calls; safe only while nothing ever mutates it.
        for k, v in mydict.items():
            # Incremental keys accumulate values space-separated; later
            # dicts otherwise simply overwrite earlier values.
            if k in final_dict and (incremental or (k in incrementals)):
                final_dict[k] += " " + v
def append_repo(atom_list, repo_name, remember_source_file=False):
    """
    Takes a list of valid atoms without repo spec and appends ::repo_name.
    """
    def _with_repo(atom):
        # Rebuild each atom with the repo spec appended.
        return Atom(atom + "::" + repo_name,
            allow_wildcard=True, allow_repo=True)

    if remember_source_file:
        # Entries arrive as (atom, source_file) pairs in this mode.
        return [(_with_repo(atom), source) for atom, source in atom_list]
    return [_with_repo(atom) for atom in atom_list]
def stack_lists(lists, incremental=1, remember_source_file=False,
    warn_for_unmatched_removal=False, strict_warn_for_unmatched_removal=False, ignore_repo=False):
    """Stacks an array of list-types into one array. Optionally removing
    distinct values using '-value' notation. Higher index is preferenced.

    all elements must be hashable."""
    # matched_removals: '-token' entries that actually removed something.
    # unmatched_removals: source_file -> set of '-token' entries that had
    # nothing to remove; reported below when warn_for_unmatched_removal.
    matched_removals = set()
    unmatched_removals = {}
    for sub_list in lists:
        for token in sub_list:
            if remember_source_file:
                # In this mode each entry is a (token, source_file) pair.
                token, source_file = token
            elif token[:1] == '-':
                # Removal entry: '-foo' deletes a previously stacked 'foo'.
                if ignore_repo and not "::" in token:
                    #Let -cat/pkg remove cat/pkg::repo.
                    token_slice = token[1:]
                    for atom in new_list:
                        atom_without_repo = atom
                        if atom.repo is not None:
                            # Atom.without_repo instantiates a new Atom,
                            # which is unnecessary here, so use string
                            # replacement instead.
                            atom_without_repo = \
                                atom.replace("::" + atom.repo, "", 1)
                        if atom_without_repo == token_slice:
                            to_be_removed.append(atom)
                    for atom in to_be_removed:
                # Exact-match removal path: pop the bare token.
                new_list.pop(token[1:])
                    (strict_warn_for_unmatched_removal or \
                    token_key not in matched_removals):
                    unmatched_removals.setdefault(source_file, set()).add(token)
                matched_removals.add(token_key)
                new_list[token] = source_file
            new_list[token] = source_file

    if warn_for_unmatched_removal:
        for source_file, tokens in unmatched_removals.items():
            # With many unmatched tokens, show a three-item sample plus a
            # count of the remainder; otherwise list them all.
                selected = [tokens.pop(), tokens.pop(), tokens.pop()]
                writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \
                    (source_file, ", ".join(selected), len(tokens)),
                writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") % (source_file, ", ".join(tokens)),

    if remember_source_file:
        return list(new_list.items())
    return list(new_list)
def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
    """
    This function grabs the lines in a file, normalizes whitespace and returns lines in a dictionary

    @param myfilename: file to process
    @type myfilename: string (path)
    @param juststrings: only return strings
    @type juststrings: Boolean (integer)
    @param empty: Ignore certain lines
    @type empty: Boolean (integer)
    @param recursive: Recursively grab ( support for /etc/portage/package.keywords/* and friends )
    @type recursive: Boolean (integer)
    @param incremental: Append to the return list, don't overwrite
    @type incremental: Boolean (integer)
    @return:
    1.  Returns the lines in a file in a dictionary, for example:
        'sys-apps/portage x86 amd64 ppc'
        { "sys-apps/portage" : [ 'x86', 'amd64', 'ppc' ]
    the line syntax is key : [list of values]
    """
    for x in grablines(myfilename, recursive):
        #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
            # Tokens after an inline comment marker are dropped.
            mylinetemp.append(item)
        # Skip lines with too few tokens, depending on the 'empty' mode.
        if len(myline) < 2 and empty == 0:
        if len(myline) < 1 and empty == 1:
        # incremental: merge values into any existing list for this key.
        newdict.setdefault(myline[0], []).extend(myline[1:])
            # non-incremental: later lines overwrite earlier ones.
            newdict[myline[0]] = myline[1:]
    # juststrings mode flattens each value list to one space-joined string.
    for k, v in newdict.items():
        newdict[k] = " ".join(v)
def read_corresponding_eapi_file(filename):
    """
    Read the 'eapi' file from the directory 'filename' is in.
    Returns "0" if the file is not present or invalid.
    """
    eapi_file = os.path.join(os.path.dirname(filename), "eapi")
    # NOTE(review): the open() below should be closed on all paths —
    # confirm the close/except handling here.
    f = open(eapi_file, "r")
    lines = f.readlines()
    # A valid 'eapi' file holds exactly one line: the EAPI string itself.
    eapi = lines[0].rstrip("\n")
    writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
def grabdict_package(myfilename, juststrings=0, recursive=0, allow_wildcard=False, allow_repo=False,
    verify_eapi=False, eapi=None):
    """ Does the same thing as grabdict except it validates keys
    with isvalidatom()"""
    pkgs=grabdict(myfilename, juststrings, empty=1, recursive=recursive)
    if verify_eapi and eapi is None:
        # Fall back to the 'eapi' file alongside myfilename.
        eapi = read_corresponding_eapi_file(myfilename)

    # We need to call keys() here in order to avoid the possibility of
    # "RuntimeError: dictionary changed size during iteration"
    # when an invalid atom is deleted.
    for k, v in pkgs.items():
            # Replace each raw key string with a validated Atom instance.
            k = Atom(k, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
        except InvalidAtom as e:
            # Invalid atoms are reported and dropped rather than fatal.
            writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=False, allow_repo=False,
    remember_source_file=False, verify_eapi=False, eapi=None):
    # Like grabfile(), but validates each entry with Atom(); entries with a
    # special leading prefix are returned as plain strings instead.
    pkgs=grabfile(myfilename, compatlevel, recursive=recursive, remember_source_file=True)
    if verify_eapi and eapi is None:
        eapi = read_corresponding_eapi_file(myfilename)
    mybasename = os.path.basename(myfilename)
    for pkg, source_file in pkgs:
        # for packages and package.mask files
        # The '*' prefix is significant only in 'packages' files.
        if pkg[:1] == '*' and mybasename == 'packages':
            pkg = Atom(pkg, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
        except InvalidAtom as e:
            # Invalid atoms are reported and skipped rather than fatal.
            writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
        if pkg_orig == str(pkg):
            # normal atom, so return as Atom instance
            if remember_source_file:
                atoms.append((pkg, source_file))
            # atom has special prefix, so return as string
            if remember_source_file:
                atoms.append((pkg_orig, source_file))
            atoms.append(pkg_orig)
def grablines(myfilename, recursive=0, remember_source_file=False):
    # Return the raw lines of a file — or of every file under a directory
    # when recursive — optionally paired with their source filename.
    if recursive and os.path.isdir(myfilename):
        # VCS bookkeeping directories are skipped entirely.
        if os.path.basename(myfilename) in _ignorecvs_dirs:
        dirlist = os.listdir(myfilename)
        # Hidden files and editor backups (trailing '~') are ignored.
        if not f.startswith(".") and not f.endswith("~"):
            mylines.extend(grablines(
                os.path.join(myfilename, f), recursive, remember_source_file))
    myfile = io.open(_unicode_encode(myfilename,
        encoding=_encodings['fs'], errors='strict'),
        mode='r', encoding=_encodings['content'], errors='replace')
    if remember_source_file:
        mylines = [(line, myfilename) for line in myfile.readlines()]
        mylines = myfile.readlines()
    # Translate EACCES into portage's PermissionDenied exception.
    if e.errno == PermissionDenied.errno:
        raise PermissionDenied(myfilename)
def writedict(mydict,myfilename,writekey=True):
    """Writes out a dict to a file; writekey=0 mode doesn't write out
    the key and assumes all values are strings, not lists."""
    if writekey:
        # One "key value value ..." line per entry; values are lists.
        lines = ["%s %s\n" % (k, " ".join(v)) for k, v in mydict.items()]
    else:
        # Values only; each value is already a plain string.
        lines = [v + "\n" for v in mydict.values()]
    write_atomic(myfilename, "".join(lines))
    This is equivalent to shlex.split, but if the current interpreter is
    python2, it temporarily encodes unicode strings to bytes since python2's
    shlex.split() doesn't handle unicode strings.
    # Round-trip through bytes only when needed (python2 + unicode input).
    convert_to_bytes = sys.hexversion < 0x3000000 and not isinstance(s, bytes)
        s = _unicode_encode(s)
    rval = shlex.split(s)
        # Decode results back to unicode when the input was encoded above.
        rval = [_unicode_decode(x) for x in rval]
class _tolerant_shlex(shlex.shlex):
    # shlex subclass with forgiving `source` handling: when sourcing a file
    # fails, it logs a parse error and substitutes an empty stream instead
    # of propagating the exception.
    def sourcehook(self, newfile):
            return shlex.shlex.sourcehook(self, newfile)
        except EnvironmentError as e:
            # Report the failure but keep parsing the rest of the input.
            writemsg(_("!!! Parse error in '%s': source command failed: %s\n") % \
                (self.infile, str(e)), noiselevel=-1)
            return (newfile, io.StringIO())
# Shell variable names must not start with a digit and may contain only
# word characters ([A-Za-z0-9_]); this pattern matches any violation.
_invalid_var_name_re = re.compile(r'^\d|\W')
def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
    # Parse a shell-style configuration file into a dict of key -> value
    # strings.  When 'expand' is a dict it seeds variable substitution.
    if isinstance(expand, dict):
        # Some existing variable definitions have been
        # passed in, for use in substitutions.
    # NOTE: shlex doesn't support unicode objects with Python 2
    # (produces spurious \0 characters).
    if sys.hexversion < 0x3000000:
        f = open(_unicode_encode(mycfg,
            encoding=_encodings['fs'], errors='strict'), 'rb')
        f = open(_unicode_encode(mycfg,
            encoding=_encodings['fs'], errors='strict'), mode='r',
            encoding=_encodings['content'], errors='replace')
    # Open errors: EACCES becomes PermissionDenied; anything other than
    # a missing file is reported, and non-EISDIR errors re-raise.
    if e.errno == PermissionDenied.errno:
        raise PermissionDenied(mycfg)
    if e.errno != errno.ENOENT:
        writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
        if e.errno not in (errno.EISDIR,):

    # Workaround for avoiding a silent error in shlex that is
    # triggered by a source statement at the end of the file
    # without a trailing newline after the source statement.
    if content and content[-1] != '\n':

    # Warn about dos-style line endings since that prevents
    # people from being able to source them with bash.
    writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
        "in config file: '%s'") + "\n") % mycfg, noiselevel=-1)

    # tolerant mode uses the forgiving _tolerant_shlex subclass above.
    shlex_class = _tolerant_shlex
    shlex_class = shlex.shlex
    # The default shlex.sourcehook() implementation
    # only joins relative paths when the infile
    # attribute is properly set.
    lex = shlex_class(content, infile=mycfg, posix=True)
    lex.wordchars = string.digits + string.ascii_letters + \
        "~!@#$%*_\:;?,./-+{}"
    # Main token loop: expect key, '=', value triples until EOF.
    key = lex.get_token()
    #unexpected end of file
    #lex.error_leader(self.filename,lex.lineno)
    writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
    raise Exception(_("ParseError: Unexpected EOF: %s: on/before line %s") % (mycfg, lex.lineno))
    #lex.error_leader(self.filename,lex.lineno)
    raise Exception(_("ParseError: Invalid token "
        "'%s' (not '='): %s: line %s") % \
        (equ, mycfg, lex.lineno))
    #unexpected end of file
    #lex.error_leader(self.filename,lex.lineno)
    writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
    raise portage.exception.CorruptionError(_("ParseError: Unexpected EOF: %s: line %s") % (mycfg, lex.lineno))
    key = _unicode_decode(key)
    val = _unicode_decode(val)

    # Invalid variable names: tolerant mode logs, strict mode raises.
    if _invalid_var_name_re.search(key) is not None:
            "ParseError: Invalid variable name '%s': line %s") % \
            (key, lex.lineno - 1))
        writemsg(_("!!! Invalid variable name '%s': line %s in %s\n") \
            % (key, lex.lineno - 1, mycfg), noiselevel=-1)

    # Record the (optionally expanded) value and make it available for
    # substitution in later assignments within the same file.
    mykeys[key] = varexpand(val, expand_map)
    expand_map[key] = mykeys[key]
    except SystemExit as e:
    except Exception as e:
        # Wrap parse failures with the file name for context.
        raise portage.exception.ParseError(str(e)+" in "+mycfg)
#cache expansions of constant strings
def varexpand(mystring, mydict=None):
    # Fast path: previously expanded constant strings are cached in the
    # module-level cexpand dict, keyed with a leading space sentinel.
    newstring = cexpand.get(" "+mystring, None)
    if newstring is not None:

    new variable expansion code. Preserves quotes, handles \n, etc.
    This code is used by the configfile code, as well as others (parser)
    This would be a good bunch of code to port to C.

    # The leading space mirrors the cache-key convention above and keeps
    # mystring[pos-1] lookups safe at pos == 1.
    mystring=" "+mystring
    #in single, double quotes
    # Character-by-character scan, tracking quote state as it goes.
    while (pos<len(mystring)):
        if (mystring[pos]=="'") and (mystring[pos-1]!="\\"):
            newstring=newstring+"'"
            newstring += "'" # Quote removal is handled by shlex.
        elif (mystring[pos]=='"') and (mystring[pos-1]!="\\"):
            newstring=newstring+'"'
            newstring += '"' # Quote removal is handled by shlex.
        if (mystring[pos]=="\n"):
            #convert newlines to spaces
            newstring=newstring+" "
        elif (mystring[pos]=="\\"):
            # For backslash expansion, this function used to behave like
            # echo -e, but that's not needed for our purposes. We want to
            # behave like bash does when expanding a variable assignment
            # in a sourced file, in which case it performs backslash
            # removal for \\ and \$ but nothing more. It also removes
            # escaped newline characters. Note that we don't handle
            # escaped quotes here, since getconfig() uses shlex
            # to handle that earlier.
            if (pos+1>=len(mystring)):
                # Trailing backslash at end of string: keep it literally.
                newstring=newstring+mystring[pos]
            a = mystring[pos + 1]
            newstring = newstring + a
            newstring = newstring + mystring[pos-2:pos]
        elif (mystring[pos]=="$") and (mystring[pos-1]!="\\"):
            # Variable reference: either ${name} or bare $name.
            if mystring[pos]=="{":
                validchars=string.ascii_letters+string.digits+"_"
                while mystring[pos] in validchars:
                    if (pos+1)>=len(mystring):
                myvarname=mystring[myvstart:pos]
                if mystring[pos]!="}":
                if len(myvarname)==0:
                # Known variables substitute their value; unknown ones
                # apparently pass through unchanged.
                if myvarname in mydict:
                    newstring=newstring+mydict[myvarname]
                newstring=newstring+mystring[pos]
        newstring=newstring+mystring[pos]
    # Cache the result (strip the leading space sentinel).
    cexpand[mystring]=newstring[1:]
# broken and removed, but can still be imported
def pickle_read(filename,default=None,debug=0):
    # Load a pickled object from 'filename', returning 'default' when the
    # file is unreadable or unpickling fails.
    # SECURITY NOTE: pickle.load executes arbitrary code; only use this on
    # trusted, locally generated files.
    if not os.access(filename, os.R_OK):
        writemsg(_("pickle_read(): File not readable. '")+filename+"'\n",1)
    myf = open(_unicode_encode(filename,
        encoding=_encodings['fs'], errors='strict'), 'rb')
    mypickle = pickle.Unpickler(myf)
    data = mypickle.load()
    writemsg(_("pickle_read(): Loaded pickle. '")+filename+"'\n",1)
    except SystemExit as e:
    except Exception as e:
        # Any unpickling failure falls back to the default value.
        writemsg(_("!!! Failed to load pickle: ")+str(e)+"\n",1)
def dump_traceback(msg, noiselevel=1):
    # Emit 'msg' plus the active exception's traceback — or, when no
    # exception is active, the current call stack — via writemsg().
    info = sys.exc_info()
    # No active exception: report the call stack (minus this frame).
    stack = traceback.extract_stack()[:-1]
    # Active exception: report its traceback instead.
    stack = traceback.extract_tb(info[2])
    writemsg("\n====================================\n", noiselevel=noiselevel)
    writemsg("%s\n\n" % msg, noiselevel=noiselevel)
    for line in traceback.format_list(stack):
        writemsg(line, noiselevel=noiselevel)
    writemsg(error+"\n", noiselevel=noiselevel)
    writemsg("====================================\n\n", noiselevel=noiselevel)
class cmp_sort_key(object):
    """Adapter turning an old-style cmp function into a sort-key factory.

    python-3.0 removed the "cmp" keyword from list.sort(); wrapping a
    two-argument cmp function in this class yields per-item key objects
    whose __lt__ defers to that cmp function, so the wrapped function can
    be passed as the "key" argument instead — easing python-3.0 porting.
    """
    __slots__ = ("_cmp_func",)

    def __init__(self, cmp_func):
        """
        @param cmp_func: callable taking 2 positional arguments, returning
            a negative, zero, or positive number (classic cmp contract)
        """
        self._cmp_func = cmp_func

    def __call__(self, lhs):
        # One key object is produced per item being sorted.
        return self._cmp_key(self._cmp_func, lhs)

    class _cmp_key(object):
        __slots__ = ("_cmp_func", "_obj")

        def __init__(self, cmp_func, obj):
            self._cmp_func = cmp_func
            self._obj = obj

        def __lt__(self, other):
            # Keys are only comparable with keys of the same class.
            if other.__class__ is not self.__class__:
                raise TypeError("Expected type %s, got %s" % \
                    (self.__class__, other.__class__))
            return self._cmp_func(self._obj, other._obj) < 0
    """lifted from python cookbook, credit: Tim Peters
    Return a list of the elements in s in arbitrary order, sans duplicates"""
    # assume all elements are hashable, if so, it's linear
    # so much for linear. abuse sort.
    # Sorted-dedup fallback: after sorting, equal elements are adjacent, so
    # copy each new value over the last kept slot.
    t[lasti] = last = t[i]
    # blah. back to original portage.unique_array
def unique_everseen(iterable, key=None):
    """
    List unique elements, preserving order. Remember all elements ever seen.
    Taken from itertools documentation.
    """
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    observed = set()
    remember = observed.add
    if key is None:
        # filterfalse skips anything already in the seen-set.
        for element in filterfalse(observed.__contains__, iterable):
            remember(element)
            yield element
    else:
        # Dedup on key(element) while yielding the original elements.
        for element in iterable:
            marker = key(element)
            if marker not in observed:
                remember(marker)
                yield element
def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
    stat_cached=None, follow_links=True):
    """Apply user, group, and mode bits to a file if the existing bits do not
    already match. The default behavior is to force an exact match of mode
    bits. When mask=0 is specified, mode bits on the target file are allowed
    to be a superset of the mode argument (via logical OR). When mask>0, the
    mode bits that the target file is allowed to have are restricted via
    logical XOR.
    Returns True if the permissions were modified and False otherwise."""
    if stat_cached is None:
        # Stat once up front; follow_links selects stat vs lstat.
            stat_cached = os.stat(filename)
            stat_cached = os.lstat(filename)
        except OSError as oe:
            # Translate OS errors into portage exception types.
            func_call = "stat('%s')" % filename
            if oe.errno == errno.EPERM:
                raise OperationNotPermitted(func_call)
            elif oe.errno == errno.EACCES:
                raise PermissionDenied(func_call)
            elif oe.errno == errno.ENOENT:
                raise FileNotFound(filename)

    # Ownership differs from what was requested: chown (or lchown for
    # symlinks when follow_links is False).
    if (uid != -1 and uid != stat_cached.st_uid) or \
        (gid != -1 and gid != stat_cached.st_gid):
            os.chown(filename, uid, gid)
            portage.data.lchown(filename, uid, gid)
        except OSError as oe:
            func_call = "chown('%s', %i, %i)" % (filename, uid, gid)
            if oe.errno == errno.EPERM:
                raise OperationNotPermitted(func_call)
            elif oe.errno == errno.EACCES:
                raise PermissionDenied(func_call)
            elif oe.errno == errno.EROFS:
                raise ReadOnlyFileSystem(func_call)
            elif oe.errno == errno.ENOENT:
                raise FileNotFound(filename)

    st_mode = stat_cached.st_mode & 0o7777 # protect from unwanted bits
    mode = 0 # Don't add any mode bits when mode is unspecified.
    # mask semantics: OR the requested bits in, then restrict via XOR mask.
    if (mode & st_mode != mode) or \
        ((mask ^ st_mode) & st_mode != st_mode):
        new_mode = mode | st_mode
        new_mode = (mask ^ new_mode) & new_mode
    mode = mode & 0o7777 # protect from unwanted bits

    # The chown system call may clear S_ISUID and S_ISGID
    # bits, so those bits are restored if necessary.
    if modified and new_mode == -1 and \
        (st_mode & stat.S_ISUID or st_mode & stat.S_ISGID):
        new_mode = mode | st_mode
        new_mode = (mask ^ new_mode) & new_mode
        if not (new_mode & stat.S_ISUID or new_mode & stat.S_ISGID):
    if not follow_links and stat.S_ISLNK(stat_cached.st_mode):
        # Mode doesn't matter for symlinks.
        os.chmod(filename, new_mode)
    except OSError as oe:
        func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
        if oe.errno == errno.EPERM:
            raise OperationNotPermitted(func_call)
        elif oe.errno == errno.EACCES:
            raise PermissionDenied(func_call)
        elif oe.errno == errno.EROFS:
            raise ReadOnlyFileSystem(func_call)
        elif oe.errno == errno.ENOENT:
            raise FileNotFound(filename)
def apply_stat_permissions(filename, newstat, **kwargs):
    """A wrapper around apply_secpass_permissions that gets
    uid, gid, and mode from a stat object"""
    # Any extra keyword arguments (mask, follow_links, ...) pass through.
    return apply_secpass_permissions(filename,
        uid=newstat.st_uid,
        gid=newstat.st_gid,
        mode=newstat.st_mode,
        **kwargs)
def apply_recursive_permissions(top, uid=-1, gid=-1,
    dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
    """A wrapper around apply_secpass_permissions that applies permissions
    recursively. If optional argument onerror is specified, it should be a
    function; it will be called with one argument, a PortageException instance.
    Returns True if all permissions are applied and False if some are left
    unapplied."""

    # Avoid issues with circular symbolic links, as in bug #339670.
        # Default behavior is to dump errors to stderr so they won't
        # go unnoticed. Callers can pass in a quiet instance.
        # (Body of the default onerror handler:)
        if isinstance(e, OperationNotPermitted):
            writemsg(_("Operation Not Permitted: %s\n") % str(e),
        elif isinstance(e, FileNotFound):
            writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)

    for dirpath, dirnames, filenames in os.walk(top):
        # Apply the dir* permissions to each directory ...
            applied = apply_secpass_permissions(dirpath,
                uid=uid, gid=gid, mode=dirmode, mask=dirmask,
                follow_links=follow_links)
        except PortageException as e:

        # ... and the file* permissions to each contained file.
        for name in filenames:
                applied = apply_secpass_permissions(os.path.join(dirpath, name),
                    uid=uid, gid=gid, mode=filemode, mask=filemask,
                    follow_links=follow_links)
            except PortageException as e:
                # Ignore InvalidLocation exceptions such as FileNotFound
                # and DirectoryNotFound since sometimes things disappear,
                # like when adjusting permissions on DISTCC_DIR.
                if not isinstance(e, portage.exception.InvalidLocation):
def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
    stat_cached=None, follow_links=True):
    """A wrapper around apply_permissions that uses secpass and simple
    logic to apply as much of the permissions as possible without
    generating an obviously avoidable permission exception. Despite
    attempts to avoid an exception, it's possible that one will be raised
    anyway, so be prepared.
    Returns True if all permissions are applied and False if some are left
    unapplied."""

    if stat_cached is None:
        # Stat once so the privilege checks below can reuse the result.
            stat_cached = os.stat(filename)
            stat_cached = os.lstat(filename)
        except OSError as oe:
            func_call = "stat('%s')" % filename
            if oe.errno == errno.EPERM:
                raise OperationNotPermitted(func_call)
            elif oe.errno == errno.EACCES:
                raise PermissionDenied(func_call)
            elif oe.errno == errno.ENOENT:
                raise FileNotFound(filename)

    # Without full privileges (secpass < 2), skip ownership changes that
    # would certainly be denied: uid changes require root, and gid changes
    # only work for groups this process is a member of.
    if portage.data.secpass < 2:
        uid != stat_cached.st_uid:
        gid != stat_cached.st_gid and \
        gid not in os.getgroups():
    apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
        stat_cached=stat_cached, follow_links=follow_links)
class atomic_ofstream(ObjectProxy):
    """Write a file atomically via os.rename(). Atomic replacement prevents
    interprocess interference and prevents corruption of the target
    file when the write is interrupted (for example, when an 'out of space'
    error occurs)."""

    def __init__(self, filename, mode='w', follow_links=True, **kargs):
        """Opens a temporary filename.pid in the same directory as filename."""
        ObjectProxy.__init__(self)
        # _aborted: set by abort(); close() then discards the temp file.
        object.__setattr__(self, '_aborted', False)
        kargs.setdefault('encoding', _encodings['content'])
        kargs.setdefault('errors', 'backslashreplace')
        # Resolve symlinks so the rename in close() replaces the real file.
        canonical_path = os.path.realpath(filename)
        object.__setattr__(self, '_real_name', canonical_path)
        # Temp file name is unique per process: "<target>.<pid>".
        tmp_name = "%s.%i" % (canonical_path, os.getpid())
        object.__setattr__(self, '_file',
            open_func(_unicode_encode(tmp_name,
            encoding=_encodings['fs'], errors='strict'),
            mode=mode, **kargs))
        except IOError as e:
            if canonical_path == filename:
            # Ignore this error, since it's irrelevant
            # and the below open call will produce a
            # new error if necessary.
            # Fallback: retry with the unresolved filename.
            object.__setattr__(self, '_real_name', filename)
            tmp_name = "%s.%i" % (filename, os.getpid())
            object.__setattr__(self, '_file',
                open_func(_unicode_encode(tmp_name,
                encoding=_encodings['fs'], errors='strict'),
                mode=mode, **kargs))

    def _get_target(self):
        # The proxied object is the underlying temp-file handle.
        return object.__getattribute__(self, '_file')

    if sys.hexversion >= 0x3000000:

        def __getattribute__(self, attr):
            # Our own lifecycle methods; everything else proxies to _file.
            if attr in ('close', 'abort', '__del__'):
                return object.__getattribute__(self, attr)
            return getattr(object.__getattribute__(self, '_file'), attr)

        # For TextIOWrapper, automatically coerce write calls to
        # unicode, in order to avoid TypeError when writing raw
        # bytes with python2.

        def __getattribute__(self, attr):
            # python2 variant: also intercept write() for the coercion above.
            if attr in ('close', 'abort', 'write', '__del__'):
                return object.__getattribute__(self, attr)
            return getattr(object.__getattribute__(self, '_file'), attr)

        # (write() body:) decode bytes before writing to a text wrapper.
            f = object.__getattribute__(self, '_file')
            if isinstance(f, io.TextIOWrapper):
                s = _unicode_decode(s)

        # (close() follows:)
        """Closes the temporary file, copies permissions (if possible),
        and performs the atomic replacement via os.rename(). If the abort()
        method has been called, then the temp file is closed and removed."""
        f = object.__getattribute__(self, '_file')
        real_name = object.__getattribute__(self, '_real_name')
        if not object.__getattribute__(self, '_aborted'):
            # Best effort: carry the target's permissions over to the
            # temp file before it replaces the target.
            apply_stat_permissions(f.name, os.stat(real_name))
            except OperationNotPermitted:
            except FileNotFound:
            except OSError as oe: # from the above os.stat call
                if oe.errno in (errno.ENOENT, errno.EPERM):
            # The atomic step: rename temp file over the real name.
            os.rename(f.name, real_name)
            # Make sure we cleanup the temp file
            # even if an exception is raised.
        except OSError as oe:

        # (abort() follows:)
        """If an error occurs while writing the file, the user should
        call this method in order to leave the target file unchanged.
        This will call close() automatically."""
        if not object.__getattribute__(self, '_aborted'):
            object.__setattr__(self, '_aborted', True)

        # (__del__ follows:)
        """If the user does not explicitely call close(), it is
        assumed that an error has occurred, so we abort()."""
            f = object.__getattribute__(self, '_file')
        except AttributeError:
        # ensure destructor from the base class is called
        base_destructor = getattr(ObjectProxy, '__del__', None)
        if base_destructor is not None:
            base_destructor(self)
def write_atomic(file_path, content, **kwargs):
    # Write 'content' to file_path atomically via atomic_ofstream,
    # translating common OS errors into portage exception types.
    f = atomic_ofstream(file_path, **kwargs)
    except (IOError, OSError) as e:
        # On failure the temp file is discarded before translating.
        func_call = "write_atomic('%s')" % file_path
        if e.errno == errno.EPERM:
            raise OperationNotPermitted(func_call)
        elif e.errno == errno.EACCES:
            raise PermissionDenied(func_call)
        elif e.errno == errno.EROFS:
            raise ReadOnlyFileSystem(func_call)
        elif e.errno == errno.ENOENT:
            raise FileNotFound(file_path)
def ensure_dirs(dir_path, **kwargs):
    """Create a directory and call apply_permissions.
    Returns True if a directory is created or the permissions needed to be
    modified, and False otherwise.

    This function's handling of EEXIST errors makes it useful for atomic
    directory creation, in which multiple processes may be competing to
    create the same directory.
    """
    os.makedirs(dir_path)
    except OSError as oe:
        func_call = "makedirs('%s')" % dir_path
        if oe.errno in (errno.EEXIST,):
            # Already exists: fine for our purposes (lost the race).
        if os.path.isdir(dir_path):
            # NOTE: DragonFly raises EPERM for makedir('/')
            # and that is supposed to be ignored here.
            # Also, sometimes mkdir raises EISDIR on FreeBSD
            # and we want to ignore that too (bug #187518).
        elif oe.errno == errno.EPERM:
            raise OperationNotPermitted(func_call)
        elif oe.errno == errno.EACCES:
            raise PermissionDenied(func_call)
        elif oe.errno == errno.EROFS:
            raise ReadOnlyFileSystem(func_call)
    # Permissions are applied only when permission kwargs were passed.
    perms_modified = apply_permissions(dir_path, **kwargs)
    perms_modified = False
    return created_dir or perms_modified
1269 class LazyItemsDict(UserDict):
1270 	"""A mapping object that behaves like a standard dict except that it allows
1271 	for lazy initialization of values via callable objects. Lazy items can be
1272 	overwritten and deleted just as normal items."""
# lazy_items maps key -> _LazyItem; keys present here also exist in the
# underlying UserDict storage with a placeholder value of None.
1274 	__slots__ = ('lazy_items',)
1276 	def __init__(self, *args, **kwargs):
1278 		self.lazy_items = {}
1279 		UserDict.__init__(self, *args, **kwargs)
1281 	def addLazyItem(self, item_key, value_callable, *pargs, **kwargs):
1282 		"""Add a lazy item for the given key. When the item is requested,
1283 		value_callable will be called with *pargs and **kwargs arguments."""
1284 		self.lazy_items[item_key] = \
1285 			self._LazyItem(value_callable, pargs, kwargs, False)
1286 		# make it show up in self.keys(), etc...
1287 		UserDict.__setitem__(self, item_key, None)
1289 	def addLazySingleton(self, item_key, value_callable, *pargs, **kwargs):
1290 		"""This is like addLazyItem except value_callable will only be called
1291 		a maximum of 1 time and the result will be cached for future requests."""
1292 		self.lazy_items[item_key] = \
1293 			self._LazyItem(value_callable, pargs, kwargs, True)
1294 		# make it show up in self.keys(), etc...
1295 		UserDict.__setitem__(self, item_key, None)
# Merge another mapping, preserving laziness when the source is also a
# LazyItemsDict. NOTE(review): elided listing -- the argument-count
# check, the plain-dict branch, and the loop headers (original
# 1298-1307, 1309, 1312, 1315, 1317) are not visible here.
1297 	def update(self, *args, **kwargs):
1300 				"expected at most 1 positional argument, got " + \
1308 			elif isinstance(map_obj, LazyItemsDict):
1310 					if k in map_obj.lazy_items:
# Lazy keys keep their None placeholder; the real value comes from the
# copied _LazyItem, not from evaluating map_obj[k] here.
1311 						UserDict.__setitem__(self, k, None)
1313 						UserDict.__setitem__(self, k, map_obj[k])
1314 				self.lazy_items.update(map_obj.lazy_items)
1316 				UserDict.update(self, map_obj)
1318 			UserDict.update(self, kwargs)
# Evaluate a lazy item on first access. Elided lines (original
# 1324-1328, 1332-1334) presumably normalize pargs/kwargs and return
# `result` for the lazy path -- TODO confirm against the full source.
1320 	def __getitem__(self, item_key):
1321 		if item_key in self.lazy_items:
1322 			lazy_item = self.lazy_items[item_key]
1323 			pargs = lazy_item.pargs
1326 			kwargs = lazy_item.kwargs
1329 			result = lazy_item.func(*pargs, **kwargs)
1330 			if lazy_item.singleton:
# Caching via self[item_key] routes through __setitem__, which also
# removes the entry from lazy_items so func is never called again.
1331 				self[item_key] = result
1335 			return UserDict.__getitem__(self, item_key)
# A direct assignment replaces (and discards) any pending lazy item.
1337 	def __setitem__(self, item_key, value):
1338 		if item_key in self.lazy_items:
1339 			del self.lazy_items[item_key]
1340 		UserDict.__setitem__(self, item_key, value)
1342 	def __delitem__(self, item_key):
1343 		if item_key in self.lazy_items:
1344 			del self.lazy_items[item_key]
1345 		UserDict.__delitem__(self, item_key)
# NOTE(review): the def lines for clear()/copy()/__copy__() (original
# 1347, 1351, 1354) are elided from this listing; the bodies below
# belong to those three methods respectively.
1348 		self.lazy_items.clear()
1349 		UserDict.clear(self)
1352 		return self.__copy__()
1355 		return self.__class__(self)
1357 	def __deepcopy__(self, memo=None):
1359 		This forces evaluation of each contained lazy item, and deepcopy of
1360 		the result. A TypeError is raised if any contained lazy item is not
1361 		a singleton, since it is not necessarily possible for the behavior
1362 		of this type of item to be safely preserved.
# Elided: memo default handling and the loop header over self (original
# 1363-1365, 1368, 1377).
1366 		result = self.__class__()
1367 		memo[id(self)] = result
1369 			k_copy = deepcopy(k, memo)
1370 			lazy_item = self.lazy_items.get(k)
1371 			if lazy_item is not None:
1372 				if not lazy_item.singleton:
1373 					raise TypeError(_unicode_decode("LazyItemsDict " + \
1374 						"deepcopy is unsafe with lazy items that are " + \
1375 						"not singletons: key=%s value=%s") % (k, lazy_item,))
# self[k] forces evaluation of the (singleton) lazy item before copying.
1376 			UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
# Immutable record describing one pending lazy value: the callable, its
# arguments, and whether the result should be cached (singleton).
1379 	class _LazyItem(object):
1381 		__slots__ = ('func', 'pargs', 'kwargs', 'singleton')
# NOTE(review): elided -- original 1384-1391 presumably assign
# self.func/self.pargs (and may normalize empty pargs/kwargs to None).
1383 		def __init__(self, func, pargs, kwargs, singleton):
1392 			self.kwargs = kwargs
1393 			self.singleton = singleton
# Body of __copy__ (its def line, original ~1395, is elided): a shallow
# copy sharing func/pargs/kwargs references.
1396 			return self.__class__(self.func, self.pargs,
1397 				self.kwargs, self.singleton)
1399 		def __deepcopy__(self, memo=None):
1401 			Override this since the default implementation can fail silently,
1402 			leaving some attributes unset.
# Elided: docstring close and memo default handling (original 1403-1405).
1406 			result = self.__copy__()
1407 			memo[id(self)] = result
1408 			result.func = deepcopy(self.func, memo)
1409 			result.pargs = deepcopy(self.pargs, memo)
1410 			result.kwargs = deepcopy(self.kwargs, memo)
1411 			result.singleton = deepcopy(self.singleton, memo)
# Implements CONFIG_PROTECT / CONFIG_PROTECT_MASK path matching: decides
# whether a file under `myroot` may be overwritten directly or must go
# through config-file protection.
1414 class ConfigProtect(object):
1415 	def __init__(self, myroot, protect_list, mask_list):
1416 		self.myroot = myroot
1417 		self.protect_list = protect_list
1418 		self.mask_list = mask_list
1419 		self.updateprotect()
1421 	def updateprotect(self):
1422 		"""Update internal state for isprotected() calls. Nonexistent paths
# NOTE(review): elided listing -- docstring close and the initialization
# of self.protect / self._dirs (original 1423-1428), plus the try/except
# scaffolding around the os.stat calls (1432, 1436, 1438-1439, 1444,
# 1446, 1453, 1455), are not visible here.
1429 		for x in self.protect_list:
1430 			ppath = normalize_path(
1431 				os.path.join(self.myroot, x.lstrip(os.path.sep)))
1433 				if stat.S_ISDIR(os.stat(ppath).st_mode):
1434 					self._dirs.add(ppath)
1435 				self.protect.append(ppath)
1437 				# If it doesn't exist, there's no need to protect it.
1440 		self.protectmask = []
1441 		for x in self.mask_list:
1442 			ppath = normalize_path(
1443 				os.path.join(self.myroot, x.lstrip(os.path.sep)))
1445 				"""Use lstat so that anything, even a broken symlink can be
1447 				if stat.S_ISDIR(os.lstat(ppath).st_mode):
1448 					self._dirs.add(ppath)
1449 				self.protectmask.append(ppath)
1450 				"""Now use stat in case this is a symlink to a directory."""
1451 				if stat.S_ISDIR(os.stat(ppath).st_mode):
1452 					self._dirs.add(ppath)
1454 				# If it doesn't exist, there's no need to mask it.
1457 	def isprotected(self, obj):
1458 		"""Returns True if obj is protected, False otherwise. The caller must
1459 		ensure that obj is normalized with a single leading slash. A trailing
1460 		slash is optional for directories."""
# Longest-prefix contest: track the longest matching protect entry
# (`protected`) and the longest matching mask entry (`masked`); the file
# is protected iff the protect match is strictly longer. The variable
# initializations and `continue` lines (original 1461-1463, 1469-1473,
# 1482-1486) are elided from this listing.
1464 		for ppath in self.protect:
1465 			if len(ppath) > masked and obj.startswith(ppath):
1466 				if ppath in self._dirs:
1467 					if obj != ppath and not obj.startswith(ppath + sep):
1468 						# /etc/foo does not match /etc/foobaz
1471 					# force exact match when CONFIG_PROTECT lists a
1474 				protected = len(ppath)
1475 				#config file management
1476 				for pmpath in self.protectmask:
1477 					if len(pmpath) >= protected and obj.startswith(pmpath):
1478 						if pmpath in self._dirs:
1479 							if obj != pmpath and \
1480 								not obj.startswith(pmpath + sep):
1481 								# /etc/foo does not match /etc/foobaz
1484 							# force exact match when CONFIG_PROTECT_MASK lists
1487 						#skip, it's in the mask
1488 						masked = len(pmpath)
1489 		return protected > masked
1491 def new_protect_filename(mydest, newmd5=None, force=False):
1492 	"""Resolves a config-protect filename for merging, optionally
1493 	using the last filename if the md5 matches. If force is True,
1494 	then a new filename will be generated even if mydest does not
1496 	(dest,md5) ==> 'string' --- path_to_target_filename
1497 	(dest) ==> ('next', 'highest') --- next_target and most-recent_target
# NOTE(review): elided listing -- docstring continuation/close and the
# prot_num / last_pfile initialization (original 1495, 1498-1499,
# 1501-1509, 1511-1512) are not visible here.
1500 	# config protection filename format:
# If mydest does not exist (and force is False), presumably mydest is
# returned unchanged; the branch body (original 1511) is elided.
1510 		not os.path.exists(mydest):
1513 	real_filename = os.path.basename(mydest)
1514 	real_dirname = os.path.dirname(mydest)
# Scan siblings for the ._cfgNNNN_<name> pattern and remember the
# highest counter seen. The `continue` lines and the last_pfile
# assignment (original 1517, 1519-1520, 1524-1526) are elided.
1515 	for pfile in os.listdir(real_dirname):
1516 		if pfile[0:5] != "._cfg":
1518 		if pfile[10:] != real_filename:
# pfile[5:9] is the zero-padded 4-digit counter in ._cfgNNNN_.
1521 			new_prot_num = int(pfile[5:9])
1522 			if new_prot_num > prot_num:
1523 				prot_num = new_prot_num
1527 	prot_num = prot_num + 1
1529 	new_pfile = normalize_path(os.path.join(real_dirname,
1530 		"._cfg" + str(prot_num).zfill(4) + "_" + real_filename))
1531 	old_pfile = normalize_path(os.path.join(real_dirname, last_pfile))
# Reuse the most recent protect file when its md5 matches the incoming
# content, instead of creating yet another ._cfg file. The try/return
# lines (original 1533, 1537-1538, 1540-1542) are elided.
1532 	if last_pfile and newmd5:
1534 			last_pfile_md5 = portage.checksum._perform_md5_merge(old_pfile)
1535 		except FileNotFound:
1536 			# The file suddenly disappeared or it's a broken symlink.
1539 		if last_pfile_md5 == newmd5:
1543 def find_updated_config_files(target_root, config_protect):
1545 	Return a tuple of configuration files that needs to be updated.
1546 	The tuple contains lists organized like this:
1547 	[ protected_dir, file_list ]
1548 	If the protected config isn't a protected_dir but a protected_file, list is:
1549 	[ protected_file, None ]
1550 	If no configuration files needs to be updated, None is returned
# NOTE(review): elided listing -- docstring open/close and setup lines
# (original 1544, 1551-1555, 1558-1559, 1562-1563, 1565-1567, 1571,
# 1574-1577, 1579, 1581, 1586-1587, 1591-1592, 1594-1597) are missing,
# including whatever yields/returns the per-directory results.
1556 	# directories with some protect files in them
1557 	for x in config_protect:
1560 		x = os.path.join(target_root, x.lstrip(os.path.sep))
# Skip paths we cannot write to; nothing there can be updated by us.
1561 		if not os.access(x, os.W_OK):
1564 			mymode = os.lstat(x).st_mode
1568 		if stat.S_ISLNK(mymode):
1569 			# We want to treat it like a directory if it
1570 			# is a symlink to an existing directory.
1572 				real_mode = os.stat(x).st_mode
1573 				if stat.S_ISDIR(real_mode):
# Directories get a recursive find for ._cfg????_* entries; plain files
# get a -maxdepth 1 search for their own ._cfg variants.
1578 		if stat.S_ISDIR(mymode):
# NOTE(review): the find command is built by interpolating paths into a
# shell string -- a path containing a single quote would break quoting
# (shell-injection hazard). Flagged, not changed here.
1580 				"find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
1582 			mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
1583 				os.path.split(x.rstrip(os.path.sep))
1584 		mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
1585 		a = subprocess_getstatusoutput(mycommand)
# a is a (status, output) pair; output is NUL-delimited via -print0 so
# filenames with embedded newlines survive.
1588 			files = a[1].split('\0')
1589 			# split always produces an empty string as the last element
1590 			if files and not files[-1]:
1593 			if stat.S_ISDIR(mymode):
1598 def getlibpaths(root, env=None):
1599 	""" Return a list of paths that are used for library lookups """
# NOTE(review): elided listing -- original 1600-1601 presumably default
# `env` to os.environ when None, and 1606-1607 presumably append the
# remaining default search path(s); confirm against the full source.
1602 	# the following is based on the information from ld.so(8)
# Search order mirrors the runtime linker: LD_LIBRARY_PATH first, then
# the entries of <root>/etc/ld.so.conf, then the built-in defaults.
1603 	rval = env.get("LD_LIBRARY_PATH", "").split(":")
1604 	rval.extend(grabfile(os.path.join(root, "etc", "ld.so.conf")))
1605 	rval.append("/usr/lib")
# Normalize each path and drop empty strings produced by splitting an
# unset/empty LD_LIBRARY_PATH.
1608 	return [normalize_path(x) for x in rval if x]