1 # Copyright 2004-2011 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
# Public names exported by this module (kept roughly alphabetical).
__all__ = ['apply_permissions', 'apply_recursive_permissions',
    'apply_secpass_permissions', 'apply_stat_permissions', 'atomic_ofstream',
    'cmp_sort_key', 'ConfigProtect', 'dump_traceback', 'ensure_dirs',
    'find_updated_config_files', 'getconfig', 'getlibpaths', 'grabdict',
    'grabdict_package', 'grabfile', 'grabfile_package', 'grablines',
    'initialize_logger', 'LazyItemsDict', 'map_dictlist_vals',
    'new_protect_filename', 'normalize_path', 'pickle_read', 'stack_dictlist',
    'stack_dicts', 'stack_lists', 'unique_array', 'unique_everseen', 'varexpand',
    'write_atomic', 'writedict', 'writemsg', 'writemsg_level', 'writemsg_stdout']
14 from copy import deepcopy
18 from itertools import filterfalse
20 from itertools import ifilterfalse as filterfalse
30 portage.proxy.lazyimport.lazyimport(globals(),
33 'portage.util.listdir:_ignorecvs_dirs'
36 from portage import os
37 from portage import subprocess_getstatusoutput
38 from portage import _encodings
39 from portage import _os_merge
40 from portage import _unicode_encode
41 from portage import _unicode_decode
42 from portage.exception import InvalidAtom, PortageException, FileNotFound, \
43 OperationNotPermitted, PermissionDenied, ReadOnlyFileSystem
44 from portage.localization import _
45 from portage.proxy.objectproxy import ObjectProxy
46 from portage.cache.mappings import UserDict
def initialize_logger(level=logging.WARN):
    """Set up basic logging of portage activities.

    @param level: the logging level to emit messages at, e.g.
        logging.DEBUG, logging.INFO, logging.WARN (default)
    """
    # Bug fix: the level argument used to be ignored -- basicConfig()
    # was always called with the hardcoded logging.WARN.
    logging.basicConfig(level=level,
        format='[%(levelname)-4s] %(message)s')
def writemsg(mystr, noiselevel=0, fd=None):
    """Prints out warning and debug messages based on the noiselimit setting"""
    # [listing gap: presumably defaults fd to sys.stderr -- TODO confirm
    #  against the full file; noiselimit is a module-level global]
    if noiselevel <= noiselimit:
        # avoid potential UnicodeEncodeError
        if isinstance(fd, io.StringIO):
            # Text sink: hand it str, replacing undecodable bytes.
            mystr = _unicode_decode(mystr,
                encoding=_encodings['content'], errors='replace')
        # [listing gap: else branch]
            # Byte-oriented sink: encode for the stdio encoding.
            mystr = _unicode_encode(mystr,
                encoding=_encodings['stdio'], errors='backslashreplace')
        if sys.hexversion >= 0x3000000 and fd in (sys.stdout, sys.stderr):
        # [listing gap: remainder of write/flush logic elided in listing]
def writemsg_stdout(mystr, noiselevel=0):
    """Emit *mystr* on stdout, subject to the global noiselimit.

    Thin convenience wrapper: identical to calling writemsg() with
    fd=sys.stdout.
    """
    writemsg(mystr, fd=sys.stdout, noiselevel=noiselevel)
def writemsg_level(msg, level=0, noiselevel=0):
    """
    Show a message for the given level as defined by the logging module
    (default is 0). When level >= logging.WARNING then the message is
    sent to stderr, otherwise it is sent to stdout. The noiselevel is
    passed directly to writemsg().

    @param msg: a message string, including newline if appropriate
    @param level: a numeric logging level (see the logging module)
    @param noiselevel: passed directly to writemsg
    """
    if level >= logging.WARNING:
    # [listing gap: fd selection -- stderr for WARNING and above,
    #  stdout otherwise]
    writemsg(msg, noiselevel=noiselevel, fd=fd)
def normalize_path(mypath):
    """
    os.path.normpath("//foo") returns "//foo" instead of "/foo"
    We dislike this behavior so we create our own normpath func
    """
    # Choose a separator of the same type (bytes vs str) as mypath.
    if sys.hexversion >= 0x3000000 and isinstance(mypath, bytes):
        path_sep = os.path.sep.encode()
    # [listing gap: else branch]
        path_sep = os.path.sep

    if mypath.startswith(path_sep):
        # posixpath.normpath collapses 3 or more leading slashes to just 1.
        # Prepending two more separators guarantees >= 3 leading slashes,
        # so normpath always collapses down to a single one.
        return os.path.normpath(2*path_sep + mypath)
    # [listing gap: else branch]
        return os.path.normpath(mypath)
def grabfile(myfilename, compat_level=0, recursive=0, remember_source_file=False):
    """This function grabs the lines in a file, normalizes whitespace and returns lines in a list; if a line
    begins with a #, it is ignored, as are empty lines"""

    mylines = grablines(myfilename, recursive, remember_source_file=True)
    # [listing gap: result-list initialization]
    for x, source_file in mylines:
        #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
        #into single spaces
        if x and x[0] != "#":
            # [listing gap: per-token loop building mylinetemp]
            mylinetemp.append(item)
            # [listing gap]
            myline = " ".join(myline)
        # [listing gap: empty/comment-line skip]

        # Check if we have a compat-level string. BC-integration data.
        # '##COMPAT==>N<==' 'some string attached to it'
        mylinetest = myline.split("<==", 1)
        if len(mylinetest) == 2:
            myline_potential = mylinetest[1]
            mylinetest = mylinetest[0].split("##COMPAT==>")
            if len(mylinetest) == 2:
                if compat_level >= int(mylinetest[1]):
                    # It's a compat line, and the key matches.
                    newlines.append(myline_potential)
            # [listing gap: continue to next line]
        if remember_source_file:
            newlines.append((myline, source_file))
        # [listing gap: else branch]
            newlines.append(myline)
    # [listing gap: return newlines]
def map_dictlist_vals(func, myDict):
    """Performs a function on each value of each key in a dictlist.
    Returns a new dictlist."""
    # [listing gap: new_dl initialization and loop over myDict keys]
        new_dl[key] = [func(x) for x in myDict[key]]
    # [listing gap: return new_dl]
def stack_dictlist(original_dicts, incremental=0, incrementals=[], ignore_none=0):
    """
    Stacks an array of dict-types into one array. Optionally merging or
    overwriting matching key/value pairs for the dict[key]->list.
    Returns a single dict. Higher index in lists is preferenced.

    Example usage:
    >>> from portage.util import stack_dictlist
    >>> print stack_dictlist( [{'a':'b'},{'x':'y'}])
    >>> {'a':'b','x':'y'}
    >>> print stack_dictlist( [{'a':'b'},{'a':'c'}], incremental = True )
    >>> a = {'KEYWORDS':['x86','alpha']}
    >>> b = {'KEYWORDS':['-x86']}
    >>> print stack_dictlist( [a,b] )
    >>> { 'KEYWORDS':['x86','alpha','-x86']}
    >>> print stack_dictlist( [a,b], incremental=True)
    >>> { 'KEYWORDS':['alpha'] }
    >>> print stack_dictlist( [a,b], incrementals=['KEYWORDS'])
    >>> { 'KEYWORDS':['alpha'] }

    @param original_dicts a list of (dictionary objects or None)
    @param incremental True or false depending on whether new keys should overwrite
        keys which already exist.
    @param incrementals A list of items that should be incremental (-foo removes foo)
    @param ignore_none Appears to be ignored, but probably was used long long ago.
    """
    # NOTE: `incrementals` is a mutable default argument; it is only read
    # in the visible code, never mutated, so the sharing is harmless.
    # [listing gap: final_dict initialization]
    for mydict in original_dicts:
        # [listing gap: None check, loop over mydict's keys as y]
        if not y in final_dict:
            # [listing gap: seed final_dict[y] with an empty list]
        for thing in mydict[y]:
            # [listing gap]
            if incremental or y in incrementals:
                # [listing gap: presumably '-*' wildcard reset -- confirm]
                elif thing[:1] == '-':
                    # '-foo' removes a previously stacked 'foo'.
                    final_dict[y].remove(thing[1:])
                # [listing gap: else branch, duplicate guard]
                if thing not in final_dict[y]:
                    final_dict[y].append(thing)
        if y in final_dict and not final_dict[y]:
            # [listing gap: drop keys whose lists became empty; return]
def stack_dicts(dicts, incremental=0, incrementals=[], ignore_none=0):
    """Stacks an array of dict-types into one array. Optionally merging or
    overwriting matching key/value pairs for the dict[key]->string.
    Returns a single dict."""
    # [listing gap: final_dict init, outer loop over dicts, None skip]
        for k, v in mydict.items():
            if k in final_dict and (incremental or (k in incrementals)):
                # Incremental keys accumulate space-separated values.
                final_dict[k] += " " + v
            # [listing gap: else branch -- plain overwrite]
    # [listing gap: return final_dict]
def append_repo(atom_list, repo_name, remember_source_file=False):
    """
    Takes a list of valid atoms without repo spec and appends ::repo_name.
    """
    if remember_source_file:
        # Each element is an (atom, source_file) pair.
        return [(Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True), source) \
            for atom, source in atom_list]
    # [listing gap: else branch]
        return [Atom(atom + "::" + repo_name, allow_wildcard=True, allow_repo=True) \
            for atom in atom_list]
def stack_lists(lists, incremental=1, remember_source_file=False,
    warn_for_unmatched_removal=False, strict_warn_for_unmatched_removal=False, ignore_repo=False):
    """Stacks an array of list-types into one array. Optionally removing
    distinct values using '-value' notation. Higher index is preferenced.

    all elements must be hashable."""
    # Removal tokens that actually matched something.
    matched_removals = set()
    # source_file -> set of removal tokens that matched nothing.
    unmatched_removals = {}
    # [listing gap: new_list (token -> source_file mapping) initialization]
    for sub_list in lists:
        for token in sub_list:
            # [listing gap: empty-token skip]
            if remember_source_file:
                # Elements are (token, source_file) pairs in this mode.
                token, source_file = token
            # [listing gap: else source_file default; '-*' reset handling]
            elif token[:1] == '-':
                # Removal token.
                # [listing gap]
                if ignore_repo and not "::" in token:
                    #Let -cat/pkg remove cat/pkg::repo.
                    # [listing gap: to_be_removed initialization]
                    token_slice = token[1:]
                    for atom in new_list:
                        atom_without_repo = atom
                        if atom.repo is not None:
                            # Atom.without_repo instantiates a new Atom,
                            # which is unnecessary here, so use string
                            # replacement instead.
                            atom_without_repo = \
                                atom.replace("::" + atom.repo, "", 1)
                        if atom_without_repo == token_slice:
                            to_be_removed.append(atom)
                    # [listing gap: matched bookkeeping]
                    for atom in to_be_removed:
                        # [listing gap: removal from new_list; else branch]
                        new_list.pop(token[1:])
                # [listing gap: unmatched-removal warning condition]
                    (strict_warn_for_unmatched_removal or \
                    token_key not in matched_removals):
                    unmatched_removals.setdefault(source_file, set()).add(token)
                # [listing gap: else branch]
                    matched_removals.add(token_key)
            # [listing gap: else -- plain (non-removal) token]
                new_list[token] = source_file
            # [listing gap: non-incremental branch]
                new_list[token] = source_file

    if warn_for_unmatched_removal:
        for source_file, tokens in unmatched_removals.items():
            # [listing gap: truncate long lists to three sample entries]
            selected = [tokens.pop(), tokens.pop(), tokens.pop()]
            writemsg(_("--- Unmatched removal atoms in %s: %s and %s more\n") % \
                (source_file, ", ".join(selected), len(tokens)),
            # [listing gap: noiselevel argument; else branch]
            writemsg(_("--- Unmatched removal atom(s) in %s: %s\n") % (source_file, ", ".join(tokens)),
            # [listing gap: noiselevel argument]

    if remember_source_file:
        return list(new_list.items())
    # [listing gap: else branch]
        return list(new_list)
def grabdict(myfilename, juststrings=0, empty=0, recursive=0, incremental=1):
    """
    This function grabs the lines in a file, normalizes whitespace and returns lines in a dictionary

    @param myfilename: file to process
    @type myfilename: string (path)
    @param juststrings: only return strings
    @type juststrings: Boolean (integer)
    @param empty: Ignore certain lines
    @type empty: Boolean (integer)
    @param recursive: Recursively grab ( support for /etc/portage/package.keywords/* and friends )
    @type recursive: Boolean (integer)
    @param incremental: Append to the return list, don't overwrite
    @type incremental: Boolean (integer)
    @return:
    1. Returns the lines in a file in a dictionary, for example:
        'sys-apps/portage x86 amd64 ppc'
        would return
        { "sys-apps/portage" : [ 'x86', 'amd64', 'ppc' ]
        the line syntax is key : [list of values]
    """
    # [listing gap: newdict initialization]
    for x in grablines(myfilename, recursive):
        #the split/join thing removes leading and trailing whitespace, and converts any whitespace in the line
        #into single spaces
        # [listing gap: comment stripping / whitespace split]
        mylinetemp.append(item)
        # [listing gap: myline assembly]
        if len(myline) < 2 and empty == 0:
            # [listing gap: skip key-only lines unless empty lines wanted]
        if len(myline) < 1 and empty == 1:
            # [listing gap: skip blank lines; incremental check]
        newdict.setdefault(myline[0], []).extend(myline[1:])
        # [listing gap: non-incremental branch]
        newdict[myline[0]] = myline[1:]
    # [listing gap: if juststrings -- flatten value lists to strings]
    for k, v in newdict.items():
        newdict[k] = " ".join(v)
    # [listing gap: return newdict]
def read_corresponding_eapi_file(filename):
    """
    Read the 'eapi' file from the directory 'filename' is in.
    Returns "0" if the file is not present or invalid.
    """
    eapi_file = os.path.join(os.path.dirname(filename), "eapi")
    # [listing gap: default eapi value; try]
        f = open(eapi_file, "r")
        lines = f.readlines()
        # [listing gap: exactly-one-line check]
        eapi = lines[0].rstrip("\n")
        # [listing gap: else branch]
        writemsg(_("--- Invalid 'eapi' file (doesn't contain exactly one line): %s\n") % (eapi_file),
        # [listing gap: noiselevel argument; f.close(); IOError handling;
        #  return eapi]
    # NOTE(review): confirm in the full file that `f` is closed on the
    # error paths too -- from the visible code it looks like close may be
    # skipped if readlines() raises.
def grabdict_package(myfilename, juststrings=0, recursive=0, allow_wildcard=False, allow_repo=False,
    verify_eapi=False, eapi=None):
    """ Does the same thing as grabdict except it validates keys
    with isvalidatom()"""
    pkgs = grabdict(myfilename, juststrings, empty=1, recursive=recursive)
    # [listing gap: early return when nothing was read]
    if verify_eapi and eapi is None:
        # Derive the EAPI from the sibling 'eapi' file.
        eapi = read_corresponding_eapi_file(myfilename)

    # We need to call keys() here in order to avoid the possibility of
    # "RuntimeError: dictionary changed size during iteration"
    # when an invalid atom is deleted.
    # [listing gap: result dict initialization]
    for k, v in pkgs.items():
        # [listing gap: try]
        k = Atom(k, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
        except InvalidAtom as e:
            writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
            # [listing gap: noiselevel argument; else store valid entry;
            #  return result]
def grabfile_package(myfilename, compatlevel=0, recursive=0, allow_wildcard=False, allow_repo=False,
    remember_source_file=False, verify_eapi=False, eapi=None):
    # Like grabfile(), but validates each line as a package atom.
    pkgs = grabfile(myfilename, compatlevel, recursive=recursive, remember_source_file=True)
    # [listing gap: early return when nothing was read]
    if verify_eapi and eapi is None:
        eapi = read_corresponding_eapi_file(myfilename)
    mybasename = os.path.basename(myfilename)
    # [listing gap: atoms list initialization]
    for pkg, source_file in pkgs:
        # [listing gap: pkg_orig = pkg; prefix ('-') stripping]
        # for packages and package.mask files
        # [listing gap]
        if pkg[:1] == '*' and mybasename == 'packages':
            # [listing gap: strip the '*' system-set prefix; try]
        pkg = Atom(pkg, allow_wildcard=allow_wildcard, allow_repo=allow_repo, eapi=eapi)
        except InvalidAtom as e:
            writemsg(_("--- Invalid atom in %s: %s\n") % (myfilename, e),
            # [listing gap: noiselevel argument; continue]
        if pkg_orig == str(pkg):
            # normal atom, so return as Atom instance
            if remember_source_file:
                atoms.append((pkg, source_file))
            # [listing gap: else append the Atom alone; outer else]
            # atom has special prefix, so return as string
            if remember_source_file:
                atoms.append((pkg_orig, source_file))
            # [listing gap: else branch]
                atoms.append(pkg_orig)
    # [listing gap: return atoms]
def grablines(myfilename, recursive=0, remember_source_file=False):
    # Return the lines of a file, or of every file in a directory tree
    # when recursive is set.
    # [listing gap: mylines initialization]
    if recursive and os.path.isdir(myfilename):
        if os.path.basename(myfilename) in _ignorecvs_dirs:
            # [listing gap: skip VCS directories]
        dirlist = os.listdir(myfilename)
        # [listing gap: sort + loop over dirlist as f]
        if not f.startswith(".") and not f.endswith("~"):
            # Skip hidden files and editor backups; recurse otherwise.
            mylines.extend(grablines(
                os.path.join(myfilename, f), recursive, remember_source_file))
    # [listing gap: else branch; try]
        myfile = io.open(_unicode_encode(myfilename,
            encoding=_encodings['fs'], errors='strict'),
            mode='r', encoding=_encodings['content'], errors='replace')
        if remember_source_file:
            # Pair each line with the file it came from.
            mylines = [(line, myfilename) for line in myfile.readlines()]
        # [listing gap: else branch]
            mylines = myfile.readlines()
        # [listing gap: myfile.close(); except IOError as e]
        if e.errno == PermissionDenied.errno:
            raise PermissionDenied(myfilename)
        # [listing gap: return mylines]
def writedict(mydict, myfilename, writekey=True):
    """Writes out a dict to a file; writekey=0 mode doesn't write out
    the key and assumes all values are strings, not lists."""
    # [listing gap: lines accumulator; if not writekey]
    for v in mydict.values():
        lines.append(v + "\n")
    # [listing gap: else branch]
    for k, v in mydict.items():
        lines.append("%s %s\n" % (k, " ".join(v)))
    # Delegate the actual write so the target file is replaced atomically.
    write_atomic(myfilename, "".join(lines))
    # [listing gap: function header (def shlex_split(s):) elided in this
    #  listing -- name inferred from the docstring below]
    """
    This is equivalent to shlex.split but it temporarily encodes unicode
    strings to bytes since shlex.split() doesn't handle unicode strings.
    """
    is_unicode = sys.hexversion < 0x3000000 and isinstance(s, unicode)
    # [listing gap: if is_unicode]
    s = _unicode_encode(s)
    rval = shlex.split(s)
    # [listing gap: if is_unicode -- decode results back to unicode]
    rval = [_unicode_decode(x) for x in rval]
    # [listing gap: return rval]
class _tolerant_shlex(shlex.shlex):
    # A shlex variant whose `source` handling degrades gracefully: a
    # failed source command logs a warning and yields an empty stream
    # instead of raising.
    def sourcehook(self, newfile):
        # [listing gap: try]
            return shlex.shlex.sourcehook(self, newfile)
        except EnvironmentError as e:
            writemsg(_("!!! Parse error in '%s': source command failed: %s\n") % \
                (self.infile, str(e)), noiselevel=-1)
            # Substitute an empty file so parsing can continue.
            return (newfile, io.StringIO())
# Matches invalid shell variable names: a leading digit, or any
# non-word character anywhere in the name.
_invalid_var_name_re = re.compile(r'^\d|\W')
def getconfig(mycfg, tolerant=0, allow_sourcing=False, expand=True):
    # Parse a shell-style key=value config file into a dict of settings.
    if isinstance(expand, dict):
        # Some existing variable definitions have been
        # passed in, for use in substitutions.
        # [listing gap: expand_map = expand; mykeys init; try/open]
        # NOTE: shlex doesn't support unicode objects with Python 2
        # (produces spurious \0 characters).
        if sys.hexversion < 0x3000000:
            f = open(_unicode_encode(mycfg,
                encoding=_encodings['fs'], errors='strict'), 'rb')
        # [listing gap: else -- python3 text-mode open]
            f = open(_unicode_encode(mycfg,
                encoding=_encodings['fs'], errors='strict'), mode='r',
                encoding=_encodings['content'], errors='replace')
    # [listing gap: except IOError as e]
        if e.errno == PermissionDenied.errno:
            raise PermissionDenied(mycfg)
        if e.errno != errno.ENOENT:
            writemsg("open('%s', 'r'): %s\n" % (mycfg, e), noiselevel=-1)
            if e.errno not in (errno.EISDIR,):
            # [listing gap: raise / return None]

    # [listing gap: read file content, close handle]
        # Workaround for avoiding a silent error in shlex that is
        # triggered by a source statement at the end of the file
        # without a trailing newline after the source statement.
        if content and content[-1] != '\n':
        # [listing gap: append trailing newline]

        # Warn about dos-style line endings since that prevents
        # people from being able to source them with bash.
        # [listing gap: '\r' detection]
        writemsg(("!!! " + _("Please use dos2unix to convert line endings " + \
            "in config file: '%s'") + "\n") % mycfg, noiselevel=-1)

    # [listing gap: try; tolerant selects the forgiving lexer]
        shlex_class = _tolerant_shlex
    # [listing gap: else]
        shlex_class = shlex.shlex
    # The default shlex.sourcehook() implementation
    # only joins relative paths when the infile
    # attribute is properly set.
    lex = shlex_class(content, infile=mycfg, posix=True)
    lex.wordchars = string.digits + string.ascii_letters + \
        "~!@#$%*_\:;?,./-+{}"
    # [listing gap: quote/escape setup, sourcing enable, token loop]
    key = lex.get_token()
    # [listing gap: loop-exit and EOF checks]
        #unexpected end of file
        #lex.error_leader(self.filename,lex.lineno)
        # [listing gap: tolerant branch]
        writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
        # [listing gap: noiselevel argument; non-tolerant branch]
        raise Exception(_("ParseError: Unexpected EOF: %s: on/before line %s") % (mycfg, lex.lineno))
    # [listing gap: read '=' token, validate it]
        #lex.error_leader(self.filename,lex.lineno)
        # [listing gap: tolerant branch]
        raise Exception(_("ParseError: Invalid token "
            "'%s' (not '='): %s: line %s") % \
            (equ, mycfg, lex.lineno))
    # [listing gap: read value token, None check]
        #unexpected end of file
        #lex.error_leader(self.filename,lex.lineno)
        # [listing gap: tolerant branch]
        writemsg(_("!!! Unexpected end of config file: variable %s\n") % key,
        # [listing gap: noiselevel argument; non-tolerant branch]
        raise portage.exception.CorruptionError(_("ParseError: Unexpected EOF: %s: line %s") % (mycfg, lex.lineno))

    # [listing gap: python2 unicode normalization]
        key = _unicode_decode(key)
        val = _unicode_decode(val)

    if _invalid_var_name_re.search(key) is not None:
        # [listing gap: non-tolerant raise with this message]
            "ParseError: Invalid variable name '%s': line %s") % \
            (key, lex.lineno - 1))
        writemsg(_("!!! Invalid variable name '%s': line %s in %s\n") \
            % (key, lex.lineno - 1, mycfg), noiselevel=-1)
        # [listing gap: continue; if expand]
        mykeys[key] = varexpand(val, expand_map)
        # Later assignments may reference this variable.
        expand_map[key] = mykeys[key]
    # [listing gap: else store raw value]
    except SystemExit as e:
        # [listing gap: re-raise]
    except Exception as e:
        # [listing gap: ParseError passthrough]
        raise portage.exception.ParseError(str(e)+" in "+mycfg)
    # [listing gap: return mykeys]
#cache expansions of constant strings
# [listing gap: cexpand = {} initialization]
def varexpand(mystring, mydict=None):
    # [listing gap: mydict default handling]
    # Cache lookup is keyed on the space-padded input string.
    newstring = cexpand.get(" "+mystring, None)
    if newstring is not None:
        # [listing gap: return cached result]
    """
    new variable expansion code. Preserves quotes, handles \n, etc.
    This code is used by the configfile code, as well as others (parser)
    This would be a good bunch of code to port to C.
    """
    # Pad with a leading space so mystring[pos-1] is always valid.
    mystring = " "+mystring
    #in single, double quotes
    # [listing gap: quote-state flags, pos and newstring initialization]
    while (pos < len(mystring)):
        if (mystring[pos] == "'") and (mystring[pos-1] != "\\"):
            # [listing gap: toggle single-quote state]
            newstring = newstring+"'"
            # [listing gap]
            newstring += "'" # Quote removal is handled by shlex.
            # [listing gap: advance; continue]
        elif (mystring[pos] == '"') and (mystring[pos-1] != "\\"):
            # [listing gap: toggle double-quote state]
            newstring = newstring+'"'
            # [listing gap]
            newstring += '"' # Quote removal is handled by shlex.
            # [listing gap: advance; continue; unquoted-region handling]
        if (mystring[pos] == "\n"):
            #convert newlines to spaces
            newstring = newstring+" "
            # [listing gap: advance]
        elif (mystring[pos] == "\\"):
            # For backslash expansion, this function used to behave like
            # echo -e, but that's not needed for our purposes. We want to
            # behave like bash does when expanding a variable assignment
            # in a sourced file, in which case it performs backslash
            # removal for \\ and \$ but nothing more. It also removes
            # escaped newline characters. Note that we don't handle
            # escaped quotes here, since getconfig() uses shlex
            # to handle that earlier.
            if (pos+1 >= len(mystring)):
                # Trailing backslash at end of input: keep it literally.
                newstring = newstring+mystring[pos]
                # [listing gap: break]
                a = mystring[pos + 1]
                # [listing gap: advance two; backslash-removal cases]
                newstring = newstring + a
                # [listing gap: non-removal case keeps both characters]
                newstring = newstring + mystring[pos-2:pos]
        elif (mystring[pos] == "$") and (mystring[pos-1] != "\\"):
            # [listing gap: advance past '$']
            if mystring[pos] == "{":
                # [listing gap: braced ${VAR} handling]
            # [listing gap: else; record variable-name start]
            validchars = string.ascii_letters+string.digits+"_"
            while mystring[pos] in validchars:
                if (pos+1) >= len(mystring):
                    # [listing gap: terminate scan at end of input]
            myvarname = mystring[myvstart:pos]
            # [listing gap: braced-close check]
            if mystring[pos] != "}":
                # [listing gap: syntax-error handling]
            if len(myvarname) == 0:
                # [listing gap: syntax-error handling]
            if myvarname in mydict:
                # Substitute the variable's value.
                newstring = newstring+mydict[myvarname]
            # [listing gap: unknown variable expands to nothing]
            newstring = newstring+mystring[pos]
            # [listing gap: advance; final fall-through case]
            newstring = newstring+mystring[pos]
    # Memoize result (strip the padding space from the key's value).
    cexpand[mystring] = newstring[1:]
    # [listing gap: return newstring[1:]]
# broken and removed, but can still be imported
# [listing gap: pickle_write placeholder]
def pickle_read(filename, default=None, debug=0):
    # Best-effort unpickle of `filename`; falls back to `default` on
    # any failure. `debug` messages go through writemsg at noiselevel 1.
    # NOTE(review): pickle.load() is unsafe on untrusted data -- callers
    # must only pass trusted paths here.
    if not os.access(filename, os.R_OK):
        writemsg(_("pickle_read(): File not readable. '")+filename+"'\n", 1)
        # [listing gap: return default]
    # [listing gap: data init; try]
        myf = open(_unicode_encode(filename,
            encoding=_encodings['fs'], errors='strict'), 'rb')
        mypickle = pickle.Unpickler(myf)
        data = mypickle.load()
        # [listing gap: myf.close()]
        writemsg(_("pickle_read(): Loaded pickle. '")+filename+"'\n", 1)
    except SystemExit as e:
        # [listing gap: re-raise]
    except Exception as e:
        writemsg(_("!!! Failed to load pickle: ")+str(e)+"\n", 1)
        # [listing gap: data = default; return data]
def dump_traceback(msg, noiselevel=1):
    # Write `msg` plus the active traceback (or, with no active
    # exception, the current stack) via writemsg().
    info = sys.exc_info()
    # [listing gap: branch on whether an exception is active]
    stack = traceback.extract_stack()[:-1]
    # [listing gap: error placeholder; else branch]
    stack = traceback.extract_tb(info[2])
    # [listing gap: error message extraction]
    writemsg("\n====================================\n", noiselevel=noiselevel)
    writemsg("%s\n\n" % msg, noiselevel=noiselevel)
    for line in traceback.format_list(stack):
        writemsg(line, noiselevel=noiselevel)
    # [listing gap: only print error when one was captured]
    writemsg(error+"\n", noiselevel=noiselevel)
    writemsg("====================================\n\n", noiselevel=noiselevel)
class cmp_sort_key(object):
    """
    In python-3.0 the list.sort() method no longer has a "cmp" keyword
    argument. This class acts as an adapter which converts a cmp function
    into one that's suitable for use as the "key" keyword argument to
    list.sort(), making it easier to port code for python-3.0 compatibility.
    It works by generating key objects which use the given cmp function to
    implement their __lt__ method.

    NOTE: the standard library offers functools.cmp_to_key for the same
    purpose on modern Pythons.
    """
    __slots__ = ("_cmp_func",)

    def __init__(self, cmp_func):
        """
        @type cmp_func: callable which takes 2 positional arguments
        @param cmp_func: A cmp function.
        """
        self._cmp_func = cmp_func

    def __call__(self, lhs):
        # Wrap lhs in a key object that compares via the cmp function.
        return self._cmp_key(self._cmp_func, lhs)

    class _cmp_key(object):
        __slots__ = ("_cmp_func", "_obj")

        def __init__(self, cmp_func, obj):
            self._cmp_func = cmp_func
            # [listing gap: self._obj assignment]

        def __lt__(self, other):
            # Only key objects of the same class are comparable.
            if other.__class__ is not self.__class__:
                raise TypeError("Expected type %s, got %s" % \
                    (self.__class__, other.__class__))
            return self._cmp_func(self._obj, other._obj) < 0
    # [listing gap: function header (def unique_array(s):) elided in this
    #  listing -- name inferred from __all__ and the trailing comment]
    """lifted from python cookbook, credit: Tim Peters
    Return a list of the elements in s in arbitrary order, sans duplicates"""
    # [listing gap: fast path using a set/dict]
    # assume all elements are hashable, if so, it's linear
    # [listing gap: TypeError fallback]
    # so much for linear. abuse sort.
    # [listing gap: sort-based dedup loop]
            t[lasti] = last = t[i]
    # [listing gap: TypeError fallback]
    # blah. back to original portage.unique_array
    # [listing gap: O(n^2) brute-force membership fallback]
def unique_everseen(iterable, key=None):
    """
    List unique elements, preserving order. Remember all elements ever seen.
    Taken from itertools documentation.
    """
    # unique_everseen('AAAABBBCCDAABBB') --> A B C D
    # unique_everseen('ABBCcAD', str.lower) --> A B C D
    # [listing gap: seen-set initialization; key is None fast path]
    for element in filterfalse(seen.__contains__, iterable):
        # [listing gap: record and yield element; else branch with key]
    for element in iterable:
        # [listing gap: k = key(element); membership test; yield]
def apply_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
    stat_cached=None, follow_links=True):
    """Apply user, group, and mode bits to a file if the existing bits do not
    already match. The default behavior is to force an exact match of mode
    bits. When mask=0 is specified, mode bits on the target file are allowed
    to be a superset of the mode argument (via logical OR). When mask>0, the
    mode bits that the target file is allowed to have are restricted via
    logical XOR.
    Returns True if the permissions were modified and False otherwise."""
    # [listing gap: modified flag init]
    if stat_cached is None:
        # [listing gap: try; follow_links branch]
        stat_cached = os.stat(filename)
        # [listing gap: else]
        stat_cached = os.lstat(filename)
        except OSError as oe:
            # Translate low-level stat failures to portage exceptions.
            func_call = "stat('%s')" % filename
            if oe.errno == errno.EPERM:
                raise OperationNotPermitted(func_call)
            elif oe.errno == errno.EACCES:
                raise PermissionDenied(func_call)
            elif oe.errno == errno.ENOENT:
                raise FileNotFound(filename)
            # [listing gap: else re-raise]

    # Only chown when the requested owner/group differ from current.
    if (uid != -1 and uid != stat_cached.st_uid) or \
        (gid != -1 and gid != stat_cached.st_gid):
        # [listing gap: try; follow_links branch]
        os.chown(filename, uid, gid)
        # [listing gap: else -- don't follow symlinks]
        portage.data.lchown(filename, uid, gid)
        # [listing gap: modified = True]
        except OSError as oe:
            func_call = "chown('%s', %i, %i)" % (filename, uid, gid)
            if oe.errno == errno.EPERM:
                raise OperationNotPermitted(func_call)
            elif oe.errno == errno.EACCES:
                raise PermissionDenied(func_call)
            elif oe.errno == errno.EROFS:
                raise ReadOnlyFileSystem(func_call)
            elif oe.errno == errno.ENOENT:
                raise FileNotFound(filename)
            # [listing gap: else re-raise]

    # [listing gap: new_mode sentinel]
    st_mode = stat_cached.st_mode & 0o7777 # protect from unwanted bits
    # [listing gap: mask >= 0 branch; mode unspecified check]
    mode = 0 # Don't add any mode bits when mode is unspecified.
    # [listing gap: else normalize mode]
    if (mode & st_mode != mode) or \
        ((mask ^ st_mode) & st_mode != st_mode):
        # Requested bits missing, or disallowed bits present.
        new_mode = mode | st_mode
        new_mode = (mask ^ new_mode) & new_mode
    # [listing gap: exact-match (mask == -1) branch]
    mode = mode & 0o7777 # protect from unwanted bits
    # [listing gap: if mode != st_mode: new_mode = mode]

    # The chown system call may clear S_ISUID and S_ISGID
    # bits, so those bits are restored if necessary.
    if modified and new_mode == -1 and \
        (st_mode & stat.S_ISUID or st_mode & stat.S_ISGID):
        # [listing gap: mode == -1 branch restores st_mode directly; else]
        new_mode = mode | st_mode
        new_mode = (mask ^ new_mode) & new_mode
        # [listing gap: mask handling]
        if not (new_mode & stat.S_ISUID or new_mode & stat.S_ISGID):
            # [listing gap: nothing to restore -> reset new_mode]
    if not follow_links and stat.S_ISLNK(stat_cached.st_mode):
        # Mode doesn't matter for symlinks.
        # [listing gap: new_mode = -1]

    # [listing gap: only chmod when new_mode is set; try]
    os.chmod(filename, new_mode)
    # [listing gap: modified = True]
    except OSError as oe:
        func_call = "chmod('%s', %s)" % (filename, oct(new_mode))
        if oe.errno == errno.EPERM:
            raise OperationNotPermitted(func_call)
        elif oe.errno == errno.EACCES:
            raise PermissionDenied(func_call)
        elif oe.errno == errno.EROFS:
            raise ReadOnlyFileSystem(func_call)
        elif oe.errno == errno.ENOENT:
            raise FileNotFound(filename)
        # [listing gap: else re-raise; return modified]
def apply_stat_permissions(filename, newstat, **kwargs):
    """Apply the ownership and mode recorded in a stat result to a file.

    Thin convenience wrapper: extracts st_uid, st_gid and st_mode from
    *newstat* and delegates everything else to apply_secpass_permissions().
    """
    return apply_secpass_permissions(
        filename,
        uid=newstat.st_uid,
        gid=newstat.st_gid,
        mode=newstat.st_mode,
        **kwargs)
def apply_recursive_permissions(top, uid=-1, gid=-1,
    dirmode=-1, dirmask=-1, filemode=-1, filemask=-1, onerror=None):
    """A wrapper around apply_secpass_permissions that applies permissions
    recursively. If optional argument onerror is specified, it should be a
    function; it will be called with one argument, a PortageException instance.
    Returns True if all permissions are applied and False if some are left
    unapplied."""

    # Avoid issues with circular symbolic links, as in bug #339670.
    # [listing gap: follow_links = False; onerror default setup]
    # Default behavior is to dump errors to stderr so they won't
    # go unnoticed. Callers can pass in a quiet instance.
    # [listing gap: default onerror(e) definition]
    if isinstance(e, OperationNotPermitted):
        writemsg(_("Operation Not Permitted: %s\n") % str(e),
        # [listing gap: noiselevel argument]
    elif isinstance(e, FileNotFound):
        writemsg(_("File Not Found: '%s'\n") % str(e), noiselevel=-1)
    # [listing gap: else re-raise; all_applied flag init]
    for dirpath, dirnames, filenames in os.walk(top):
        # [listing gap: try]
        applied = apply_secpass_permissions(dirpath,
            uid=uid, gid=gid, mode=dirmode, mask=dirmask,
            follow_links=follow_links)
        # [listing gap: if not applied -> all_applied = False]
        except PortageException as e:
            # [listing gap: all_applied = False; onerror(e)]

        for name in filenames:
            # [listing gap: try]
            applied = apply_secpass_permissions(os.path.join(dirpath, name),
                uid=uid, gid=gid, mode=filemode, mask=filemask,
                follow_links=follow_links)
            # [listing gap: if not applied -> all_applied = False]
            except PortageException as e:
                # Ignore InvalidLocation exceptions such as FileNotFound
                # and DirectoryNotFound since sometimes things disappear,
                # like when adjusting permissions on DISTCC_DIR.
                if not isinstance(e, portage.exception.InvalidLocation):
                    # [listing gap: all_applied = False; onerror(e)]
    # [listing gap: return all_applied]
def apply_secpass_permissions(filename, uid=-1, gid=-1, mode=-1, mask=-1,
    stat_cached=None, follow_links=True):
    """A wrapper around apply_permissions that uses secpass and simple
    logic to apply as much of the permissions as possible without
    generating an obviously avoidable permission exception. Despite
    attempts to avoid an exception, it's possible that one will be raised
    anyway, so be prepared.
    Returns True if all permissions are applied and False if some are left
    unapplied."""

    if stat_cached is None:
        # [listing gap: try; follow_links branch]
        stat_cached = os.stat(filename)
        # [listing gap: else]
        stat_cached = os.lstat(filename)
        except OSError as oe:
            func_call = "stat('%s')" % filename
            if oe.errno == errno.EPERM:
                raise OperationNotPermitted(func_call)
            elif oe.errno == errno.EACCES:
                raise PermissionDenied(func_call)
            elif oe.errno == errno.ENOENT:
                raise FileNotFound(filename)
            # [listing gap: else re-raise]

    # [listing gap: all_applied flag init]
    if portage.data.secpass < 2:
        # Without full privileges, drop the ownership changes we cannot
        # make rather than triggering an avoidable exception.
        # [listing gap: uid change requested but not permitted?]
        uid != stat_cached.st_uid:
        # [listing gap: all_applied = False; drop uid; gid checks]
        gid != stat_cached.st_gid and \
        gid not in os.getgroups():
        # [listing gap: all_applied = False; drop gid]

    apply_permissions(filename, uid=uid, gid=gid, mode=mode, mask=mask,
        stat_cached=stat_cached, follow_links=follow_links)
    # [listing gap: return all_applied]
class atomic_ofstream(ObjectProxy):
    """Write a file atomically via os.rename(). Atomic replacement prevents
    interprocess interference and prevents corruption of the target
    file when the write is interrupted (for example, when an 'out of space'
    error occurs)."""

    def __init__(self, filename, mode='w', follow_links=True, **kargs):
        """Opens a temporary filename.pid in the same directory as filename."""
        ObjectProxy.__init__(self)
        object.__setattr__(self, '_aborted', False)
        # [listing gap: open_func selection / text-mode detection]
        kargs.setdefault('encoding', _encodings['content'])
        kargs.setdefault('errors', 'backslashreplace')

        # [listing gap: if follow_links: try]
        canonical_path = os.path.realpath(filename)
        object.__setattr__(self, '_real_name', canonical_path)
        # Temp file lives next to the target so os.rename() stays on the
        # same filesystem and remains atomic.
        tmp_name = "%s.%i" % (canonical_path, os.getpid())
        # [listing gap: try]
        object.__setattr__(self, '_file',
            open_func(_unicode_encode(tmp_name,
            encoding=_encodings['fs'], errors='strict'),
            mode=mode, **kargs))
        # [listing gap: return on success]
        except IOError as e:
            if canonical_path == filename:
                # [listing gap: re-raise]
            # Ignore this error, since it's irrelevant
            # and the below open call will produce a
            # new error if necessary.

        # Fallback: use the literal filename instead of its realpath.
        object.__setattr__(self, '_real_name', filename)
        tmp_name = "%s.%i" % (filename, os.getpid())
        object.__setattr__(self, '_file',
            open_func(_unicode_encode(tmp_name,
            encoding=_encodings['fs'], errors='strict'),
            mode=mode, **kargs))

    def _get_target(self):
        # The proxied object is the underlying temp-file handle.
        return object.__getattribute__(self, '_file')

    if sys.hexversion >= 0x3000000:

        def __getattribute__(self, attr):
            # Route our own control methods to this class; everything
            # else is delegated to the wrapped file object.
            if attr in ('close', 'abort', '__del__'):
                return object.__getattribute__(self, attr)
            return getattr(object.__getattribute__(self, '_file'), attr)

    # [listing gap: else -- python2 branch]

        # For TextIOWrapper, automatically coerce write calls to
        # unicode, in order to avoid TypeError when writing raw
        # bytes with python2.

        def __getattribute__(self, attr):
            if attr in ('close', 'abort', 'write', '__del__'):
                return object.__getattribute__(self, attr)
            return getattr(object.__getattribute__(self, '_file'), attr)

        # [listing gap: def write(self, s): header]
            f = object.__getattribute__(self, '_file')
            if isinstance(f, io.TextIOWrapper):
                s = _unicode_decode(s)
            # [listing gap: return f.write(s)]

    # [listing gap: def close(self): header]
        """Closes the temporary file, copies permissions (if possible),
        and performs the atomic replacement via os.rename(). If the abort()
        method has been called, then the temp file is closed and removed."""
        f = object.__getattribute__(self, '_file')
        real_name = object.__getattribute__(self, '_real_name')
        # [listing gap: if not f.closed: try; f.close()]
        if not object.__getattribute__(self, '_aborted'):
            # [listing gap: try -- copy target's existing permissions]
            apply_stat_permissions(f.name, os.stat(real_name))
            except OperationNotPermitted:
                # [listing gap: pass -- permissions copy is best-effort]
            except FileNotFound:
                # [listing gap: pass]
            except OSError as oe: # from the above os.stat call
                if oe.errno in (errno.ENOENT, errno.EPERM):
                    # [listing gap: pass / else re-raise]
            # The atomic replacement itself.
            os.rename(f.name, real_name)
        # [listing gap: finally]
            # Make sure we cleanup the temp file
            # even if an exception is raised.
            # [listing gap: try unlink of temp file]
            except OSError as oe:
                # [listing gap: pass]

    # [listing gap: def abort(self): header]
        """If an error occurs while writing the file, the user should
        call this method in order to leave the target file unchanged.
        This will call close() automatically."""
        if not object.__getattribute__(self, '_aborted'):
            object.__setattr__(self, '_aborted', True)
            # [listing gap: self.close()]

    # [listing gap: def __del__(self): header]
        """If the user does not explicitely call close(), it is
        assumed that an error has occurred, so we abort()."""
        # [listing gap: try]
        f = object.__getattribute__(self, '_file')
        except AttributeError:
            # [listing gap: pass / else abort when still open]
        # ensure destructor from the base class is called
        base_destructor = getattr(ObjectProxy, '__del__', None)
        if base_destructor is not None:
            base_destructor(self)
def write_atomic(file_path, content, **kwargs):
    # Write `content` to file_path atomically via atomic_ofstream,
    # translating low-level errors to portage exceptions.
    # [listing gap: f placeholder; try]
    f = atomic_ofstream(file_path, **kwargs)
    # [listing gap: f.write(content); f.close()]
    except (IOError, OSError) as e:
        # [listing gap: abort the temp file if it was opened]
        func_call = "write_atomic('%s')" % file_path
        if e.errno == errno.EPERM:
            raise OperationNotPermitted(func_call)
        elif e.errno == errno.EACCES:
            raise PermissionDenied(func_call)
        elif e.errno == errno.EROFS:
            raise ReadOnlyFileSystem(func_call)
        elif e.errno == errno.ENOENT:
            raise FileNotFound(file_path)
        # [listing gap: else re-raise]
def ensure_dirs(dir_path, **kwargs):
    """Create a directory and call apply_permissions.
    Returns True if a directory is created or the permissions needed to be
    modified, and False otherwise.

    This function's handling of EEXIST errors makes it useful for atomic
    directory creation, in which multiple processes may be competing to
    create the same directory.
    """
    # [listing gap: created_dir flag; try]
    os.makedirs(dir_path)
    # [listing gap: created_dir = True]
    except OSError as oe:
        func_call = "makedirs('%s')" % dir_path
        if oe.errno in (errno.EEXIST,):
            # Lost the creation race (or already exists): not an error.
            # [listing gap: pass]
        if os.path.isdir(dir_path):
            # NOTE: DragonFly raises EPERM for makedir('/')
            # and that is supposed to be ignored here.
            # Also, sometimes mkdir raises EISDIR on FreeBSD
            # and we want to ignore that too (bug #187518).
            # [listing gap: pass]
        elif oe.errno == errno.EPERM:
            raise OperationNotPermitted(func_call)
        elif oe.errno == errno.EACCES:
            raise PermissionDenied(func_call)
        elif oe.errno == errno.EROFS:
            raise ReadOnlyFileSystem(func_call)
        # [listing gap: else re-raise; if kwargs: try]
    perms_modified = apply_permissions(dir_path, **kwargs)
    # [listing gap: except OperationNotPermitted -- best effort]
    perms_modified = False
    # [listing gap: else branch]
    return created_dir or perms_modified
class LazyItemsDict(UserDict):
	"""A mapping object that behaves like a standard dict except that it allows
	for lazy initialization of values via callable objects. Lazy items can be
	overwritten and deleted just as normal items."""

	__slots__ = ('lazy_items',)

	def __init__(self, *args, **kwargs):

		# item_key -> _LazyItem; keys present here have not been
		# evaluated yet (their UserDict value is a None placeholder).
		self.lazy_items = {}
		UserDict.__init__(self, *args, **kwargs)

	def addLazyItem(self, item_key, value_callable, *pargs, **kwargs):
		"""Add a lazy item for the given key. When the item is requested,
		value_callable will be called with *pargs and **kwargs arguments."""
		self.lazy_items[item_key] = \
			self._LazyItem(value_callable, pargs, kwargs, False)
		# make it show up in self.keys(), etc...
		UserDict.__setitem__(self, item_key, None)

	def addLazySingleton(self, item_key, value_callable, *pargs, **kwargs):
		"""This is like addLazyItem except value_callable will only be called
		a maximum of 1 time and the result will be cached for future requests."""
		self.lazy_items[item_key] = \
			self._LazyItem(value_callable, pargs, kwargs, True)
		# make it show up in self.keys(), etc...
		UserDict.__setitem__(self, item_key, None)

	def update(self, *args, **kwargs):
		if len(args) > 1:
			raise TypeError(
				"expected at most 1 positional argument, got " + \
				repr(len(args)))
		map_obj = None
		if args:
			map_obj = args[0]
		if map_obj is None:
			pass
		elif isinstance(map_obj, LazyItemsDict):
			# Preserve laziness: copy placeholders for the source's
			# lazy items instead of forcing their evaluation.
			for k in map_obj:
				if k in map_obj.lazy_items:
					UserDict.__setitem__(self, k, None)
				else:
					UserDict.__setitem__(self, k, map_obj[k])
			self.lazy_items.update(map_obj.lazy_items)
		else:
			UserDict.update(self, map_obj)
		if kwargs:
			UserDict.update(self, kwargs)

	def __getitem__(self, item_key):
		if item_key in self.lazy_items:
			lazy_item = self.lazy_items[item_key]
			pargs = lazy_item.pargs
			if pargs is None:
				pargs = ()
			kwargs = lazy_item.kwargs
			if kwargs is None:
				kwargs = {}
			result = lazy_item.func(*pargs, **kwargs)
			if lazy_item.singleton:
				# Cache the computed value; __setitem__ removes the
				# lazy entry so func is never called again.
				self[item_key] = result
			return result

		else:
			return UserDict.__getitem__(self, item_key)

	def __setitem__(self, item_key, value):
		if item_key in self.lazy_items:
			del self.lazy_items[item_key]
		UserDict.__setitem__(self, item_key, value)

	def __delitem__(self, item_key):
		if item_key in self.lazy_items:
			del self.lazy_items[item_key]
		UserDict.__delitem__(self, item_key)

	def clear(self):
		self.lazy_items.clear()
		UserDict.clear(self)

	def copy(self):
		return self.__copy__()

	def __copy__(self):
		return self.__class__(self)

	def __deepcopy__(self, memo=None):
		"""
		This forces evaluation of each contained lazy item, and deepcopy of
		the result. A TypeError is raised if any contained lazy item is not
		a singleton, since it is not necessarily possible for the behavior
		of this type of item to be safely preserved.
		"""
		if memo is None:
			memo = {}
		result = self.__class__()
		memo[id(self)] = result
		for k in self:
			k_copy = deepcopy(k, memo)
			lazy_item = self.lazy_items.get(k)
			if lazy_item is not None:
				if not lazy_item.singleton:
					raise TypeError(_unicode_decode("LazyItemsDict " + \
						"deepcopy is unsafe with lazy items that are " + \
						"not singletons: key=%s value=%s") % (k, lazy_item,))
			UserDict.__setitem__(result, k_copy, deepcopy(self[k], memo))
		return result

	class _LazyItem(object):
		# Record of a deferred computation: callable, its arguments,
		# and whether the result should be cached (singleton).

		__slots__ = ('func', 'pargs', 'kwargs', 'singleton')

		def __init__(self, func, pargs, kwargs, singleton):

			# Normalize empty argument containers to None to save
			# memory across many instances.
			if not pargs:
				pargs = None
			if not kwargs:
				kwargs = None

			self.func = func
			self.pargs = pargs
			self.kwargs = kwargs
			self.singleton = singleton

		def __copy__(self):
			return self.__class__(self.func, self.pargs,
				self.kwargs, self.singleton)

		def __deepcopy__(self, memo=None):
			"""
			Override this since the default implementation can fail silently,
			leaving some attributes unset.
			"""
			if memo is None:
				memo = {}
			result = self.__copy__()
			memo[id(self)] = result
			result.func = deepcopy(self.func, memo)
			result.pargs = deepcopy(self.pargs, memo)
			result.kwargs = deepcopy(self.kwargs, memo)
			result.singleton = deepcopy(self.singleton, memo)
			return result
class ConfigProtect(object):
	"""Decides whether a path falls under CONFIG_PROTECT, taking
	CONFIG_PROTECT_MASK exclusions into account."""

	def __init__(self, myroot, protect_list, mask_list):
		self.myroot = myroot
		self.protect_list = protect_list
		self.mask_list = mask_list
		self.updateprotect()

	def updateprotect(self):
		"""Update internal state for isprotected() calls. Nonexistent paths
		are ignored."""

		os = _os_merge

		self.protect = []
		self._dirs = set()
		for x in self.protect_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			try:
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protect.append(ppath)
			except OSError:
				# If it doesn't exist, there's no need to protect it.
				pass

		self.protectmask = []
		for x in self.mask_list:
			ppath = normalize_path(
				os.path.join(self.myroot, x.lstrip(os.path.sep)))
			try:
				# Use lstat so that anything, even a broken symlink,
				# can be masked.
				if stat.S_ISDIR(os.lstat(ppath).st_mode):
					self._dirs.add(ppath)
				self.protectmask.append(ppath)
				# Now use stat in case this is a symlink to a directory.
				if stat.S_ISDIR(os.stat(ppath).st_mode):
					self._dirs.add(ppath)
			except OSError:
				# If it doesn't exist, there's no need to mask it.
				pass

	def isprotected(self, obj):
		"""Returns True if obj is protected, False otherwise. The caller must
		ensure that obj is normalized with a single leading slash. A trailing
		slash is optional for directories."""
		masked = 0
		protected = 0
		sep = os.path.sep
		for ppath in self.protect:
			if len(ppath) > masked and obj.startswith(ppath):
				if ppath in self._dirs:
					if obj != ppath and not obj.startswith(ppath + sep):
						# /etc/foo does not match /etc/foobaz
						continue
				elif obj != ppath:
					# force exact match when CONFIG_PROTECT lists a
					# non-directory
					continue
				protected = len(ppath)
				#config file management
				for pmpath in self.protectmask:
					if len(pmpath) >= protected and obj.startswith(pmpath):
						if pmpath in self._dirs:
							if obj != pmpath and \
								not obj.startswith(pmpath + sep):
								# /etc/foo does not match /etc/foobaz
								continue
						elif obj != pmpath:
							# force exact match when CONFIG_PROTECT_MASK lists
							# a non-directory
							continue
						#skip, it's in the mask
						masked = len(pmpath)
		return protected > masked
def new_protect_filename(mydest, newmd5=None, force=False):
	"""Resolves a config-protect filename for merging, optionally
	using the last filename if the md5 matches. If force is True,
	then a new filename will be generated even if mydest does not
	exist yet.
	(dest,md5) ==> 'string'            --- path_to_target_filename
	(dest)     ==> ('next', 'highest') --- next_target and most-recent_target
	"""

	# config protection filename format:
	# ._cfg0000_foo
	# 0123456789012

	os = _os_merge

	prot_num = -1
	last_pfile = ""

	if not force and \
		not os.path.exists(mydest):
		return mydest

	real_filename = os.path.basename(mydest)
	real_dirname = os.path.dirname(mydest)
	for pfile in os.listdir(real_dirname):
		if pfile[0:5] != "._cfg":
			continue
		if pfile[10:] != real_filename:
			continue
		try:
			new_prot_num = int(pfile[5:9])
			if new_prot_num > prot_num:
				prot_num = new_prot_num
				last_pfile = pfile
		except ValueError:
			# Not a well-formed ._cfgNNNN_ name; ignore it.
			continue
	prot_num = prot_num + 1

	new_pfile = normalize_path(os.path.join(real_dirname,
		"._cfg" + str(prot_num).zfill(4) + "_" + real_filename))
	old_pfile = normalize_path(os.path.join(real_dirname, last_pfile))
	if last_pfile and newmd5:
		try:
			last_pfile_md5 = portage.checksum._perform_md5_merge(old_pfile)
		except FileNotFound:
			# The file suddenly disappeared or it's a broken symlink.
			pass
		else:
			if last_pfile_md5 == newmd5:
				# Identical content already queued; reuse it rather
				# than creating a duplicate ._cfg file.
				return old_pfile
	return new_pfile
def find_updated_config_files(target_root, config_protect):
	"""
	Return a tuple of configuration files that needs to be updated.
	The tuple contains lists organized like this:
	[ protected_dir, file_list ]
	If the protected config isn't a protected_dir but a protected_file, list is:
	[ protected_file, None ]
	If no configuration files needs to be updated, None is returned
	"""

	os = _os_merge

	if config_protect:
		# directories with some protect files in them
		for x in config_protect:
			files = []

			x = os.path.join(target_root, x.lstrip(os.path.sep))
			if not os.access(x, os.W_OK):
				continue
			try:
				mymode = os.lstat(x).st_mode
			except OSError:
				continue

			if stat.S_ISLNK(mymode):
				# We want to treat it like a directory if it
				# is a symlink to an existing directory.
				try:
					real_mode = os.stat(x).st_mode
					if stat.S_ISDIR(real_mode):
						mymode = real_mode
				except OSError:
					pass

			# NOTE(review): the path is interpolated into a shell command
			# string; a path containing a single quote would break the
			# quoting. Safe for the usual CONFIG_PROTECT values, but worth
			# migrating to an argv-list spawn.
			if stat.S_ISDIR(mymode):
				mycommand = \
					"find '%s' -name '.*' -type d -prune -o -name '._cfg????_*'" % x
			else:
				mycommand = "find '%s' -maxdepth 1 -name '._cfg????_%s'" % \
					os.path.split(x.rstrip(os.path.sep))
			mycommand += " ! -name '.*~' ! -iname '.*.bak' -print0"
			a = subprocess_getstatusoutput(mycommand)

			if a[0] == 0:
				files = a[1].split('\0')
				# split always produces an empty string as the last element
				if files and not files[-1]:
					del files[-1]
				if files:
					if stat.S_ISDIR(mymode):
						yield (x, files)
					else:
						yield (x, None)
def getlibpaths(root, env=None):
	"""Return a list of paths that are used for library lookups.

	@param root: filesystem root under which etc/ld.so.conf is read
	@param env: mapping to read LD_LIBRARY_PATH from; defaults to
		os.environ when None
	@rtype: list
	@return: normalized, non-empty library search paths
	"""
	if env is None:
		env = os.environ
	# the following is based on the information from ld.so(8)
	rval = env.get("LD_LIBRARY_PATH", "").split(":")
	rval.extend(grabfile(os.path.join(root, "etc", "ld.so.conf")))
	rval.append("/usr/lib")
	rval.append("/lib")

	# Drop empty entries produced by splitting an empty/odd
	# LD_LIBRARY_PATH value.
	return [normalize_path(x) for x in rval if x]