From: dol-sen
Date: Sun, 24 Apr 2011 22:24:01 +0000 (-0700)
Subject: convert to gentoolkit's standard tab indent.
X-Git-Tag: gentoolkit-0.3.0.5~46
X-Git-Url: http://git.tremily.us/?a=commitdiff_plain;h=f5f252ee461ee514bd13a4025ca47b268212ddb6;p=gentoolkit.git

convert to gentoolkit's standard tab indent.
---

diff --git a/pym/gentoolkit/revdep_rebuild/analyse.py b/pym/gentoolkit/revdep_rebuild/analyse.py
index c1c755b..a9ac753 100644
--- a/pym/gentoolkit/revdep_rebuild/analyse.py
+++ b/pym/gentoolkit/revdep_rebuild/analyse.py
@@ -16,179 +16,179 @@ from settings import SETTINGS

def prepare_checks(files_to_check, libraries, bits):
	''' Calls scanelf for all files_to_check, then returns the found
	libraries and the files that depend on them.
	'''

	libs = []	# libs found by scanelf
	dependencies = []	# dependencies[i] lists the files (from files_to_check)
				# that need libs[i]

	for line in scan(['-M', str(bits), '-nBF', '%F %n'], files_to_check, SETTINGS['CMD_MAX_ARGS']):
		r = line.strip().split(' ')
		if len(r) < 2:	# no dependencies?
			continue

		deps = r[1].split(',')
		for d in deps:
			if d in libs:
				i = libs.index(d)
				dependencies[i].append(r[0])
			else:
				libs.append(d)
				dependencies.append([r[0],])
	return (libs, dependencies)
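
As an illustration of the inverted index this function builds, here is a
minimal, self-contained sketch; the scanelf output lines are made up:

	# hypothetical output of: scanelf -M 64 -nBF '%F %n' <files>
	sample = ['/usr/bin/foo libpng14.so.14,libc.so.6',
		'/usr/bin/bar libc.so.6']
	libs, dependencies = [], []
	for line in sample:
		r = line.strip().split(' ')
		if len(r) < 2:
			continue
		for d in r[1].split(','):
			if d in libs:
				dependencies[libs.index(d)].append(r[0])
			else:
				libs.append(d)
				dependencies.append([r[0]])
	# libs == ['libpng14.so.14', 'libc.so.6']
	# dependencies == [['/usr/bin/foo'], ['/usr/bin/foo', '/usr/bin/bar']]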


def extract_dependencies_from_la(la, libraries, to_check, logger):
	broken = []
	for f in la:
		if not os.path.exists(f):
			continue

		for line in open(f, 'r').readlines():
			line = line.strip()
			if line.startswith('dependency_libs='):
				m = re.match("dependency_libs='([^']+)'", line)
				if m is not None:
					for el in m.group(1).split(' '):
						el = el.strip()
						if len(el) < 1 or el.startswith('-'):
							continue

						if el in la or el in libraries:
							pass
						else:
							if to_check:
								_break = False
								for tc in to_check:
									if tc in el:
										_break = True
										break
								if not _break:
									continue

							logger.info(yellow(' * ') + f + ' is broken (requires: ' + bold(el) + ')')
							broken.append(f)
	return broken
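
A libtool .la file records its link-time dependencies in a single
dependency_libs line. A small sketch of the extraction step, using a made-up
entry; compiler/linker flags (anything starting with '-') are skipped,
exactly as above:

	import re

	line = "dependency_libs=' -L/usr/lib /usr/lib/libfoo.la -lm'"
	m = re.match("dependency_libs='([^']+)'", line)
	entries = [el.strip() for el in m.group(1).split(' ')
		if el.strip() and not el.strip().startswith('-')]
	# entries == ['/usr/lib/libfoo.la']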


def find_broken(found_libs, system_libraries, to_check):
	''' Search for broken libraries.
	Check if system_libraries contains found_libs, where
	system_libraries is a list of absolute paths and found_libs
	is a list of library names.
	'''

	# joining the libraries and searching the resulting string
	# is much faster than nested loops
	broken = []
	sl = '|'.join(system_libraries) + '|'

	if not to_check:
		for f in found_libs:
			if f+'|' not in sl:
				broken.append(found_libs.index(f))
	else:
		for tc in to_check:
			for f in found_libs:
				if tc in f:	# and f+'|' not in sl:
					broken.append(found_libs.index(f))

	return broken
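
The join turns many list-membership tests into substring searches on one long
string; the trailing separator appended above is what lets the final entry
match too. A toy demonstration with made-up names:

	system_libraries = ['/usr/lib/libfoo.so.1', '/usr/lib/libbar.so.2']
	sl = '|'.join(system_libraries) + '|'
	print 'libbar.so.2|' in sl	# True: present on the system
	print 'libbaz.so.3|' in sl	# False: would be reported broken

Note that this is a plain substring test, so a library name that happens to
be a suffix of another path could in principle match spuriously.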


def main_checks(found_libs, broken, dependencies, logger):
	''' Checks for broken dependencies.
	found_libs has to be the same list returned by prepare_checks,
	broken is the list of broken library indices returned by find_broken,
	dependencies is the value returned by prepare_checks
	'''

	broken_pathes = []

	for b in broken:
		f = found_libs[b]
		logger.info('Broken files that require: ' + bold(f))
		for d in dependencies[b]:
			logger.info(yellow(' * ') + d)
			broken_pathes.append(d)
	return broken_pathes


def analyse(logger=logging, libraries=None, la_libraries=None,
		libraries_links=None, binaries=None, _libs_to_check=set()):
	"""Main program body.  It will collect all info and determine the
	pkgs needing rebuilding.

	@param logger: logger used for logging messages, instance of logging.Logger
				class. Can be logging (RootLogger).
	@param _libs_to_check: set of libraries that should be checked exclusively
	@rtype list: list of pkgs that need rebuilding
	"""

	if libraries and la_libraries and libraries_links and binaries:
		logger.info(blue(' * ') + bold('Found a valid cache, skipping collecting phase'))
	else:
		#TODO: add partial cache (for ex. only libraries) when found for some reason

		logger.warn(green(' * ') + bold('Collecting system binaries and libraries'))
		bin_dirs, lib_dirs = prepare_search_dirs(logger)

		masked_dirs, masked_files, ld = parse_revdep_config()
		lib_dirs = lib_dirs.union(ld)
		bin_dirs = bin_dirs.union(ld)
		masked_dirs = masked_dirs.union(set(['/lib/modules', '/lib32/modules', '/lib64/modules',]))

		logger.info(green(' * ') + bold('Collecting dynamic linking information'))
		libraries, la_libraries, libraries_links, symlink_pairs = collect_libraries_from_dir(lib_dirs, masked_dirs, logger)
		binaries = collect_binaries_from_dir(bin_dirs, masked_dirs, logger)

		if SETTINGS['USE_TMP_FILES']:
			save_cache(to_save={'libraries':libraries, 'la_libraries':la_libraries, 'libraries_links':libraries_links, 'binaries':binaries})

	logger.debug('Found ' + str(len(libraries)) + ' libraries (+' + str(len(libraries_links)) + ' symlinks) and ' + str(len(binaries)) + ' binaries')

	logger.warn(green(' * ') + bold('Checking dynamic linking consistency'))
	logger.debug('Search for ' + str(len(binaries)+len(libraries)) + ' within ' + str(len(libraries)+len(libraries_links)))
	libs_and_bins = libraries+binaries

	found_libs = []
	dependencies = []

	_bits, linkg = platform.architecture()
	if _bits.startswith('32'):
		bits = 32
	elif _bits.startswith('64'):
		bits = 64

	for av_bits in glob.glob('/lib[0-9]*') or ('/lib32',):
		bits = int(av_bits[4:])
		_libraries = scan(['-M', str(bits), '-BF', '%F'], libraries+libraries_links, SETTINGS['CMD_MAX_ARGS'])

		found_libs, dependencies = prepare_checks(libs_and_bins, _libraries, bits)

		broken = find_broken(found_libs, _libraries, _libs_to_check)
		broken_la = extract_dependencies_from_la(la_libraries, _libraries, _libs_to_check, logger)

		bits /= 2
		bits = int(bits)

		broken_pathes = main_checks(found_libs, broken, dependencies, logger)
		broken_pathes += broken_la

	logger.warn(green(' * ') + bold('Assign files to packages'))

	return assign_packages(broken_pathes, logger)


if __name__ == '__main__':
	print "This script shouldn't be called directly"
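
A hedged usage sketch for the entry point; it assumes the package is
importable as gentoolkit.revdep_rebuild (the repository's pym/ layout) and
that portage is installed:

	import logging
	from gentoolkit.revdep_rebuild.analyse import analyse

	logging.basicConfig(level=logging.INFO)
	pkgs = analyse(logger=logging, _libs_to_check=set(['libpng']))
	for pkg in pkgs:
		print pkg	# category/package names that need rebuilding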

diff --git a/pym/gentoolkit/revdep_rebuild/assign.py b/pym/gentoolkit/revdep_rebuild/assign.py
index 6dcf6e3..c4ae750 100644
--- a/pym/gentoolkit/revdep_rebuild/assign.py
+++ b/pym/gentoolkit/revdep_rebuild/assign.py
@@ -10,84 +10,84 @@ from settings import SETTINGS

def assign_packages(broken, logger=logging):
	''' Finds and returns the packages that own the files listed in broken.
	broken is a list of files
	'''
	assigned = set()
	for group in os.listdir(SETTINGS['PKG_DIR']):
		for pkg in os.listdir(SETTINGS['PKG_DIR'] + group):
			f = SETTINGS['PKG_DIR'] + group + '/' + pkg + '/CONTENTS'
			if os.path.exists(f):
				try:
					with open(f, 'r') as cnt:
						for line in cnt.readlines():
							m = re.match('^obj (/[^ ]+)', line)
							if m is not None:
								m = m.group(1)
								if m in broken:
									found = group+'/'+pkg
									if found not in assigned:
										assigned.add(found)
										logger.info('\t' + m + ' -> ' + bold(found))
				except Exception as e:
					logger.warn(red(' !! Failed to read ' + f))

	return assigned
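
Each installed package records its files in a CONTENTS file under
var/db/pkg/<category>/<package>/. The obj lines carry a path, a checksum and
an mtime; the regex keeps only the path. A sketch with a made-up line:

	import re

	line = 'obj /usr/lib/libfoo.so.1 d41d8cd98f00b204e9800998ecf8427e 1303679994'
	m = re.match('^obj (/[^ ]+)', line)
	# m.group(1) == '/usr/lib/libfoo.so.1'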


def get_best_match(cpv, cp, logger):
	"""Tries to find another version of the pkg with the same slot
	as the deprecated installed version. Failing that, attempts to get any
	version of the same app.

	@param cpv: string
	@param cp: string
	@rtype tuple: ([cpv,...], SLOT)
	"""

	slot = portage.db[portage.root]["vartree"].dbapi.aux_get(cpv, ["SLOT"])
	logger.warn(yellow('Warning: ebuild "' + cpv + '" not found.'))
	logger.info('Looking for %s:%s' %(cp, slot))
	try:
		m = portdb.match('%s:%s' %(cp, slot))
	except portage.exception.InvalidAtom:
		m = None

	if not m:
		logger.warn(red('!!') + ' ' + yellow('Could not find ebuild for %s:%s' %(cp, slot)))
		slot = ['']
		m = portdb.match(cp)
		if not m:
			logger.warn(red('!!') + ' ' + yellow('Could not find ebuild for ' + cp))
	return m, slot


def get_slotted_cps(cpvs, logger):
	"""Uses portage to reduce the cpv list into a cp:slot list and returns it
	"""
	from portage.versions import catpkgsplit
	from portage import portdb

	cps = []
	for cpv in cpvs:
		parts = catpkgsplit(cpv)
		cp = parts[0] + '/' + parts[1]
		try:
			slot = portdb.aux_get(cpv, ["SLOT"])
		except KeyError:
			m, slot = get_best_match(cpv, cp, logger)
			if not m:
				logger.warn(red("Installed package: %s is no longer available" %cp))
				continue

		if slot[0]:
			cps.append(cp + ":" + slot[0])
		else:
			cps.append(cp)

	return cps


if __name__ == '__main__':
	print 'Nothing to call here'

diff --git a/pym/gentoolkit/revdep_rebuild/cache.py b/pym/gentoolkit/revdep_rebuild/cache.py
index 421e22d..616ef79 100644
--- a/pym/gentoolkit/revdep_rebuild/cache.py
+++ b/pym/gentoolkit/revdep_rebuild/cache.py
@@ -8,96 +8,96 @@ from settings import SETTINGS

def read_cache(temp_path=SETTINGS['DEFAULT_TMP_DIR']):
	''' Reads the cache information needed by the analyse function.
	This function does not check whether the files exist or how old
	they are; check_temp_files should be called first.
	@param temp_path: directory where all temp files should reside
	@return tuple with values of: libraries, la_libraries, libraries_links, binaries
	'''

	ret = {'libraries':[], 'la_libraries':[], 'libraries_links':[], 'binaries':[]}
	try:
		for key,val in ret.iteritems():
			f = open(os.path.join(temp_path, key))
			for line in f.readlines():
				val.append(line.strip())
			f.close()
	except EnvironmentError:
		pass

	return (ret['libraries'], ret['la_libraries'], ret['libraries_links'], ret['binaries'])


def save_cache(logger=logging, to_save={}, temp_path=SETTINGS['DEFAULT_TMP_DIR']):
	''' Tries to store caching information.
	@param logger
	@param to_save has to be a dict with keys: libraries, la_libraries, libraries_links and binaries
	'''

	if not os.path.exists(temp_path):
		os.makedirs(temp_path)

	try:
		f = open(os.path.join(temp_path, 'timestamp'), 'w')
		f.write(str(int(time.time())))
		f.close()

		for key,val in to_save.iteritems():
			f = open(os.path.join(temp_path, key), 'w')
			for line in val:
				f.write(line + '\n')
			f.close()
	except Exception as ex:
		logger.warn(red('Could not save cache: %s' %str(ex)))
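
A round-trip sketch of the cache: save_cache writes a timestamp file plus one
file per key, one entry per line, and read_cache loads them back in the order
of the tuple below (the temp path is the default from SETTINGS):

	data = {'libraries': ['/usr/lib/libfoo.so.1'], 'la_libraries': [],
		'libraries_links': [], 'binaries': ['/usr/bin/foo']}
	save_cache(to_save=data, temp_path='/tmp/revdep-rebuild')
	libraries, la_libraries, libraries_links, binaries = \
		read_cache(temp_path='/tmp/revdep-rebuild')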


def check_temp_files(temp_path=SETTINGS['DEFAULT_TMP_DIR'], max_delay=3600):
	''' Checks if the temporary files from the previous run are still
	available and not too old.
	@param temp_path is the directory where the temporary files should be found
	@param max_delay is the maximum age (in seconds) at which those files
			are still considered fresh and usable
	@return True when the files can be used, False when they do not
			exist or are too old
	'''

	if not os.path.exists(temp_path) or not os.path.isdir(temp_path):
		return False

	timestamp_path = os.path.join(temp_path, 'timestamp')
	if not os.path.exists(timestamp_path) or not os.path.isfile(timestamp_path):
		return False

	try:
		f = open(timestamp_path)
		timestamp = int(f.readline())
		f.close()
	except (EnvironmentError, ValueError):
		return False

	diff = int(time.time()) - timestamp
	return max_delay > diff
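
check_temp_files gates cache reuse on the age of the single timestamp file,
so a caller can choose between the cache and a fresh scan; a minimal sketch:

	if check_temp_files('/tmp/revdep-rebuild', max_delay=3600):
		libraries, la_libraries, libraries_links, binaries = read_cache()
	else:
		# fall back to a full collect_* scan, then save_cache()
		pass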


if __name__ == '__main__':
	print 'Preparing cache ... '

	from collect import *

	bin_dirs, lib_dirs = prepare_search_dirs()

	masked_dirs, masked_files, ld = parse_revdep_config()
	lib_dirs = lib_dirs.union(ld)
	bin_dirs = bin_dirs.union(ld)
	masked_dirs = masked_dirs.union(set(['/lib/modules', '/lib32/modules', '/lib64/modules',]))

	libraries, la_libraries, libraries_links, symlink_pairs = collect_libraries_from_dir(lib_dirs, masked_dirs)
	binaries = collect_binaries_from_dir(bin_dirs, masked_dirs)

	save_cache(to_save={'libraries':libraries, 'la_libraries':la_libraries, 'libraries_links':libraries_links, 'binaries':binaries})

	print 'Done.'

diff --git a/pym/gentoolkit/revdep_rebuild/collect.py b/pym/gentoolkit/revdep_rebuild/collect.py
index 4a0714b..613749a 100644
--- a/pym/gentoolkit/revdep_rebuild/collect.py
+++ b/pym/gentoolkit/revdep_rebuild/collect.py
@@ -11,236 +11,236 @@ from settings import SETTINGS

def parse_conf(conf_file=SETTINGS['DEFAULT_LD_FILE'], visited=None, logger=logging):
	''' Parses the supplied conf_file for library paths.
	conf_file is a file or list of files to parse
	visited is the set of files already parsed
	'''
	lib_dirs = set()
	to_parse = set()

	if isinstance(conf_file, basestring):
		conf_file = [conf_file]

	for conf in conf_file:
		try:
			with open(conf) as f:
				for line in f.readlines():
					line = line.strip()
					if line.startswith('#'):
						continue
					elif line.startswith('include'):
						include_line = line.split()[1:]
						for included in include_line:
							if not included.startswith('/'):
								path = os.path.join(os.path.dirname(conf), \
									included)
							else:
								path = included

							to_parse = to_parse.union(glob.glob(path))
					else:
						lib_dirs.add(line)
		except EnvironmentError:
			logger.warn(yellow('Error when parsing file %s' %conf))

	if visited is None:
		visited = set()

	visited = visited.union(conf_file)
	to_parse = to_parse.difference(visited)
	if to_parse:
		lib_dirs = lib_dirs.union(parse_conf(to_parse, visited, logger=logger))

	return lib_dirs
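
parse_conf follows the ld.so.conf format used by glibc, including include
directives with glob patterns; relative includes are resolved against the
directory of the file being parsed, and the visited set prevents include
loops. A typical (sample) file:

	# /etc/ld.so.conf
	include ld.so.conf.d/*.conf
	/usr/local/lib
	/usr/lib64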


def prepare_search_dirs(logger=logging):
	''' Looks up the search dirs. Returns a tuple with two lists:
	(list_of_bin_dirs, list_of_lib_dirs)
	'''

	bin_dirs = set(['/bin', '/usr/bin', ])
	lib_dirs = set(['/lib', '/usr/lib', ])

	with open(os.path.join(portage.root, SETTINGS['DEFAULT_ENV_FILE']), 'r') as f:
		for line in f.readlines():
			line = line.strip()
			m = re.match("^export (ROOT)?PATH='([^']+)'", line)
			if m is not None:
				bin_dirs = bin_dirs.union(set(m.group(2).split(':')))

	lib_dirs = parse_conf(logger=logger)
	return (bin_dirs, lib_dirs)


def parse_revdep_config():
	''' Parses all files under /etc/revdep-rebuild/ and returns a
	tuple of: (masked_dirs, masked_files, search_dirs)'''

	search_dirs = set()
	masked_dirs = set()
	masked_files = set()

	for f in os.listdir(SETTINGS['REVDEP_CONFDIR']):
		for line in open(os.path.join(SETTINGS['REVDEP_CONFDIR'], f)):
			line = line.strip()
			if not line.startswith('#'):	#check for comments first; we do not want to regex all lines
				m = re.match('LD_LIBRARY_MASK=\\"([^"]+)\\"', line)
				if m is not None:
					s = m.group(1).split(' ')
					masked_files = masked_files.union(s)
					continue
				m = re.match('SEARCH_DIRS_MASK=\\"([^"]+)\\"', line)
				if m is not None:
					s = m.group(1).split(' ')
					for ss in s:
						masked_dirs = masked_dirs.union(glob.glob(ss))
					continue
				m = re.match('SEARCH_DIRS=\\"([^"]+)\\"', line)
				if m is not None:
					s = m.group(1).split()
					for ss in s:
						search_dirs = search_dirs.union(glob.glob(ss))
					continue

	return (masked_dirs, masked_files, search_dirs)
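
The recognized variables mirror the classic revdep-rebuild shell script's
configuration. A sample drop-in file (the name and values are purely
illustrative):

	# /etc/revdep-rebuild/61-sample
	SEARCH_DIRS="/opt/sample/lib"
	SEARCH_DIRS_MASK="/opt/sample/binary-blobs"
	LD_LIBRARY_MASK="libsample.so.1"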


def collect_libraries_from_dir(dirs, mask, logger=logging):
	''' Collects all libraries from the specified list of directories.
	mask is a list of paths omitted from the scan; an entry can be either
	a single file or an entire directory.
	Returns a tuple composed of: list of libraries, list of .la files,
	list of symlinks, and (symlink_id, library_id) pairs for resolving
	dependencies.
	'''

	found_directories = []	# directories found; lets us reduce the number of recursive calls
	found_files = []
	found_symlinks = []
	found_la_files = []	# la libraries
	symlink_pairs = []	# list of pairs symlink_id->library_id

	for d in dirs:
		if d in mask:
			continue

		try:
			for l in os.listdir(d):
				l = os.path.join(d, l)
				if l in mask:
					continue

				if os.path.isdir(l):
					if os.path.islink(l):
						#we do not want to scan symlink directories
						pass
					else:
						found_directories.append(l)
				elif os.path.isfile(l):
					if l.endswith('.so') or '.so.' in l:
						if l in found_files or l in found_symlinks:
							continue

						if os.path.islink(l):
							found_symlinks.append(l)
							abs_path = os.path.realpath(l)
							if abs_path in found_files:
								i = found_files.index(abs_path)
							else:
								found_files.append(abs_path)
								i = len(found_files)-1
							symlink_pairs.append((len(found_symlinks)-1, i,))
						else:
							found_files.append(l)
						continue
					elif l.endswith('.la'):
						if l in found_la_files:
							continue

						found_la_files.append(l)
					else:
						# sometimes there are binaries in libs' subdir, for example in nagios
						if not os.path.islink(l):
							if l in found_files or l in found_symlinks:
								continue
							prv = os.stat(l)[stat.ST_MODE]
							if prv & stat.S_IXUSR == stat.S_IXUSR or \
									prv & stat.S_IXGRP == stat.S_IXGRP or \
									prv & stat.S_IXOTH == stat.S_IXOTH:
								found_files.append(l)
		except Exception as ex:
			logger.debug(yellow('Exception during collecting libraries: %s' %str(ex)))

	if found_directories:
		f,a,l,p = collect_libraries_from_dir(found_directories, mask, logger)
		found_files+=f
		found_la_files+=a
		found_symlinks+=l
		symlink_pairs+=p

	return (found_files, found_la_files, found_symlinks, symlink_pairs)


def collect_binaries_from_dir(dirs, mask, logger=logging):
	''' Collects all binaries from the specified list of directories.
	mask is a list of paths omitted from the scan; an entry can be either
	a single file or an entire directory.
	Returns a list of binaries.
	'''

	found_directories = []	# directories found; lets us reduce the number of recursive calls
	found_files = []

	for d in dirs:
		if d in mask:
			continue

		try:
			for l in os.listdir(d):
				l = os.path.join(d, l)
				if l in mask:
					continue

				if os.path.isdir(l):
					if os.path.islink(l):
						#we do not want to scan symlink directories
						pass
					else:
						found_directories.append(l)
				elif os.path.isfile(l):
					#we're looking for binaries, and with binaries we do not need links, thus we can optimize a bit
					if not os.path.islink(l):
						prv = os.stat(l)[stat.ST_MODE]
						if prv & stat.S_IXUSR == stat.S_IXUSR or \
								prv & stat.S_IXGRP == stat.S_IXGRP or \
								prv & stat.S_IXOTH == stat.S_IXOTH:
							found_files.append(l)
		except Exception as e:
			logger.debug(yellow('Exception during binaries collecting: %s' %str(e)))

	if found_directories:
		found_files += collect_binaries_from_dir(found_directories, mask, logger)

	return found_files


if __name__ == '__main__':
	bin_dirs, lib_dirs = prepare_search_dirs(logging)

	masked_dirs, masked_files, ld = parse_revdep_config()
	lib_dirs = lib_dirs.union(ld)
	bin_dirs = bin_dirs.union(ld)
	masked_dirs = masked_dirs.union(set(['/lib/modules', '/lib32/modules', '/lib64/modules',]))

	libraries, la_libraries, libraries_links, symlink_pairs = collect_libraries_from_dir(lib_dirs, masked_dirs, logging)
	binaries = collect_binaries_from_dir(bin_dirs, masked_dirs, logging)

	print 'Found: %i binaries and %i libraries.' %(len(binaries), len(libraries))

diff --git a/pym/gentoolkit/revdep_rebuild/settings.py b/pym/gentoolkit/revdep_rebuild/settings.py
index ea6b1ec..b47cb3e 100644
--- a/pym/gentoolkit/revdep_rebuild/settings.py
+++ b/pym/gentoolkit/revdep_rebuild/settings.py
@@ -4,23 +4,23 @@ import os
import portage

SETTINGS = {
	'DEFAULT_LD_FILE': os.path.join(portage.root, 'etc/ld.so.conf'),
	'DEFAULT_ENV_FILE': os.path.join(portage.root, 'etc/profile.env'),
	'REVDEP_CONFDIR': os.path.join(portage.root, 'etc/revdep-rebuild/'),
	'PKG_DIR': os.path.join(portage.root, 'var/db/pkg/'),
	'DEFAULT_TMP_DIR': '/tmp/revdep-rebuild',	#cache default location

	'USE_TMP_FILES': True,	#if program should use temporary files from previous run
	'CMD_MAX_ARGS': 1000,	# maximum number of files to be parsed at once

	'PRETEND': False,	#pretend only
	'EXACT': False,		#exact package version

	'IS_DEV': True,		#True for dev. version, False for stable
	#used when IS_DEV is True, False forces to call emerge with --pretend
	# can be set True from the cli with the --no-pretend option
	'NO_PRETEND': False,
	'VERBOSITY': 1,
	}

diff --git a/pym/gentoolkit/revdep_rebuild/stuff.py b/pym/gentoolkit/revdep_rebuild/stuff.py
index d153908..b1b6709 100644
--- a/pym/gentoolkit/revdep_rebuild/stuff.py
+++ b/pym/gentoolkit/revdep_rebuild/stuff.py
@@ -6,41 +6,41 @@ import portage

# util. functions
def call_program(args):
	''' Calls program with specified parameters and returns stdout '''
	subp = subprocess.Popen(args, stdout=subprocess.PIPE, \
		stderr=subprocess.PIPE)
	stdout, stderr = subp.communicate()
	return stdout


def scan(params, files, max_args):
	''' Calls scanelf with given params and files to scan.
	@param params is a list of parameters that should be passed to scanelf.
	@param files list of files to scan.
	@param max_args number of files to process at once

	When the file count is greater than max_args, the call is split
	into several invocations.

	@return scanelf output (joined if it was called several times)
	'''
	out = []
	for i in range(0, len(files), max_args):
		out += call_program(['scanelf'] + params + files[i:i+max_args]).strip().split('\n')
	return out
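
scan() splits the file list so that each scanelf invocation stays under the
argument limit; the slicing logic in isolation, with a toy batch size:

	files = ['f%d' % i for i in range(7)]
	max_args = 3
	batches = [files[i:i+max_args] for i in range(0, len(files), max_args)]
	# batches == [['f0', 'f1', 'f2'], ['f3', 'f4', 'f5'], ['f6']]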


def exithandler(signum, frame):
	sys.exit(1)


def get_masking_status(ebuild):
	try:
		status = portage.getmaskingstatus(ebuild)
	except KeyError:
		status = ['deprecated']
	return status


if __name__ == '__main__':
	print "There is nothing to run here."