From: Brian Dolbec
Date: Sat, 19 Jan 2013 05:12:07 +0000 (-0800)
Subject: Initial creation of a defaults file and Split up support.py
X-Git-Url: http://git.tremily.us/?a=commitdiff_plain;h=bbab9584d15c03e8befab2dc49fe3c958cc5a0fc;p=catalyst.git

Initial creation of a defaults file and Split up support.py

Split out the hash and contents code into their own classes and files.
---

diff --git a/catalyst/contents.py b/catalyst/contents.py
new file mode 100644
index 00000000..0ecabbd1
--- /dev/null
+++ b/catalyst/contents.py
@@ -0,0 +1,72 @@
+
+from collections import namedtuple
+from subprocess import Popen, PIPE
+
+from support import CatalystError, warn
+
+
+class ContentsMap(object):
+	'''Class to encompass all known commands to list
+	the contents of an archive'''
+
+
+	fields = ['func', 'cmd']
+
+
+	def __init__(self, defs=None):
+		'''Class init
+
+		@param defs: dictionary of Key:[function, cmd]
+		'''
+		if defs is None:
+			defs = {}
+		#self.contents = {}
+		self.contents_map = {}
+
+		# create the archive type namedtuple classes
+		for name in list(defs):
+			#obj = self.contents[name] = namedtuple(name, self.fields)
+			obj = namedtuple(name, self.fields)
+			obj.__slots__ = ()
+			self.contents_map[name] = obj._make(defs[name])
+		del obj
+
+
+	def generate_contents(self, file_, getter="auto", verbose=False):
+		try:
+			archive = getter
+			if archive == 'auto' and file_.endswith('.iso'):
+				archive = 'isoinfo_l'
+			if (archive in ['tar_tv','auto']):
+				if file_.endswith('.tgz') or file_.endswith('.tar.gz'):
+					archive = 'tar_tvz'
+				elif file_.endswith('.tbz2') or file_.endswith('.tar.bz2'):
+					archive = 'tar_tvj'
+				elif file_.endswith('.tar'):
+					archive = 'tar_tv'
+
+			if archive == 'auto':
+				warn('File %r has unknown type for automatic detection.'
+					% (file_, ))
+				return None
+			else:
+				getter = archive
+				func = getattr(self, '_%s_' % self.contents_map[getter].func)
+				return func(file_, self.contents_map[getter].cmd, verbose)
+		except:
+			raise CatalystError,\
+				"Error generating contents, is appropriate utility " +\
+				"(%s) installed on your system?" \
+				% (self.contents_map[getter].cmd)
+
+
+	@staticmethod
+	def _calc_contents_(file_, cmd, verbose):
+		_cmd = (cmd % {'file': file_ }).split()
+		proc = Popen(_cmd, stdout=PIPE, stderr=PIPE)
+		results = proc.communicate()
+		result = "\n".join(results)
+		if verbose:
+			print result
+		return result
+
diff --git a/catalyst/defaults.py b/catalyst/defaults.py
new file mode 100644
index 00000000..50d1d185
--- /dev/null
+++ b/catalyst/defaults.py
@@ -0,0 +1,68 @@
+
+
+# these should never be touched
+required_build_targets = ["generic_target", "generic_stage_target"]
+
+# new build types should be added here
+valid_build_targets = ["stage1_target", "stage2_target", "stage3_target",
+	"stage4_target", "grp_target", "livecd_stage1_target", "livecd_stage2_target",
+	"embedded_target", "tinderbox_target", "snapshot_target", "netboot_target",
+	"netboot2_target"
+	]
+
+required_config_file_values = ["storedir", "sharedir", "distdir", "portdir"]
+
+valid_config_file_values = required_config_file_values[:]
+valid_config_file_values.extend(["PKGCACHE", "KERNCACHE", "CCACHE", "DISTCC",
+	"ICECREAM", "ENVSCRIPT", "AUTORESUME", "FETCH", "CLEAR_AUTORESUME",
+	"options", "DEBUG", "VERBOSE", "PURGE", "PURGEONLY", "SNAPCACHE",
+	"snapshot_cache", "hash_function", "digests", "contents", "SEEDCACHE"
+	])
+
+verbosity = 1
+
+# Use hash_utils.HashMap.fields for the value legend
+# fields = ["func", "cmd", "args", "id"]
+hash_definitions = {
+	"adler32" :["calc_hash2", "shash", ["-a", "ADLER32"], "ADLER32"],
+	"crc32" :["calc_hash2", "shash", ["-a", "CRC32"], "CRC32"],
+	"crc32b" :["calc_hash2", "shash", ["-a", "CRC32B"], "CRC32B"],
+	"gost" :["calc_hash2", "shash", ["-a", "GOST"], "GOST"],
+	"haval128" :["calc_hash2", "shash", ["-a", "HAVAL128"], "HAVAL128"],
+	"haval160" :["calc_hash2", "shash", ["-a", "HAVAL160"], "HAVAL160"],
+	"haval192" :["calc_hash2", "shash", ["-a", "HAVAL192"], "HAVAL192"],
+	"haval224" :["calc_hash2", "shash", ["-a", "HAVAL224"], "HAVAL224"],
+	"haval256" :["calc_hash2", "shash", ["-a", "HAVAL256"], "HAVAL256"],
+	"md2" :["calc_hash2", "shash", ["-a", "MD2"], "MD2"],
+	"md4" :["calc_hash2", "shash", ["-a", "MD4"], "MD4"],
+	"md5" :["calc_hash2", "shash", ["-a", "MD5"], "MD5"],
+	"ripemd128":["calc_hash2", "shash", ["-a", "RIPEMD128"], "RIPEMD128"],
+	"ripemd160":["calc_hash2", "shash", ["-a", "RIPEMD160"], "RIPEMD160"],
+	"ripemd256":["calc_hash2", "shash", ["-a", "RIPEMD256"], "RIPEMD256"],
+	"ripemd320":["calc_hash2", "shash", ["-a", "RIPEMD320"], "RIPEMD320"],
+	"sha1" :["calc_hash2", "shash", ["-a", "SHA1"], "SHA1"],
+	"sha224" :["calc_hash2", "shash", ["-a", "SHA224"], "SHA224"],
+	"sha256" :["calc_hash2", "shash", ["-a", "SHA256"], "SHA256"],
+	"sha384" :["calc_hash2", "shash", ["-a", "SHA384"], "SHA384"],
+	"sha512" :["calc_hash2", "shash", ["-a", "SHA512"], "SHA512"],
+	"snefru128":["calc_hash2", "shash", ["-a", "SNEFRU128"], "SNEFRU128"],
+	"snefru256":["calc_hash2", "shash", ["-a", "SNEFRU256"], "SNEFRU256"],
+	"tiger" :["calc_hash2", "shash", ["-a", "TIGER"], "TIGER"],
+	"tiger128" :["calc_hash2", "shash", ["-a", "TIGER128"], "TIGER128"],
+	"tiger160" :["calc_hash2", "shash", ["-a", "TIGER160"], "TIGER160"],
+	"whirlpool":["calc_hash2", "shash", ["-a", "WHIRLPOOL"], "WHIRLPOOL"],
+	}
+
+# use contents.ContentsMap.fields for the value legend
+# Key:[function, cmd]
+contents_definitions = {
+	# 'find' is disabled because it requires the source path, which is not
+	# always available
+	#"find" :["calc_contents","find %(path)s"],
+	"tar_tv":["calc_contents","tar tvf %(file)s"],
+	"tar_tvz":["calc_contents","tar tvzf %(file)s"],
+	"tar_tvj":["calc_contents","tar -I lbzip2 -tvf %(file)s"],
+	"isoinfo_l":["calc_contents","isoinfo -l -i %(file)s"],
+	# isoinfo_f should be a last resort only
+	"isoinfo_f":["calc_contents","isoinfo -f -i %(file)s"],
+}
diff --git a/catalyst/hash_utils.py b/catalyst/hash_utils.py
new file mode 100644
index 00000000..0dfe98b6
--- /dev/null
+++ b/catalyst/hash_utils.py
@@ -0,0 +1,106 @@
+
+import os
+from collections import namedtuple
+from subprocess import Popen, PIPE
+
+from support import CatalystError
+
+
+class HashMap(object):
+	'''Class for handling
+	Catalyst's hash generation'''
+
+	fields = ["func", "cmd", "args", "id"]
+
+
+	def __init__(self, hashes=None):
+		'''Class init
+
+		@param hashes: dictionary of Key:[function, cmd, cmd_args, Print string]
+		@param fields: list of ordered field names for the hashes
+			eg: ["func", "cmd", "args", "id"]
+		'''
+		if hashes is None:
+			hashes = {}
+		#self.hashes = {}
+		self.hash_map = {}
+
+		# create the hash definition namedtuple classes
+		for name in list(hashes):
+			#obj = self.hashes[name] = namedtuple(name, self.fields)
+			obj = namedtuple(name, self.fields)
+			obj.__slots__ = ()
+			self.hash_map[name] = obj._make(hashes[name])
+		del obj
+
+
+	def generate_hash(self, file_, hash_="crc32", verbose=False):
+		'''Preferred method of generating a hash for the passed in file_
+
+		@param file_: the file to generate the hash for
+		@param hash_: the hash algorithm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		try:
+			return getattr(self, self.hash_map[hash_].func)(
+				file_,
+				hash_,
+				verbose
+				)
+		except:
+			raise CatalystError,"Error generating hash, is appropriate " + \
+				"utility installed on your system?"
+
+
+	def calc_hash(self, file_, hash_, verbose=False):
+		'''
+		Calculate the hash for "file_"
+
+		@param file_: the file to generate the hash for
+		@param hash_: the hash algorithm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		_hash = self.hash_map[hash_]
+		args = [_hash.cmd]
+		args.extend(_hash.args)
+		args.append(file_)
+		source = Popen(args, stdout=PIPE)
+		# communicate() returns the full stdout string, not a list of lines
+		mylines = source.communicate()[0]
+		mylines = mylines.split()
+		result = mylines[0]
+		if verbose:
+			print _hash.id + " (%s) = %s" % (file_, result)
+		return result
+
+
+	def calc_hash2(self, file_, hash_type, verbose=False):
+		'''
+		Calculate the hash for "file_"
+
+		@param file_: the file to generate the hash for
+		@param hash_type: the hash algorithm to use
+		@param verbose: boolean
+		@returns the hash result
+		'''
+		_hash = self.hash_map[hash_type]
+		args = [_hash.cmd]
+		args.extend(_hash.args)
+		args.append(file_)
+		#print("DEBUG: calc_hash2; args =", args)
+		source = Popen(args, stdout=PIPE)
+		output = source.communicate()
+		lines = output[0].split('\n')
+		#print("DEBUG: calc_hash2; output =", output)
+		header = lines[0]
+		h_f = lines[1].split()
+		hash_result = h_f[0]
+		short_file = os.path.split(h_f[1])[1]
+		result = header + "\n" + hash_result + " " + short_file + "\n"
+		if verbose:
+			print header + " (%s) = %s" % (short_file, result)
+		return result
+
+
diff --git a/catalyst/main.py b/catalyst/main.py
index f319bf30..2bf75659 100644
--- a/catalyst/main.py
+++ b/catalyst/main.py
@@ -17,8 +17,15 @@ sys.path.append(__selfpath__ + "/modules")
 import catalyst.config
 import catalyst.util
 
-from catalyst.support import (required_build_targets,
-	valid_build_targets, CatalystError, hash_map, find_binary, LockInUse)
+from catalyst.support import CatalystError, find_binary, LockInUse
+from defaults import (required_build_targets, valid_build_targets,
+	hash_definitions
+	)
+
+from hash_utils import HashMap
+from defaults import contents_definitions
+from contents import ContentsMap
+
 
 __maintainer__="Catalyst "
 __version__="2.0.12.2"
@@ -184,7 +191,8 @@ def parse_config(myconfig):
 	if "digests" in myconf:
 		conf_values["digests"]=myconf["digests"]
 	if "contents" in myconf:
-		conf_values["contents"]=myconf["contents"]
+		# replace '-' with '_' (for compatibility with existing configs)
+		conf_values["contents"] = myconf["contents"].replace("-", '_')
 
 	if "envscript" in myconf:
 		print "Envscript support enabled."
@@ -225,9 +233,10 @@ def import_modules():
 				raise CatalystError,"Can't find " + x + ".py plugin in " + \
 					module_dir
 
-	except ImportError:
+	except ImportError as e:
 		print "!!! catalyst: Python modules not found in "+\
 			module_dir + "; exiting."
+		print e
 		sys.exit(1)
 	return targetmap
 
@@ -354,40 +363,47 @@ def main():
 	# import configuration file and import our main module using those settings
 	parse_config(myconfig)
 
-	# Start checking that digests are valid now that the hash_map was imported
-	# from catalyst.support
+	# initialize our contents generator
+	contents_map = ContentsMap(contents_definitions)
+	conf_values["contents_map"] = contents_map
+
+	# initialize our hash generator
+	hash_map = HashMap(hash_definitions)
+	conf_values["hash_map"] = hash_map
+
+	# Start checking that digests are valid now that hash_map is initialized
 	if "digests" in conf_values:
 		for i in conf_values["digests"].split():
-			if i not in hash_map:
+			if i not in hash_definitions:
 				print
 				print i+" is not a valid digest entry"
 				print "Valid digest entries:"
-				print hash_map.keys()
+				print hash_definitions.keys()
 				print
 				print "Catalyst aborting...."
				sys.exit(2)
-			if find_binary(hash_map[i][1]) == None:
+			if find_binary(hash_map.hash_map[i].cmd) == None:
 				print
-				print "digest="+i
-				print "\tThe "+hash_map[i][1]+\
+				print "digest=" + i
+				print "\tThe " + hash_map.hash_map[i].cmd + \
 					" binary was not found. It needs to be in your system path"
 				print
 				print "Catalyst aborting...."
 				sys.exit(2)
 	if "hash_function" in conf_values:
-		if conf_values["hash_function"] not in hash_map:
+		if conf_values["hash_function"] not in hash_definitions:
 			print
 			print conf_values["hash_function"]+\
 				" is not a valid hash_function entry"
 			print "Valid hash_function entries:"
-			print hash_map.keys()
+			print hash_definitions.keys()
 			print
 			print "Catalyst aborting...."
 			sys.exit(2)
-		if find_binary(hash_map[conf_values["hash_function"]][1]) == None:
+		if find_binary(hash_map.hash_map[conf_values["hash_function"]].cmd) == None:
 			print
 			print "hash_function="+conf_values["hash_function"]
-			print "\tThe "+hash_map[conf_values["hash_function"]][1]+\
+			print "\tThe "+hash_map.hash_map[conf_values["hash_function"]].cmd + \
 				" binary was not found. It needs to be in your system path"
 			print
 			print "Catalyst aborting...."
diff --git a/catalyst/support.py b/catalyst/support.py
index 072b9850..fe2c7007 100644
--- a/catalyst/support.py
+++ b/catalyst/support.py
@@ -1,6 +1,9 @@
 import sys,string,os,types,re,signal,traceback,time
 #import md5,sha
+
+from catalyst.defaults import verbosity
+
 selinux_capable = False
 #userpriv_capable = (os.getuid() == 0)
 #fakeroot_capable = False
 
@@ -60,120 +63,6 @@ def hexify(str):
 	return r
 # hexify()
 
-def generate_contents(file,contents_function="auto",verbose=False):
-	try:
-		_ = contents_function
-		if _ == 'auto' and file.endswith('.iso'):
-			_ = 'isoinfo-l'
-		if (_ in ['tar-tv','auto']):
-			if file.endswith('.tgz') or file.endswith('.tar.gz'):
-				_ = 'tar-tvz'
-			elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
-				_ = 'tar-tvj'
-			elif file.endswith('.tar'):
-				_ = 'tar-tv'
-
-		if _ == 'auto':
-			warn('File %r has unknown type for automatic detection.' % (file, ))
-			return None
-		else:
-			contents_function = _
-			_ = contents_map[contents_function]
-			return _[0](file,_[1],verbose)
-	except:
-		raise CatalystError,\
-			"Error generating contents, is appropriate utility (%s) installed on your system?" \
-			% (contents_function, )
-
-def calc_contents(file,cmd,verbose):
-	args={ 'file': file }
-	cmd=cmd % dict(args)
-	a=os.popen(cmd)
-	mylines=a.readlines()
-	a.close()
-	result="".join(mylines)
-	if verbose:
-		print result
-	return result
-
-# This has map must be defined after the function calc_content
-# It is possible to call different functions from this but they must be defined
-# before hash_map
-# Key,function,cmd
-contents_map={
-	# 'find' is disabled because it requires the source path, which is not
-	# always available
-	#"find" :[calc_contents,"find %(path)s"],
-	"tar-tv":[calc_contents,"tar tvf %(file)s"],
-	"tar-tvz":[calc_contents,"tar tvzf %(file)s"],
-	"tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
-	"isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
-	# isoinfo-f should be a last resort only
-	"isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
-}
-
-def generate_hash(file,hash_function="crc32",verbose=False):
-	try:
-		return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
-			hash_map[hash_function][3],verbose)
-	except:
-		raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
-
-def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
-	a=os.popen(cmd+" "+cmd_args+" "+file)
-	mylines=a.readlines()
-	a.close()
-	mylines=mylines[0].split()
-	result=mylines[0]
-	if verbose:
-		print id_string+" (%s) = %s" % (file, result)
-	return result
-
-def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
-	a=os.popen(cmd+" "+cmd_args+" "+file)
-	header=a.readline()
-	mylines=a.readline().split()
-	hash=mylines[0]
-	short_file=os.path.split(mylines[1])[1]
-	a.close()
-	result=header+hash+" "+short_file+"\n"
-	if verbose:
-		print header+" (%s) = %s" % (short_file, result)
-	return result
-
-# This has map must be defined after the function calc_hash
-# It is possible to call different functions from this but they must be defined
-# before hash_map
-# Key,function,cmd,cmd_args,Print string
-hash_map={
-	"adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
-	"crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
-	"crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
-	"gost":[calc_hash2,"shash","-a GOST","GOST"],\
-	"haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
-	"haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
-	"haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
-	"haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
-	"haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
-	"md2":[calc_hash2,"shash","-a MD2","MD2"],\
-	"md4":[calc_hash2,"shash","-a MD4","MD4"],\
-	"md5":[calc_hash2,"shash","-a MD5","MD5"],\
-	"ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
-	"ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
-	"ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
-	"ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
-	"sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
-	"sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
-	"sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
-	"sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
-	"sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
-	"snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
-	"snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
-	"tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
-	"tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
-	"tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
-	"whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
-	}
 
 def read_from_clst(file):
 	line = ''
@@ -190,39 +79,6 @@ def read_from_clst(file):
 	return myline
 # read_from_clst
 
-# these should never be touched
-required_build_targets=["generic_target","generic_stage_target"]
-
-# new build types should be added here
-valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
-		"livecd_stage1_target","livecd_stage2_target","embedded_target",
-		"tinderbox_target","snapshot_target","netboot_target","netboot2_target"]
-
-required_config_file_values=["storedir","sharedir","distdir","portdir"]
-valid_config_file_values=required_config_file_values[:]
-valid_config_file_values.append("PKGCACHE")
-valid_config_file_values.append("KERNCACHE")
-valid_config_file_values.append("CCACHE")
-valid_config_file_values.append("DISTCC")
-valid_config_file_values.append("ICECREAM")
-valid_config_file_values.append("ENVSCRIPT")
-valid_config_file_values.append("AUTORESUME")
-valid_config_file_values.append("FETCH")
-valid_config_file_values.append("CLEAR_AUTORESUME")
-valid_config_file_values.append("options")
-valid_config_file_values.append("DEBUG")
-valid_config_file_values.append("VERBOSE")
-valid_config_file_values.append("PURGE")
-valid_config_file_values.append("PURGEONLY")
-valid_config_file_values.append("SNAPCACHE")
-valid_config_file_values.append("snapshot_cache")
-valid_config_file_values.append("hash_function")
-valid_config_file_values.append("digests")
-valid_config_file_values.append("contents")
-valid_config_file_values.append("SEEDCACHE")
-
-verbosity=1
-
 def list_bashify(mylist):
 	if type(mylist)==types.StringType:
 		mypack=[mylist]
diff --git a/catalyst/targets/generic_stage_target.py b/catalyst/targets/generic_stage_target.py
index 17956a38..6b6e801a 100644
--- a/catalyst/targets/generic_stage_target.py
+++ b/catalyst/targets/generic_stage_target.py
@@ -4,6 +4,7 @@ from generic_target import *
 from stat import *
 from catalyst.lock import LockDir
 
+
 class generic_stage_target(generic_target):
 	"""
 	This class does all of the chroot setup, copying of files, etc. It is
@@ -1629,6 +1630,7 @@ class generic_stage_target(generic_target):
 		if os.path.exists(file+".CONTENTS"):
 			os.remove(file+".CONTENTS")
 		if "contents" in self.settings:
+			contents_map = self.settings["contents_map"]
 			if os.path.exists(file):
 				myf=open(file+".CONTENTS","w")
 				keys={}
@@ -1637,7 +1639,7 @@ class generic_stage_target(generic_target):
 					array=keys.keys()
 					array.sort()
 					for j in array:
-						contents=generate_contents(file,contents_function=j,\
+						contents = contents_map.generate_contents(file, j,
 							verbose="VERBOSE" in self.settings)
 						if contents:
 							myf.write(contents)
@@ -1647,6 +1649,7 @@ class generic_stage_target(generic_target):
 		if os.path.exists(file+".DIGESTS"):
 			os.remove(file+".DIGESTS")
 		if "digests" in self.settings:
+			hash_map = self.settings["hash_map"]
 			if os.path.exists(file):
 				myf=open(file+".DIGESTS","w")
 				keys={}
@@ -1657,14 +1660,14 @@ class generic_stage_target(generic_target):
 				for f in [file, file+'.CONTENTS']:
 					if os.path.exists(f):
 						if "all" in array:
-							for k in hash_map.keys():
-								hash=generate_hash(f,hash_function=k,verbose=\
-									"VERBOSE" in self.settings)
+							for k in list(hash_map.hash_map):
+								hash = hash_map.generate_hash(f,hash_=k,
+									verbose = "VERBOSE" in self.settings)
 								myf.write(hash)
 						else:
 							for j in array:
-								hash=generate_hash(f,hash_function=j,verbose=\
-									"VERBOSE" in self.settings)
+								hash = hash_map.generate_hash(f,hash_=j,
+									verbose = "VERBOSE" in self.settings)
 								myf.write(hash)
 				myf.close()
diff --git a/catalyst/targets/stage2_target.py b/catalyst/targets/stage2_target.py
index 15acdeec..6377f5da 100644
--- a/catalyst/targets/stage2_target.py
+++ b/catalyst/targets/stage2_target.py
@@ -23,8 +23,11 @@ class stage2_target(generic_stage_target):
 		if os.path.isfile(self.settings["source_path"]):
 			if os.path.exists(self.settings["source_path"]):
 				# XXX: Is this even necessary if the previous check passes?
-				self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\
-					hash_function=self.settings["hash_function"],verbose=False)
+				self.settings["source_path_hash"] = \
+					self.settings["hash_map"].generate_hash(
+						self.settings["source_path"],\
+						hash_=self.settings["hash_function"],
+						verbose=False)
 		print "Source path set to "+self.settings["source_path"]
 		if os.path.isdir(self.settings["source_path"]):
 			print "\tIf this is not desired, remove this directory or turn of seedcache in the options of catalyst.conf"
diff --git a/etc/catalyst.conf b/etc/catalyst.conf
index b74c546c..0a0cf33d 100644
--- a/etc/catalyst.conf
+++ b/etc/catalyst.conf
@@ -15,12 +15,12 @@ digests="md5 sha1 sha512 whirlpool"
 
 # Creates a .CONTENTS file listing the contents of the file. Pick from any of
 # the supported options below:
 # auto - strongly recommended
-# tar-tv - does 'tar tvf FILE'
-# tar-tvz - does 'tar tvzf FILE'
-# tar-tvy - does 'tar tvyf FILE'
-# isoinfo-l - does 'isoinfo -l -i FILE'
-# isoinfo-f - does 'isoinfo -f -i FILE'
-# 'isoinfo-f' is the only option not chosen by the automatic algorithm.
+# tar_tv - does 'tar tvf FILE'
+# tar_tvz - does 'tar tvzf FILE'
+# tar_tvy - does 'tar tvyf FILE'
+# isoinfo_l - does 'isoinfo -l -i FILE'
+# isoinfo_f - does 'isoinfo -f -i FILE'
+# 'isoinfo_f' is the only option not chosen by the automatic algorithm.
 # If this variable is empty, no .CONTENTS will be generated at all.
 contents="auto"
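
For reference, a minimal usage sketch of the new HashMap and ContentsMap classes.
This is not part of the patch above: the imports mirror the style used in main.py,
the stage tarball path is a hypothetical placeholder, and it assumes the external
utilities named in the definitions (shash, tar with lbzip2) are installed.

    from defaults import hash_definitions, contents_definitions
    from hash_utils import HashMap
    from contents import ContentsMap

    # Build the lookup objects once, then reuse them for every file.
    hash_map = HashMap(hash_definitions)
    contents_map = ContentsMap(contents_definitions)

    # '/tmp/stage3.tar.bz2' is a placeholder path for illustration only.
    digest = hash_map.generate_hash("/tmp/stage3.tar.bz2",
        hash_="sha512", verbose=True)
    listing = contents_map.generate_contents("/tmp/stage3.tar.bz2",
        getter="auto", verbose=True)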