--- /dev/null
+
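+"""
+hash_utils.py
+
+Utility class for generating file digests by calling an external hashing
+utility (currently "shash" for every algorithm in HASH_DEFINITIONS).
+"""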
+import os
+from collections import namedtuple
+from subprocess import Popen, PIPE
+
+from support import CatalystError
+
+
+# Use HashMap.fields for the value legend
+# fields = ["func", "cmd", "args", "id"]
+HASH_DEFINITIONS = {
+ "adler32" :["calc_hash2", "shash", ["-a", "ADLER32"], "ADLER32"],
+ "crc32" :["calc_hash2", "shash", ["-a", "CRC32"], "CRC32"],
+ "crc32b" :["calc_hash2", "shash", ["-a", "CRC32B"], "CRC32B"],
+ "gost" :["calc_hash2", "shash", ["-a", "GOST"], "GOST"],
+ "haval128" :["calc_hash2", "shash", ["-a", "HAVAL128"], "HAVAL128"],
+ "haval160" :["calc_hash2", "shash", ["-a", "HAVAL160"], "HAVAL160"],
+ "haval192" :["calc_hash2", "shash", ["-a", "HAVAL192"], "HAVAL192"],
+ "haval224" :["calc_hash2", "shash", ["-a", "HAVAL224"], "HAVAL224"],
+ "haval256" :["calc_hash2", "shash", ["-a", "HAVAL256"], "HAVAL256"],
+ "md2" :["calc_hash2", "shash", ["-a", "MD2"], "MD2"],
+ "md4" :["calc_hash2", "shash", ["-a", "MD4"], "MD4"],
+ "md5" :["calc_hash2", "shash", ["-a", "MD5"], "MD5"],
+ "ripemd128":["calc_hash2", "shash", ["-a", "RIPEMD128"], "RIPEMD128"],
+ "ripemd160":["calc_hash2", "shash", ["-a", "RIPEMD160"], "RIPEMD160"],
+ "ripemd256":["calc_hash2", "shash", ["-a", "RIPEMD256"], "RIPEMD256"],
+ "ripemd320":["calc_hash2", "shash", ["-a", "RIPEMD320"], "RIPEMD320"],
+ "sha1" :["calc_hash2", "shash", ["-a", "SHA1"], "SHA1"],
+ "sha224" :["calc_hash2", "shash", ["-a", "SHA224"], "SHA224"],
+ "sha256" :["calc_hash2", "shash", ["-a", "SHA256"], "SHA256"],
+ "sha384" :["calc_hash2", "shash", ["-a", "SHA384"], "SHA384"],
+ "sha512" :["calc_hash2", "shash", ["-a", "SHA512"], "SHA512"],
+ "snefru128":["calc_hash2", "shash", ["-a", "SNEFRU128"], "SNEFRU128"],
+ "snefru256":["calc_hash2", "shash", ["-a", "SNEFRU256"], "SNEFRU256"],
+ "tiger" :["calc_hash2", "shash", ["-a", "TIGER"], "TIGER"],
+ "tiger128" :["calc_hash2", "shash", ["-a", "TIGER128"], "TIGER128"],
+ "tiger160" :["calc_hash2", "shash", ["-a", "TIGER160"], "TIGER160"],
+ "whirlpool":["calc_hash2", "shash", ["-a", "WHIRLPOOL"], "WHIRLPOOL"],
+ }
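+# All of the definitions above shell out to the external "shash" utility via
+# calc_hash2; new algorithms can be supported by adding entries in the same
+# [func, cmd, args, id] form.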
+
+
+class HashMap(object):
+	'''Class for handling Catalyst's hash generation'''
+
+ fields = ["func", "cmd", "args", "id"]
+
+
+ def __init__(self, hashes=None):
+ '''Class init
+
+		@param hashes: dictionary of Key:[function, cmd, cmd_args, Print string]
+			values; each value list is mapped onto the ordered field names in
+			the class attribute "fields", eg: ["func", "cmd", "args", "id"]
+ '''
+ if hashes is None:
+ hashes = {}
+ self.hash_map = {}
+
+ # create the hash definition namedtuple classes
+ for name in list(hashes):
+ obj = namedtuple(name, self.fields)
+ obj.__slots__ = ()
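+			# _make() maps the ordered definition list onto the named fields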
+ self.hash_map[name] = obj._make(hashes[name])
+ del obj
+
+
+ def generate_hash(self, file_, hash_="crc32", verbose=False):
+		'''Preferred method of generating a hash for the passed in file_
+
+ @param file_: the file to generate the hash for
+		@param hash_: the hash algorithm to use
+ @param verbose: boolean
+ @returns the hash result
+ '''
+ try:
+ return getattr(self, self.hash_map[hash_].func)(
+ file_,
+ hash_,
+ verbose
+ )
+		except Exception:
+			raise CatalystError, "Error generating hash, is the appropriate " + \
+				"utility installed on your system?"
+
+
+ def calc_hash(self, file_, hash_, verbose=False):
+ '''
+ Calculate the hash for "file_"
+
+ @param file_: the file to generate the hash for
+		@param hash_: the hash algorithm to use
+ @param verbose: boolean
+ @returns the hash result
+ '''
+ _hash = self.hash_map[hash_]
+ args = [_hash.cmd]
+ args.extend(_hash.args)
+ args.append(file_)
+ source = Popen(args, stdout=PIPE)
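+		# shash/md5sum-style single-line output: the digest is the first
+		# whitespace-separated token of the first line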
+		output = source.communicate()[0]
+		mylines = output.split('\n')[0].split()
+		result = mylines[0]
+ if verbose:
+ print _hash.id + " (%s) = %s" % (file_, result)
+ return result
+
+
+ def calc_hash2(self, file_, hash_type, verbose=False):
+ '''
+ Calculate the hash for "file_"
+
+ @param file_: the file to generate the hash for
+		@param hash_type: the hash algorithm to use
+ @param verbose: boolean
+ @returns the hash result
+ '''
+ _hash = self.hash_map[hash_type]
+ args = [_hash.cmd]
+ args.extend(_hash.args)
+ args.append(file_)
+ #print("DEBUG: calc_hash2; args =", args)
+ source = Popen(args, stdout=PIPE)
+ output = source.communicate()
+ lines = output[0].split('\n')
+ #print("DEBUG: calc_hash2; output =", output)
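+		# shash prints a header line followed by "<digest>  <path>"; keep the
+		# header but record only the file's basename alongside the digest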
+ header = lines[0]
+ h_f = lines[1].split()
+ hash_result = h_f[0]
+ short_file = os.path.split(h_f[1])[1]
+ result = header + "\n" + hash_result + " " + short_file + "\n"
+ if verbose:
+ print header + " (%s) = %s" % (short_file, result)
+ return result
+
+
+
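+# Illustrative usage sketch (assumes the external "shash" utility is
+# installed; the file path below is hypothetical):
+#
+#	from hash_utils import HashMap, HASH_DEFINITIONS
+#
+#	hash_map = HashMap(HASH_DEFINITIONS)
+#	digest = hash_map.generate_hash("/tmp/stage3.tar.bz2",
+#		hash_="sha512", verbose=True)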
import catalyst.config
import catalyst.util
from catalyst.support import (required_build_targets,
- valid_build_targets, CatalystError, hash_map, find_binary, LockInUse)
+ valid_build_targets, CatalystError, find_binary, LockInUse)
+
+from hash_utils import HashMap, HASH_DEFINITIONS
+
conf_values={}
# import configuration file and import our main module using those settings
parse_config(myconfig)
- # Start checking that digests are valid now that the hash_map was imported
- # from catalyst.support
+	# initialize our hash generator
+ hash_map = HashMap(HASH_DEFINITIONS)
+ conf_values["hash_map"] = hash_map
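+	# build targets access this later via self.settings["hash_map"]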
+
+ # Start checking that digests are valid now that hash_map is initialized
if "digests" in conf_values:
for i in conf_values["digests"].split():
- if i not in hash_map:
+ if i not in HASH_DEFINITIONS:
print
print i+" is not a valid digest entry"
print "Valid digest entries:"
- print hash_map.keys()
+ print HASH_DEFINITIONS.keys()
print
print "Catalyst aborting...."
sys.exit(2)
- if find_binary(hash_map[i][1]) == None:
+			if find_binary(hash_map.hash_map[i].cmd) is None:
print
- print "digest="+i
- print "\tThe "+hash_map[i][1]+\
+ print "digest=" + i
+ print "\tThe " + hash_map.hash_map[i].cmd + \
" binary was not found. It needs to be in your system path"
print
print "Catalyst aborting...."
sys.exit(2)
if "hash_function" in conf_values:
- if conf_values["hash_function"] not in hash_map:
+ if conf_values["hash_function"] not in HASH_DEFINITIONS:
print
print conf_values["hash_function"]+\
" is not a valid hash_function entry"
print "Valid hash_function entries:"
- print hash_map.keys()
+ print HASH_DEFINITIONS.keys()
print
print "Catalyst aborting...."
sys.exit(2)
- if find_binary(hash_map[conf_values["hash_function"]][1]) == None:
+		if find_binary(hash_map.hash_map[conf_values["hash_function"]].cmd) is None:
print
print "hash_function="+conf_values["hash_function"]
- print "\tThe "+hash_map[conf_values["hash_function"]][1]+\
+ print "\tThe "+hash_map.hash_map[conf_values["hash_function"]].cmd + \
" binary was not found. It needs to be in your system path"
print
print "Catalyst aborting...."
"isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
}
-def generate_hash(file,hash_function="crc32",verbose=False):
- try:
- return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
- hash_map[hash_function][3],verbose)
- except:
- raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
-
-def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
- a=os.popen(cmd+" "+cmd_args+" "+file)
- mylines=a.readlines()
- a.close()
- mylines=mylines[0].split()
- result=mylines[0]
- if verbose:
- print id_string+" (%s) = %s" % (file, result)
- return result
-
-def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
- a=os.popen(cmd+" "+cmd_args+" "+file)
- header=a.readline()
- mylines=a.readline().split()
- hash=mylines[0]
- short_file=os.path.split(mylines[1])[1]
- a.close()
- result=header+hash+" "+short_file+"\n"
- if verbose:
- print header+" (%s) = %s" % (short_file, result)
- return result
-
-# This has map must be defined after the function calc_hash
-# It is possible to call different functions from this but they must be defined
-# before hash_map
-# Key,function,cmd,cmd_args,Print string
-hash_map={
- "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
- "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
- "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
- "gost":[calc_hash2,"shash","-a GOST","GOST"],\
- "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
- "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
- "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
- "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
- "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
- "md2":[calc_hash2,"shash","-a MD2","MD2"],\
- "md4":[calc_hash2,"shash","-a MD4","MD4"],\
- "md5":[calc_hash2,"shash","-a MD5","MD5"],\
- "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
- "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
- "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
- "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
- "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
- "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
- "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
- "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
- "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
- "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
- "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
- "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
- "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
- "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
- "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
- }
def read_from_clst(file):
line = ''
if os.path.isfile(self.settings["source_path"]):
# XXX: Is this even necessary if the previous check passes?
if os.path.exists(self.settings["source_path"]):
- self.settings["source_path_hash"]=\
- generate_hash(self.settings["source_path"],\
- hash_function=self.settings["hash_function"],\
- verbose=False)
+ self.settings["source_path_hash"] = \
+ self.settings["hash_map"].generate_hash(
+ self.settings["source_path"],
+					hash_=self.settings["hash_function"],
+					verbose=False)
print "Source path set to "+self.settings["source_path"]
if os.path.isdir(self.settings["source_path"]):
print "\tIf this is not desired, remove this directory or turn off"
self.settings["snapshot"] + ".tar.xz")
if os.path.exists(self.settings["snapshot_path"]):
- self.settings["snapshot_path_hash"]=\
- generate_hash(self.settings["snapshot_path"],\
- hash_function=self.settings["hash_function"],verbose=False)
+ self.settings["snapshot_path_hash"] = \
+ self.settings["hash_map"].generate_hash(
+ self.settings["snapshot_path"],
+					hash_=self.settings["hash_function"],
+					verbose=False)
else:
self.settings["snapshot_path"]=normpath(self.settings["storedir"]+\
"/snapshots/" + self.settings["snapshot_name"] +
self.settings["snapshot"] + ".tar.bz2")
if os.path.exists(self.settings["snapshot_path"]):
- self.settings["snapshot_path_hash"]=\
- generate_hash(self.settings["snapshot_path"],\
- hash_function=self.settings["hash_function"],verbose=False)
+ self.settings["snapshot_path_hash"] = \
+ self.settings["hash_map"].generate_hash(
+ self.settings["snapshot_path"],
+					hash_=self.settings["hash_function"],
+					verbose=False)
def set_snapcache_path(self):
if "SNAPCACHE" in self.settings:
if os.path.exists(file+".DIGESTS"):
os.remove(file+".DIGESTS")
if "digests" in self.settings:
+ hash_map = self.settings["hash_map"]
if os.path.exists(file):
myf=open(file+".DIGESTS","w")
keys={}
for f in [file, file+'.CONTENTS']:
if os.path.exists(f):
if "all" in array:
- for k in hash_map.keys():
- hash=generate_hash(f,hash_function=k,verbose=\
- "VERBOSE" in self.settings)
+ for k in list(hash_map.hash_map):
+ hash = hash_map.generate_hash(f, hash_ = k,
+ verbose = "VERBOSE" in self.settings)
myf.write(hash)
else:
for j in array:
- hash=generate_hash(f,hash_function=j,verbose=\
- "VERBOSE" in self.settings)
+ hash = hash_map.generate_hash(f, hash_ = j,
+ verbose = "VERBOSE" in self.settings)
myf.write(hash)
myf.close()
def set_source_path(self):
self.settings["source_path"]=normpath(self.settings["storedir"]+"/builds/"+self.settings["source_subpath"]+".tar.bz2")
if os.path.isfile(self.settings["source_path"]):
- self.settings["source_path_hash"]=generate_hash(self.settings["source_path"])
+ self.settings["source_path_hash"] = \
+ self.settings["hash_map"].generate_hash(
+ self.settings["source_path"])
else:
self.settings["source_path"]=normpath(self.settings["storedir"]+"/tmp/"+self.settings["source_subpath"]+"/")
if not os.path.exists(self.settings["source_path"]):
if os.path.isfile(self.settings["source_path"]):
if os.path.exists(self.settings["source_path"]):
# XXX: Is this even necessary if the previous check passes?
- self.settings["source_path_hash"]=generate_hash(self.settings["source_path"],\
- hash_function=self.settings["hash_function"],verbose=False)
+ self.settings["source_path_hash"] = \
+ self.settings["hash_map"].generate_hash(
+					self.settings["source_path"],
+ hash_=self.settings["hash_function"],
+ verbose=False)
print "Source path set to "+self.settings["source_path"]
if os.path.isdir(self.settings["source_path"]):
print "\tIf this is not desired, remove this directory or turn of seedcache in the options of catalyst.conf"