2 import sys,string,os,types,re,signal,traceback,time
4 selinux_capable = False
5 #userpriv_capable = (os.getuid() == 0)
6 #fakeroot_capable = False
7 BASH_BINARY = "/bin/bash"
11 max_fd_limit=resource.getrlimit(RLIMIT_NOFILE)
15 # hokay, no resource module.
18 # pids this process knows of.
# Reap a list of child pids: SIGTERM first, SIGKILL for any survivor, then
# drop each pid from the module-level spawned_pids list.
# NOTE(review): this listing is elided -- the try/except scaffolding and the
# loop header binding `x` are not visible, so exact control flow is unconfirmed.
26 def cleanup(pids,block_exceptions=True):
27 """function to go through and reap the list of pids passed to it"""
# Ask politely, then probe non-blockingly whether the child already exited.
33 os.kill(x,signal.SIGTERM)
34 if os.waitpid(x,os.WNOHANG)[1] == 0:
35 # feisty bugger, still alive.
36 os.kill(x,signal.SIGKILL)
# errno 10 (ECHILD) / 3 (ESRCH): process already gone, not a real failure.
42 if oe.errno not in (10,3):
# NOTE(review): list.remove raises ValueError, not IndexError, so a missing
# pid would propagate here despite the except clause -- confirm intent.
49 try: spawned_pids.remove(x)
50 except IndexError: pass
54 # a function to turn a string of non-printable characters into a string of
# hexStr indexes 0..15 to hex digits (string.hexdigits starts "0123456789abcdef").
57 hexStr = string.hexdigits
# Append two hex digits per byte value i: high nibble then low nibble.
# NOTE(review): the def line and loop header of this helper are elided here.
61 r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
# Produce a contents listing for an archive/ISO file by dispatching through
# contents_map; "auto" selects a handler from the file extension.
# NOTE(review): listing is elided -- the assignments choosing the concrete
# handler name in each branch are not visible here.
65 def generate_contents(file,contents_function="auto",verbose=False):
68 if _ == 'auto' and file.endswith('.iso'):
70 if (_ in ['tar-tv','auto']):
71 if file.endswith('.tgz') or file.endswith('.tar.gz'):
73 elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
75 elif file.endswith('.tar'):
79 warn('File %r has unknown type for automatic detection.' % (file, ))
# contents_map rows are [handler, command-template]; call handler(file, cmd, verbose).
83 _ = contents_map[contents_function]
84 return _[0](file,_[1],verbose)
# Error text used when the external utility is missing (raise line elided).
87 "Error generating contents, is appropriate utility (%s) installed on your system?" \
88 % (contents_function, )
# Run `cmd` (a %-template over the file name) and join the captured output.
# NOTE(review): the popen/readlines plumbing of this body is elided here.
90 def calc_contents(file,cmd,verbose):
96 result="".join(mylines)
101 # This has map must be defined after the function calc_content
102 # It is possible to call different functions from this but they must be defined
# Maps a contents_function name to [handler, command template]; templates use
# %(file)s substitution. NOTE(review): the opening "contents_map={" line and
# closing brace are elided from this listing.
106 # 'find' is disabled because it requires the source path, which is not
108 #"find" :[calc_contents,"find %(path)s"],
109 "tar-tv":[calc_contents,"tar tvf %(file)s"],
110 "tar-tvz":[calc_contents,"tar tvzf %(file)s"],
111 "tar-tvj":[calc_contents,"tar tvjf %(file)s"],
112 "isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
113 # isoinfo-f should be a last resort only
114 "isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
# Compute a digest of `file` by dispatching through hash_map.
# hash_map rows are [function, cmd, cmd_args, id_string]; all are forwarded.
117 def generate_hash(file,hash_function="crc32",verbose=False):
119 return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
120 hash_map[hash_function][3],verbose)
# Python 2 raise syntax; reached when the lookup/call above fails.
122 raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
# Shell out as "cmd cmd_args file" and report the digest from the first
# output line. NOTE(review): the line assigning `result` is elided here.
124 def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
125 a=os.popen(cmd+" "+cmd_args+" "+file)
126 mylines=a.readlines()
# Split first output line into whitespace tokens.
128 mylines=mylines[0].split()
# Python 2 print statement, e.g. "MD5 (path) = digest".
131 print id_string+" (%s) = %s" % (file, result)
# Variant of calc_hash producing a "HEADER digest shortname\n" record
# (shash-style output). NOTE(review): the lines building `header` and `hash`
# are elided from this listing.
134 def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
135 a=os.popen(cmd+" "+cmd_args+" "+file)
137 mylines=a.readline().split()
# mylines[1] is expected to be a path; keep only its basename.
139 short_file=os.path.split(mylines[1])[1]
141 result=header+hash+" "+short_file+"\n"
143 print header+" (%s) = %s" % (short_file, result)
146 # This has map must be defined after the function calc_hash
147 # It is possible to call different functions from this but they must be defined
149 # Key,function,cmd,cmd_args,Print string
# Every entry delegates to calc_hash2 via the external `shash` utility,
# selecting the algorithm with "-a NAME". NOTE(review): the opening
# "hash_map={" line and closing brace are elided from this listing.
151 "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
152 "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
153 "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
154 "gost":[calc_hash2,"shash","-a GOST","GOST"],\
155 "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
156 "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
157 "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
158 "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
159 "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
160 "md2":[calc_hash2,"shash","-a MD2","MD2"],\
161 "md4":[calc_hash2,"shash","-a MD4","MD4"],\
162 "md5":[calc_hash2,"shash","-a MD5","MD5"],\
163 "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
164 "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
165 "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
166 "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
167 "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
168 "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
169 "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
170 "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
171 "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
172 "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
173 "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
174 "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
175 "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
176 "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
177 "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
# Concatenate all lines of a .clst file into one string.
# NOTE(review): the open() call, the initialisation of `myline`, and the
# return statement are elided from this listing.
180 def read_from_clst(file):
187 #raise CatalystError, "Could not open file "+file
188 for line in myf.readlines():
189 #line = string.replace(line, "\n", "") # drop newline
190 myline = myline + line
# Target classes the build machinery itself relies on -- never remove these.
required_build_targets = ["generic_target", "generic_stage_target"]

# User-selectable build types; register any newly added target class here.
valid_build_targets = [
    "stage1_target",
    "stage2_target",
    "stage3_target",
    "stage4_target",
    "grp_target",
    "livecd_stage1_target",
    "livecd_stage2_target",
    "embedded_target",
    "tinderbox_target",
    "snapshot_target",
    "netboot_target",
    "netboot2_target",
]
# Settings that every catalyst configuration file must define.
required_config_file_values = ["storedir", "sharedir", "distdir", "portdir"]

# Optional settings recognized in addition to the required ones.  The list
# begins with a copy of the required values; the extension order matches the
# original append sequence.
valid_config_file_values = required_config_file_values[:]
valid_config_file_values.extend([
    "PKGCACHE",
    "KERNCACHE",
    "CCACHE",
    "DISTCC",
    "ICECREAM",
    "ENVSCRIPT",
    "AUTORESUME",
    "FETCH",
    "CLEAR_AUTORESUME",
    "options",
    "DEBUG",
    "VERBOSE",
    "PURGE",
    "PURGEONLY",
    "SNAPCACHE",
    "snapshot_cache",
    "hash_function",
    "digests",
    "contents",
    "SEEDCACHE",
])
# Quote each list element in single quotes and join with spaces so the result
# can be passed to bash with metacharacters (e.g. "<") intact.
# NOTE(review): the string-input branch body and the return statement are
# elided from this listing.
228 def list_bashify(mylist):
229 if type(mylist)==types.StringType:
233 for x in range(0,len(mypack)):
234 # surround args with quotes for passing to bash,
235 # allows things like "<" to remain intact
236 mypack[x]="'"+mypack[x]+"'"
237 mypack=string.join(mypack)
# Join list elements into a single space-separated string.
# NOTE(review): the loop body (line 248) is elided; the quoting comment below
# appears copy-pasted from list_bashify -- no quoting is visible here.
240 def list_to_string(mylist):
241 if type(mylist)==types.StringType:
245 for x in range(0,len(mypack)):
246 # surround args with quotes for passing to bash,
247 # allows things like "<" to remain intact
249 mypack=string.join(mypack)
# Project-wide error type; on construction it dumps any in-flight traceback
# and prints the message to stdout.
252 class CatalystError(Exception):
253 def __init__(self, message):
# Capture the currently handled exception (if any) for the dump below.
255 (type,value)=sys.exc_info()[:2]
# NOTE(review): traceback.print_exc() returns None, so this statement prints
# the traceback and then a literal "None".
258 print traceback.print_exc(file=sys.stdout)
260 print "!!! catalyst: "+message
# Raised when another catalyst instance already holds the lock file; prints
# the message on construction (traceback dump deliberately commented out).
263 class LockInUse(Exception):
264 def __init__(self, message):
266 #(type,value)=sys.exc_info()[:2]
269 #kprint traceback.print_exc(file=sys.stdout)
271 print "!!! catalyst lock file in use: "+message
# Body of the warn(msg) helper used above (its def line is elided from this
# listing); prints the warning to stdout with the "!!! catalyst:" prefix.
279 print "!!! catalyst: "+msg
282 def find_binary(myc):
283 """look through the environmental path for an executable file named whatever myc is"""
# NOTE(review): the lines obtaining `p` (presumably the PATH string) and the
# not-found return are elided from this listing.
288 for x in p.split(":"):
289 #if it exists, and is executable
# st_mode & 0x0248 (0o1110) tests S_ISVTX|S_IXUSR|S_IXGRP.
# NOTE(review): other-execute (0o1) is NOT tested -- likely intended as
# "any execute bit"; confirm before relying on it.
290 if os.path.exists("%s/%s" % (x,myc)) and os.stat("%s/%s" % (x,myc))[0] & 0x0248:
291 return "%s/%s" % (x,myc)
# Run mycommand as an argument to bash via spawn().
# NOTE(review): mutable default env={} is shared across calls and is mutated
# below (BASH_ENV insertion) -- classic Python default-argument pitfall.
295 def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
296 """spawn mycommand as an arguement to bash"""
# Default the process name to the first word of the command.
299 opt_name=mycommand.split()[0]
# Point BASH_ENV at a nonexistent path so bash sources no startup file.
300 if not env.has_key("BASH_ENV"):
301 env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
# NOTE(review): construction of `args` (BASH_BINARY plus flags) is elided here.
305 args.append(mycommand)
306 return spawn(args,env=env,opt_name=opt_name,**keywords)
308 #def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
309 # collect_fds=[1],fd_pipes=None,**keywords):
# Run a command via spawn_bash, collecting output from the fds listed in
# collect_fds; returns [exit-status, output-string].
# NOTE(review): mutable default collect_fds=[1]; pipe setup lines are elided.
310 def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
311 collect_fds=[1],fd_pipes=None,**keywords):
312 """call spawn, collecting the output to fd's specified in collect_fds list
313 emulate_gso is a compatability hack to emulate commands.getstatusoutput's return, minus the
314 requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
315 'lets let log only stdin and let stderr slide by'.
317 emulate_gso was deprecated from the day it was added, so convert your code over.
318 spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
319 global selinux_capable
322 #if type(spawn_type) not in [types.FunctionType, types.MethodType]:
323 # s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
330 for x in collect_fds:
# Force spawn to hand back the child pid so we can wait on it ourselves.
332 keywords["returnpid"]=True
334 mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
# spawn returns a list of pids on success; anything else means exec failure.
336 if type(mypid) != types.ListType:
338 return [mypid, "%s: No such file or directory" % mycommand.split()[0]]
341 mydata=fd.readlines()
344 mydata=string.join(mydata)
# Strip a single trailing newline for getstatusoutput compatibility.
345 if len(mydata) and mydata[-1] == "\n":
347 retval=os.waitpid(mypid[0],0)[1]
350 return [retval,mydata]
# Convert the raw waitpid status into an exit code unless raw was requested.
351 retval=process_exit_code(retval)
352 return [retval, mydata]
355 # base spawn function
# NOTE(review): this listing is heavily elided -- many statements (fd list
# construction, uid/gid handling, logfile pipe setup, waitpid loop) are not
# visible, so comments below describe only what the visible lines show.
# NOTE(review): mutable default env={} is shared across calls.
356 def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
357 uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
358 selinux_context=None, raise_signals=False, func_call=False):
359 """base fork/execve function.
360 mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
361 environment, use the appropriate spawn call. This is a straight fork/exec code path.
362 Can either have a tuple, or a string passed in. If uid/gid/groups/umask specified, it changes
363 the forked process to said value. If path_lookup is on, a non-absolute command will be converted
364 to an absolute command, otherwise it returns None.
366 selinux_context is the desired context, dependant on selinux being available.
367 opt_name controls the name the processor goes by.
368 fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
369 current fd's raw fd #, desired #.
371 func_call is a boolean for specifying to execute a python function- use spawn_func instead.
372 raise_signals is questionable. Basically throw an exception if signal'd. No exception is thrown
375 logfile overloads the specified fd's to write to a tee process which logs to logfile
376 returnpid returns the relevant pids (a list, including the logging process if logfile is on).
378 non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
379 raw_exit_code controls whether the actual waitpid result is returned, or intrepretted."""
# Normalize a string command into an argv list.
384 if type(mycommand)==types.StringType:
385 mycommand=mycommand.split()
# Resolve a non-executable command name through PATH (find_binary above).
387 if not os.access(myc, os.X_OK):
390 myc = find_binary(myc)
# Logging mode: spawn a `tee` child that mirrors output into logfile.
396 mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
397 retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
402 return process_exit_code(retval)
# Default the displayed process name to the command itself.
411 opt_name = mycommand[0]
413 myargs.extend(mycommand[1:])
# Fork; the child continues below, the parent records the pid.
415 mypid.append(os.fork())
418 spawned_pids.extend(mypid)
424 # this may look ugly, but basically it moves file descriptors around to ensure no
425 # handles that are needed are accidentally closed during the final dup2 calls.
427 if type(fd_pipes)==types.DictType:
432 #build list of which fds will be where, and where they are at currently
435 src_fd.append(fd_pipes[x])
437 # run through said list dup'ing descriptors so that they won't be waxed
438 # by other dup calls.
439 for x in range(0,len(trg_fd)):
440 if trg_fd[x] == src_fd[x]:
# If a target fd is still needed as a later source, park it on a free fd.
442 if trg_fd[x] in src_fd[x+1:]:
443 new=os.dup2(trg_fd[x],max(src_fd) + 1)
447 src_fd[s.index(trg_fd[x])]=new
# Python 2 except syntax; SystemExit must not be swallowed during fd setup.
448 except SystemExit, e:
453 # transfer the fds to their final pre-exec position.
454 for x in range(0,len(trg_fd)):
455 if trg_fd[x] != src_fd[x]:
456 os.dup2(src_fd[x], trg_fd[x])
460 # wax all open descriptors that weren't requested be left open.
461 for x in range(0,max_fd_limit):
465 except SystemExit, e:
470 # note this order must be preserved- can't change gid/groups if you change uid first.
471 if selinux_capable and selinux_context:
473 selinux.setexec(selinux_context)
486 #print "execing", myc, myargs
488 # either use a passed in func for interpretting the results, or return if no exception.
489 # note the passed in list, and dict are expanded.
# func_call mode: mycommand is (func, args, kwargs[, result-interpreter]).
490 if len(mycommand) == 4:
491 os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
493 mycommand[0](*mycommand[1],**mycommand[2])
495 print "caught exception",e," in forked func",mycommand[0]
498 #os.execvp(myc,myargs)
# Replace the child image; only returns on failure.
499 os.execve(myc,myargs,env)
500 except SystemExit, e:
504 raise str(e)+":\n "+myc+" "+string.join(myargs)
505 print "func call failed"
507 # If the execve fails, we need to report it, and exit
508 # *carefully* --- report error here
511 return # should never get reached
513 # if we were logging, kill the pipes.
521 # loop through pids (typically one, unless logging), either waiting on their death, or waxing them
522 # if the main pid (mycommand) returned badly.
# Parent: block on the last child, then reap/kill any helper children.
524 retval=os.waitpid(mypid[-1],0)[1]
526 cleanup(mypid[0:-1],block_exceptions=False)
527 # at this point we've killed all other kid pids generated via this call.
531 return process_exit_code(retval,throw_signals=raise_signals)
# Convenience wrapper: run mycmd through spawn_bash and raise
# CatalystError(myexc) when it fails. NOTE(review): the failure test between
# these lines is elided; mutable default env={} is shared across calls.
537 def cmd(mycmd,myexc="",env={}):
540 retval=spawn_bash(mycmd,env)
542 raise CatalystError,myexc
546 def process_exit_code(retval,throw_signals=False):
547 """process a waitpid returned exit code, returning exit code if it exit'd, or the
548 signal if it died from signalling
549 if throw_signals is on, it raises a SystemExit if the process was signaled.
550 This is intended for usage with threads, although at the moment you can't signal individual
551 threads in python, only the master thread, so it's a questionable option."""
# waitpid status encoding: low byte == 0 means normal exit; exit code is in
# the high byte. NOTE(review): the throw_signals branch is elided here.
552 if (retval & 0xff)==0:
553 return retval >> 8 # return exit code
556 #use systemexit, since portage is stupid about exception catching.
# Signal death: shift the signal number into the high byte.
558 return (retval & 0xff) << 8 # interrupted by signal
# Validate (and optionally cwd-expand) the file paths stored in `settings`
# under each key in filelist, raising CatalystError when one cannot be found.
561 def file_locate(settings,filelist,expand=1):
562 #if expand=1, non-absolute paths will be accepted and
563 # expanded to os.getcwd()+"/"+localpath if file exists
564 for myfile in filelist:
565 if not settings.has_key(myfile):
566 #filenames such as cdtar are optional, so we don't assume the variable is defined.
# Present but empty value is an error -- the user forgot to fill it in.
569 if len(settings[myfile])==0:
570 raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
# Absolute path: must exist as-is.
571 if settings[myfile][0]=="/":
572 if not os.path.exists(settings[myfile]):
573 raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
# Relative path: rewrite in place to a cwd-anchored absolute path.
574 elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
575 settings[myfile]=os.getcwd()+"/"+settings[myfile]
577 raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"
581 The spec file format is a very simple and easy-to-use format for storing data. Here's an example
591 This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
592 the string value "value1". Item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
593 would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
594 that the order of multiple-value items is preserved, but the order that the items themselves are
595 defined is not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
596 "item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
# Parse make.conf-style KEY=value lines into a dict (mymakeconf), skipping
# blanks, comments and indented lines and stripping double quotes from values.
# NOTE(review): initialisation of pos/mymakeconf and the pos increments /
# return are elided from this listing.
599 def parse_makeconf(mylines):
602 pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
603 while pos<len(mylines):
# Empty (or newline-only) line: nothing to parse.
604 if len(mylines[pos])<=1:
608 if mylines[pos][0] in ["#"," ","\t"]:
609 #skip indented lines, comments
614 mobj=pat.match(myline)
# Drop embedded double quotes from the right-hand side before storing.
617 clean_string = re.sub(r"\"",r"",mobj.group(2))
618 mymakeconf[mobj.group(1)]=clean_string
# Read a make.conf file, preferring external bash-aware parsers and falling
# back to the naive parse_makeconf above. NOTE(review): the try/except lines
# chaining these fallbacks are elided from this listing.
621 def read_makeconf(mymakeconffile):
622 if os.path.exists(mymakeconffile):
# Preferred: snakeoil's bash dict reader (handles `source`).
625 import snakeoil.fileutils
626 return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
# Next: portage's tolerant config reader (two historical module paths).
630 return portage.util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
634 return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
# Last resort: parse the raw lines ourselves.
636 myf=open(mymakeconffile,"r")
637 mylines=myf.readlines()
639 return parse_makeconf(mylines)
641 raise CatalystError, "Could not parse make.conf file "+mymakeconffile
# Print mymsg when the module-level `verbosity` reaches verblevel.
# NOTE(review): the print statement itself is elided from this listing.
646 def msg(mymsg,verblevel=1):
647 if verbosity>=verblevel:
# Compare two paths after normalizing double slashes and trailing slashes.
# NOTE(review): the actual comparison/return lines are elided here; re.sub
# with "//" replaces each pair once per pass, so "///" is not fully collapsed.
650 def pathcompare(path1,path2):
651 # Change double slashes to slash
652 path1 = re.sub(r"//",r"/",path1)
653 path2 = re.sub(r"//",r"/",path2)
654 # Removing ending slash
655 path1 = re.sub("/$","",path1)
656 path2 = re.sub("/$","",path2)
# Fragment of a mount-check helper (def line elided): besides
# os.path.ismount, it scans mount-table output -- field index 2 of each split
# line is the mount point -- so bind mounts are detected too.
663 "enhanced to handle bind mounts"
664 if os.path.ismount(path):
667 mylines=a.readlines()
671 if pathcompare(path,mysplit[2]):
675 def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
676 "helper function to help targets parse additional arguments"
# Copies recognized addlargs entries into myspec; accumulates complaints in
# `messages` (its initialisation and the len(messages) guard are elided) and
# raises them joined into one CatalystError.
677 global valid_config_file_values
680 for x in addlargs.keys():
# An argument is recognized if it appears in any of the three specs.
681 if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
682 messages.append("Argument \""+x+"\" not recognized.")
684 myspec[x]=addlargs[x]
686 for x in requiredspec:
687 if not myspec.has_key(x):
688 messages.append("Required argument \""+x+"\" not specified.")
691 raise CatalystError, '\n\tAlso: '.join(messages)
# Error path of a touch()-style helper (its def and the open/close lines are
# elided from this listing).
698 raise CatalystError, "Could not touch "+myfile+"."
# Print a visible countdown of `secs` seconds before an interruptible action.
# NOTE(review): the loop header binding `sec` and the sleep/newline lines are
# elided from this listing.
700 def countdown(secs=5, doing="Starting"):
702 print ">>> Waiting",secs,"seconds before starting..."
703 print ">>> (Control-C to abort)...\n"+doing+" in: ",
707 sys.stdout.write(str(sec+1)+" ")
# Normalize a path via os.path.normpath, then strip the leading double slash
# that normpath deliberately preserves (POSIX allows "//" roots).
# NOTE(review): the trailing-slash branch body and the return are elided.
712 def normpath(mypath):
714 if mypath[-1] == "/":
716 newpath = os.path.normpath(mypath)
718 if newpath[:2] == "//":
719 newpath = newpath[1:]