import sys,string,os,types,re,signal,traceback,time

# flag for selinux support; stays False unless the selinux bindings are loaded elsewhere
selinux_capable = False
#userpriv_capable = (os.getuid() == 0)
#fakeroot_capable = False
BASH_BINARY = "/bin/bash"

try:
    import resource
    # soft limit on open file descriptors; spawn() closes fds 0..max_fd_limit in the child.
    # NOTE(review): the original called getrlimit(RLIMIT_NOFILE) unqualified, which always
    # raised NameError and silently fell back to the default below; fixed to the soft limit.
    max_fd_limit = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
except Exception:
    # hokay, no resource module.
    max_fd_limit = 256

# pids this process knows of.
spawned_pids = []
def cleanup(pids, block_exceptions=True):
    """function to go through and reap the list of pids passed to it

    Each pid gets a SIGTERM; if waitpid says it is still running it is
    SIGKILLed and reaped. Reaped pids are removed from the module-level
    spawned_pids list. With block_exceptions=True (default), errors are
    swallowed; otherwise unexpected errors propagate."""
    global spawned_pids
    if isinstance(pids, int):
        pids = [pids]
    for x in pids:
        try:
            os.kill(x, signal.SIGTERM)
            if os.waitpid(x, os.WNOHANG)[1] == 0:
                # feisty bugger, still alive.
                os.kill(x, signal.SIGKILL)
                os.waitpid(x, 0)
        except OSError as oe:
            # ESRCH (3): no such process; ECHILD (10): already reaped — both are fine
            if oe.errno not in (10, 3):
                if not block_exceptions:
                    raise
        except SystemExit:
            raise
        except Exception:
            if not block_exceptions:
                raise
        try:
            spawned_pids.remove(x)
        except ValueError:
            # NOTE(review): original caught IndexError, but list.remove raises
            # ValueError when the element is absent — the old except never fired.
            pass
# a function to turn a string of non-printable characters into a string of
# hex characters
def hexify(str):
    """Return the two-lowercase-hex-digits-per-byte representation of `str`."""
    hexStr = string.hexdigits
    r = ''
    for ch in str:
        i = ord(ch)
        # high nibble then low nibble; masking keeps indices inside '0'..'f'
        r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
    return r
def generate_contents(file, contents_function="auto", verbose=False):
    """Generate a contents listing of `file` via the helper registered in contents_map.

    With contents_function "auto" the helper is chosen from the file extension
    (.iso -> isoinfo-l, .tgz/.tar.gz -> tar-tvz, .tbz2/.tar.bz2 -> tar-tvj,
    .tar -> tar-tv). Returns the helper's output string, or None when no helper
    could be determined. Raises CatalystError when the helper fails."""
    try:
        _ = contents_function
        if _ == 'auto' and file.endswith('.iso'):
            _ = 'isoinfo-l'
        if _ in ['tar-tv', 'auto']:
            if file.endswith('.tgz') or file.endswith('.tar.gz'):
                _ = 'tar-tvz'
            elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                _ = 'tar-tvj'
            elif file.endswith('.tar'):
                _ = 'tar-tv'
        if _ == 'auto':
            warn('File %r has unknown type for automatic detection.' % (file, ))
            return None
        contents_function = _
        # contents_map entry layout: [function, command-template]
        entry = contents_map[contents_function]
        return entry[0](file, entry[1], verbose)
    except CatalystError:
        raise
    except:
        raise CatalystError(
            "Error generating contents, is appropriate utility (%s) installed on your system?"
            % (contents_function, ))
def calc_contents(file, cmd, verbose):
    """Run `cmd` (a %-format template with a 'file' key) and return its stdout.

    The full output is returned as one string; with verbose=True it is also
    printed to stdout."""
    args = {'file': file}
    cmd = cmd % dict(args)
    a = os.popen(cmd)
    mylines = a.readlines()
    a.close()
    result = "".join(mylines)
    if verbose:
        print(result)
    return result
# This hash map must be defined after the function calc_contents.
# It is possible to call different functions from this but they must be defined
# before this map.
# Key: [function, command-template]
contents_map = {
    # 'find' is disabled because it requires the source path, which is not
    # always available
    #"find"     :[calc_contents,"find %(path)s"],
    "tar-tv":[calc_contents,"tar tvf %(file)s"],
    "tar-tvz":[calc_contents,"tar tvzf %(file)s"],
    "tar-tvj":[calc_contents,"tar tvjf %(file)s"],
    "isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
    # isoinfo-f should be a last resort only
    "isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
}
def generate_hash(file, hash_function="crc32", verbose=False):
    """Generate a checksum of `file` using the helper registered in hash_map.

    Raises CatalystError if the helper (an external utility) is unavailable
    or fails."""
    try:
        # hash_map entry layout: [function, cmd, cmd_args, id_string]
        myfunc, mycmd, myargs, myid = hash_map[hash_function]
        return myfunc(file, mycmd, myargs, myid, verbose)
    except:
        raise CatalystError("Error generating hash, is appropriate utility installed on your system?")
def calc_hash(file, cmd, cmd_args, id_string="MD5", verbose=False):
    """Run "<cmd> <cmd_args> <file>" and return the first whitespace-separated
    token of the first output line (the digest for md5sum-style tools)."""
    a = os.popen(cmd + " " + cmd_args + " " + file)
    mylines = a.readlines()
    a.close()
    mylines = mylines[0].split()
    result = mylines[0]
    if verbose:
        print(id_string + " (%s) = %s" % (file, result))
    return result
def calc_hash2(file, cmd, cmd_args, id_string="MD5", verbose=False):
    """Run "<cmd> <cmd_args> <file>" for shash-style tools that print a header
    line followed by "<digest> <path>".

    Returns header + digest + " " + basename(path) + newline."""
    a = os.popen(cmd + " " + cmd_args + " " + file)
    header = a.readline()
    mylines = a.readline().split()
    hash = mylines[0]
    # only keep the file's basename, not the full path the tool echoed
    short_file = os.path.split(mylines[1])[1]
    a.close()
    result = header + hash + " " + short_file + "\n"
    if verbose:
        print(header + " (%s) = %s" % (short_file, result))
    return result
# This hash map must be defined after the function calc_hash.
# It is possible to call different functions from this but they must be defined
# before this map.
# Key: [function, cmd, cmd_args, print string]
hash_map = {
    "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],
    "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],
    "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],
    "gost":[calc_hash2,"shash","-a GOST","GOST"],
    "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],
    "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],
    "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],
    "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],
    "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],
    "md2":[calc_hash2,"shash","-a MD2","MD2"],
    "md4":[calc_hash2,"shash","-a MD4","MD4"],
    "md5":[calc_hash2,"shash","-a MD5","MD5"],
    "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],
    "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],
    "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],
    "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],
    "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],
    "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],
    "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],
    "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],
    "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],
    "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],
    "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],
    "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],
    "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],
    "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],
    "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],
}
def read_from_clst(file):
    """Return the entire contents of `file` as one string, or -1 when the file
    cannot be opened (a missing cluster file is not an error here)."""
    myline = ''
    try:
        myf = open(file, "r")
    except:
        return -1
        #raise CatalystError, "Could not open file "+file
    for line in myf.readlines():
        #line = string.replace(line, "\n", "") # drop newline
        myline = myline + line
    myf.close()
    return myline
# these should never be touched
required_build_targets = ["generic_target", "generic_stage_target"]

# new build types should be added here
valid_build_targets = [
    "stage1_target", "stage2_target", "stage3_target", "stage4_target",
    "grp_target", "livecd_stage1_target", "livecd_stage2_target",
    "embedded_target", "tinderbox_target", "snapshot_target",
    "netboot_target", "netboot2_target",
]
# configuration keys that must always be present in the config file
required_config_file_values = ["storedir", "sharedir", "distdir", "portdir"]

# every key the config file may define: the required keys plus the optional
# feature/path switches (order matches the original append sequence)
valid_config_file_values = required_config_file_values[:]
valid_config_file_values.extend([
    "PKGCACHE", "KERNCACHE", "CCACHE", "DISTCC", "ICECREAM", "ENVSCRIPT",
    "AUTORESUME", "FETCH", "CLEAR_AUTORESUME", "options", "DEBUG", "VERBOSE",
    "PURGE", "SNAPCACHE", "snapshot_cache", "hash_function", "digests",
    "contents", "SEEDCACHE",
])
def list_bashify(mylist):
    """Return `mylist` (a string or list of strings) as one space-joined string
    with every element single-quoted for safe passing to bash."""
    if isinstance(mylist, str):
        mypack = [mylist]
    else:
        mypack = mylist[:]
    # surround args with quotes for passing to bash,
    # allows things like "<" to remain intact
    return " ".join("'" + x + "'" for x in mypack)
def list_to_string(mylist):
    """Return `mylist` unchanged if it is already a string, otherwise the
    space-joined concatenation of its elements."""
    if isinstance(mylist, str):
        return mylist
    return " ".join(mylist)
class CatalystError(Exception):
    """Base catalyst error.

    Construction prints the message (and any currently-active exception's
    traceback) to stdout, preserving the historical side-effecting behavior."""
    def __init__(self, message):
        # keep the message on the exception itself so str(e) works for callers
        Exception.__init__(self, message)
        if message:
            (etype, value) = sys.exc_info()[:2]
            if value is not None:
                sys.stdout.write("\n")
                # NOTE(review): original did `print traceback.print_exc(...)`,
                # which printed the traceback followed by a spurious "None".
                traceback.print_exc(file=sys.stdout)
            sys.stdout.write("\n")
            print("!!! catalyst: " + message)
            sys.stdout.write("\n")
class LockInUse(Exception):
    """Raised when a catalyst lock file is already held by another process.

    Construction prints the message to stdout, matching CatalystError."""
    def __init__(self, message):
        Exception.__init__(self, message)
        if message:
            #(type,value)=sys.exc_info()[:2]
            #kprint traceback.print_exc(file=sys.stdout)
            sys.stdout.write("\n")
            print("!!! catalyst lock file in use: " + message)
            sys.stdout.write("\n")
def warn(msg):
    """Print a catalyst warning message to stdout."""
    print("!!! catalyst: " + msg)
def find_binary(myc):
    """look through the environmental path for an executable file named whatever myc is"""
    # this sucks. badly.
    p = os.environ["PATH"]
    if p == None:
        return None
    for x in p.split(":"):
        candidate = "%s/%s" % (x, myc)
        # if it exists, and is executable
        # NOTE(review): mask 0x0248 (0o1110) tests the owner/group exec bits
        # (plus the sticky bit) of st_mode — presumably "is executable"; confirm.
        if os.path.exists(candidate) and os.stat(candidate)[0] & 0x0248:
            return candidate
    return None
def spawn_bash(mycommand, env={}, debug=False, opt_name=None, **keywords):
    """spawn mycommand as an arguement to bash

    Builds a [BASH_BINARY, (-x,) -c, mycommand] argv and delegates to spawn().
    NOTE(review): deliberately mutates the caller-supplied env dict (and the
    shared default) so the forced BASH_ENV reaches spawn()."""
    args = [BASH_BINARY]
    if not opt_name:
        opt_name = mycommand.split()[0]
    if "BASH_ENV" not in env:
        # point BASH_ENV somewhere harmless so user profiles can't interfere
        env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
    if debug:
        args.append("-x")
    args.append("-c")
    args.append(mycommand)
    return spawn(args, env=env, opt_name=opt_name, **keywords)
#def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
#   collect_fds=[1],fd_pipes=None,**keywords):
def spawn_get_output(mycommand, raw_exit_code=False, emulate_gso=True, \
    collect_fds=[1], fd_pipes=None, **keywords):
    """call spawn, collecting the output to fd's specified in collect_fds list
    emulate_gso is a compatability hack to emulate commands.getstatusoutput's return, minus the
    requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
    'lets let log only stdin and let stderr slide by'.

    emulate_gso was deprecated from the day it was added, so convert your code over.
    spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
    global selinux_capable
    pr, pw = os.pipe()
    if fd_pipes == None:
        fd_pipes = {}
        fd_pipes[0] = 0
    # redirect every requested fd into our collection pipe
    for x in collect_fds:
        fd_pipes[x] = pw
    keywords["returnpid"] = True
    mypid = spawn_bash(mycommand, fd_pipes=fd_pipes, **keywords)
    os.close(pw)
    # spawn returns None (not a pid list) when the binary could not be found
    if not isinstance(mypid, list):
        os.close(pr)
        return [mypid, "%s: No such file or directory" % mycommand.split()[0]]
    fd = os.fdopen(pr, "r")
    mydata = fd.readlines()
    fd.close()
    if emulate_gso:
        # historical quirk preserved: string.join's default separator is a
        # single space, so lines are space-joined, then one trailing newline
        # is stripped (getstatusoutput-ish).
        mydata = " ".join(mydata)
        if len(mydata) and mydata[-1] == "\n":
            mydata = mydata[:-1]
    retval = os.waitpid(mypid[0], 0)[1]
    cleanup(mypid)
    if raw_exit_code:
        return [retval, mydata]
    return [process_exit_code(retval), mydata]
# base spawn function
def spawn(mycommand, env={}, raw_exit_code=False, opt_name=None, fd_pipes=None, returnpid=False,\
     uid=None, gid=None, groups=None, umask=None, logfile=None, path_lookup=True,\
     selinux_context=None, raise_signals=False, func_call=False):
    """base fork/execve function.
    mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
    environment, use the appropriate spawn call. This is a straight fork/exec code path.
    Can either have a tuple, or a string passed in. If uid/gid/groups/umask specified, it changes
    the forked process to said value. If path_lookup is on, a non-absolute command will be converted
    to an absolute command, otherwise it returns None.

    selinux_context is the desired context, dependant on selinux being available.
    opt_name controls the name the processor goes by.
    fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
    current fd's raw fd #, desired #.

    func_call is a boolean for specifying to execute a python function- use spawn_func instead.
    raise_signals is questionable. Basically throw an exception if signal'd. No exception is thrown
    if the process exits normally.

    logfile overloads the specified fd's to write to a tee process which logs to logfile
    returnpid returns the relevant pids (a list, including the logging process if logfile is on).

    non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
    raw_exit_code controls whether the actual waitpid result is returned, or intrepretted."""

    myc = ''
    if not func_call:
        if isinstance(mycommand, str):
            mycommand = mycommand.split()
        myc = mycommand[0]
        if not os.access(myc, os.X_OK):
            if not path_lookup:
                return None
            myc = find_binary(myc)
            if myc == None:
                return None

    mypid = []
    if logfile:
        # stdout/stderr get piped through a tee child that also writes logfile
        pr, pw = os.pipe()
        mypid.extend(spawn(('tee', '-i', '-a', logfile), returnpid=True, fd_pipes={0: pr, 1: 1, 2: 2}))
        retval = os.waitpid(mypid[-1], os.WNOHANG)[1]
        if retval != 0:
            # tee died on us immediately; bail out with its status
            if raw_exit_code:
                return retval
            return process_exit_code(retval)
        if fd_pipes == None:
            fd_pipes = {0: 0}
        fd_pipes[1] = pw
        fd_pipes[2] = pw

    if not opt_name:
        opt_name = mycommand[0]
    # argv[0] is the display name; real binary path goes to execve below
    myargs = [opt_name]
    myargs.extend(mycommand[1:])

    global spawned_pids
    mypid.append(os.fork())
    if mypid[-1] != 0:
        # parent: log the bugger.
        spawned_pids.extend(mypid)

    if mypid[-1] == 0:
        # === child ===
        if func_call:
            spawned_pids = []

        # this may look ugly, but basically it moves file descriptors around to ensure no
        # handles that are needed are accidentally closed during the final dup2 calls.
        trg_fd = []
        if isinstance(fd_pipes, dict):
            src_fd = []
            # build list of which fds will be where, and where they are at currently
            for x in sorted(fd_pipes.keys()):
                trg_fd.append(x)
                src_fd.append(fd_pipes[x])

            # run through said list dup'ing descriptors so that they won't be waxed
            # by other dup calls.
            for x in range(0, len(trg_fd)):
                if trg_fd[x] == src_fd[x]:
                    continue
                if trg_fd[x] in src_fd[x + 1:]:
                    new = os.dup2(trg_fd[x], max(src_fd) + 1)
                    os.close(trg_fd[x])
                    try:
                        while True:
                            # NOTE(review): original indexed via an undefined
                            # name 's' (always raised, swallowed by a bare
                            # except); patch every remaining occurrence instead.
                            src_fd[src_fd.index(trg_fd[x])] = new
                    except ValueError:
                        # no more occurrences left to rewrite
                        pass

            # transfer the fds to their final pre-exec position.
            for x in range(0, len(trg_fd)):
                if trg_fd[x] != src_fd[x]:
                    os.dup2(src_fd[x], trg_fd[x])
        else:
            trg_fd = [0, 1, 2]

        # wax all open descriptors that weren't requested be left open.
        for x in range(0, max_fd_limit):
            if x not in trg_fd:
                try:
                    os.close(x)
                except SystemExit:
                    raise
                except:
                    pass

        # note this order must be preserved- can't change gid/groups if you change uid first.
        if selinux_capable and selinux_context:
            # NOTE(review): relies on the selinux module having been imported
            # wherever selinux_capable was flipped to True.
            selinux.setexec(selinux_context)
        if gid:
            os.setgid(gid)
        if groups:
            os.setgroups(groups)
        if uid:
            os.setuid(uid)
        if umask:
            os.umask(umask)

        try:
            #print "execing", myc, myargs
            if func_call:
                # either use a passed in func for interpretting the results, or return if no exception.
                # note the passed in list, and dict are expanded.
                if len(mycommand) == 4:
                    os._exit(mycommand[3](mycommand[0](*mycommand[1], **mycommand[2])))
                try:
                    mycommand[0](*mycommand[1], **mycommand[2])
                except Exception as e:
                    print("caught exception " + str(e) + " in forked func " + str(mycommand[0]))
                sys.exit(0)

            #os.execvp(myc,myargs)
            os.execve(myc, myargs, env)
        except SystemExit:
            raise
        except Exception as e:
            if not func_call:
                # NOTE(review): original raised a plain string (illegal since
                # py2.6); wrap the context in a real exception instead.
                raise Exception(str(e) + ":\n   " + myc + " " + " ".join(myargs))
            print("func call failed")

        # If the execve fails, we need to report it, and exit
        # *carefully* --- report error here
        os._exit(1)
        return  # should never get reached

    # === parent ===
    # if we were logging, kill the pipes.
    if logfile:
        os.close(pr)
        os.close(pw)

    if returnpid:
        return mypid

    # loop through pids (typically one, unless logging), either waiting on their death, or waxing them
    # if the main pid (mycommand) returned badly.
    while len(mypid):
        retval = os.waitpid(mypid[-1], 0)[1]
        if retval != 0:
            cleanup(mypid[0:-1], block_exceptions=False)
            # at this point we've killed all other kid pids generated via this call.
            if raw_exit_code:
                return retval
            return process_exit_code(retval, throw_signals=raise_signals)
        else:
            mypid.pop(-1)
    # all children exited cleanly
    return 0
def cmd(mycmd, myexc="", env={}):
    """Run `mycmd` through bash, raising CatalystError(myexc) on non-zero exit.

    stdout is flushed first so the command's output interleaves correctly."""
    try:
        sys.stdout.flush()
        retval = spawn_bash(mycmd, env)
        if retval != 0:
            raise CatalystError(myexc)
    except:
        raise
def process_exit_code(retval, throw_signals=False):
    """process a waitpid returned exit code, returning exit code if it exit'd, or the
    signal if it died from signalling
    if throw_signals is on, it raises a SystemExit if the process was signaled.
    This is intended for usage with threads, although at the moment you can't signal individual
    threads in python, only the master thread, so it's a questionable option."""
    if (retval & 0xff) == 0:
        return retval >> 8  # return exit code
    else:
        if throw_signals:
            # use systemexit, since portage is stupid about exception catching.
            raise SystemExit()
        return (retval & 0xff) << 8  # interrupted by signal
def file_locate(settings, filelist, expand=1):
    """Validate that each key of `filelist` present in `settings` names an existing file.

    If expand is true, a relative path that exists under os.getcwd() is
    rewritten in place to its absolute form. Missing keys are skipped
    (filenames such as cdtar are optional). Raises CatalystError when a
    named file cannot be located or is empty."""
    for myfile in filelist:
        if myfile not in settings:
            # optional key — nothing to check
            continue
        if len(settings[myfile]) == 0:
            raise CatalystError("File variable \"" + myfile + "\" has a length of zero (not specified.)")
        if settings[myfile][0] == "/":
            if not os.path.exists(settings[myfile]):
                raise CatalystError("Cannot locate specified " + myfile + ": " + settings[myfile])
        elif expand and os.path.exists(os.getcwd() + "/" + settings[myfile]):
            # expand=1: accept and absolutize a cwd-relative path
            settings[myfile] = os.getcwd() + "/" + settings[myfile]
        else:
            raise CatalystError("Cannot locate specified " + myfile + ": " + settings[myfile] + " (2nd try)")
The spec file format is a very simple and easy-to-use format for storing data. Here's an example
file:

This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
the string value "value1". item2 would contain an ordered list ["foo", "bar", "oni"]. item3
would contain an ordered list as well: ["meep", "bark", "gleep", "moop"]. It's important to note
that the order of multiple-value items is preserved, but that the order in which the items themselves
are defined is not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
"item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
def parse_spec(mylines):
    """Parse spec-file lines into a dict.

    "key: value" starts a new key; bare continuation lines append to the most
    recent key. A key with exactly one value is stored as a string, otherwise
    as a list (historical catalyst behavior). Keys that end up valueless are
    dropped with a warning. NOTE: consumes (mutates) the `mylines` list.
    Raises Exception on a duplicate key."""
    myspec = {}
    cur_array = []
    trailing_comment = re.compile("#.*$")
    white_space = re.compile(r"\s+")

    while len(mylines):
        myline = mylines.pop(0).strip()

        # Force the line to be clean
        # Remove Comments ( anything following # )
        myline = trailing_comment.sub("", myline)

        # Skip any blank lines
        if not myline:
            continue

        msearch = myline.find(':')

        # If semicolon found assume its a new key
        # This may cause problems if : are used for key values but works for now
        if msearch != -1:
            # Split on the first semicolon creating two strings in the array mobjs
            mobjs = myline.split(':', 1)
            mobjs[1] = mobjs[1].strip()

            # Check that this key doesn't exist already in the spec
            if mobjs[0] in myspec:
                raise Exception("You have a duplicate key (" + mobjs[0] + ") in your spec. Please fix it")

            # Start a new array using the first element of mobjs
            cur_array = [mobjs[0]]
            if mobjs[1]:
                # split on white space creating additional array elements
                subarray = white_space.split(mobjs[1])
                if len(subarray) == 1:
                    # Store as a string if only one element is found.
                    # this is to keep with original catalyst behavior
                    # eventually this may go away if catalyst just works
                    cur_array.append(subarray[0])
                else:
                    cur_array += subarray
        else:
            # Else add on to the last key we were working on
            mobjs = white_space.split(myline)
            cur_array += mobjs

        # store (or re-store) the accumulated values for the current key
        # XXX: Do we really still need this "single value is a string" behavior?
        if len(cur_array) == 2:
            myspec[cur_array[0]] = cur_array[1]
        else:
            myspec[cur_array[0]] = cur_array[1:]

    # Delete empty key pairs (iterate over a copy so deletion is safe)
    for x in list(myspec.keys()):
        if not myspec[x]:
            print("\n\tWARNING: No value set for key " + x + "...deleting")
            del myspec[x]
    return myspec
658 def parse_makeconf(mylines):
661 pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
662 while pos<len(mylines):
663 if len(mylines[pos])<=1:
667 if mylines[pos][0] in ["#"," ","\t"]:
668 #skip indented lines, comments
673 mobj=pat.match(myline)
676 clean_string = re.sub(r"\"",r"",mobj.group(2))
677 mymakeconf[mobj.group(1)]=clean_string
def read_spec(myspecfile):
    """Read a spec file from disk and parse it into a dict (see parse_spec).

    Raises CatalystError when the file cannot be opened."""
    try:
        myf = open(myspecfile, "r")
    except:
        raise CatalystError("Could not open spec file " + myspecfile)
    mylines = myf.readlines()
    myf.close()
    return parse_spec(mylines)
def read_makeconf(mymakeconffile):
    """Read a make.conf file into a dict.

    Prefers the snakeoil bash-dict parser, then portage's getconfig, and
    finally the local parse_makeconf fallback. A missing file yields an
    empty dict; a parse failure raises CatalystError."""
    if os.path.exists(mymakeconffile):
        try:
            try:
                import snakeoil.fileutils
                return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
            except ImportError:
                try:
                    import portage_util
                    return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
                except ImportError:
                    # no external parser available — use our own simple one
                    myf = open(mymakeconffile, "r")
                    mylines = myf.readlines()
                    myf.close()
                    return parse_makeconf(mylines)
        except:
            raise CatalystError("Could not parse make.conf file " + mymakeconffile)
    else:
        # make.conf is optional; a missing file simply yields no settings
        return {}
def msg(mymsg, verblevel=1):
    """Print `mymsg` when the module-level `verbosity` is at least `verblevel`."""
    if verbosity >= verblevel:
        print(mymsg)
def pathcompare(path1, path2):
    """Compare two paths ignoring doubled and trailing slashes; 1 if equal, else 0."""
    # Change double slashes to slash
    path1 = re.sub(r"//", r"/", path1)
    path2 = re.sub(r"//", r"/", path2)
    # Removing ending slash
    path1 = re.sub("/$", "", path1)
    path2 = re.sub("/$", "", path2)
    if path1 == path2:
        return 1
    return 0
def ismount(path):
    "enhanced to handle bind mounts"
    if os.path.ismount(path):
        return 1
    # bind mounts don't show via os.path.ismount; fall back to parsing `mount`
    a = os.popen("mount")
    mylines = a.readlines()
    a.close()
    for line in mylines:
        mysplit = line.split()
        # mount output: "<dev> on <mountpoint> type ..." -> field 2 is the mountpoint;
        # guard against short/odd lines, which the original indexed blindly
        if len(mysplit) > 2 and pathcompare(path, mysplit[2]):
            return 1
    return 0
def arg_parse(cmdline):
    """Parse a list of KEY=VALUE command line arguments into a dict.

    Raises CatalystError on an argument without '='."""
    #global required_config_file_values
    mydict = {}
    for x in cmdline:
        # split only on the first '=' so values may themselves contain '='
        foo = x.split("=", 1)
        if len(foo) != 2:
            raise CatalystError("Invalid arg syntax: " + x)
        else:
            mydict[foo[0]] = foo[1]
    # if all is well, we should return (we should have bailed before here if not)
    return mydict
def addl_arg_parse(myspec, addlargs, requiredspec, validspec):
    "helper function to help targets parse additional arguments"
    global valid_config_file_values
    # copy recognized args into myspec; reject anything unknown
    for x in addlargs.keys():
        if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
            raise CatalystError("Argument \"" + x + "\" not recognized.")
        else:
            myspec[x] = addlargs[x]
    # every required argument must now be present
    for x in requiredspec:
        if x not in myspec:
            raise CatalystError("Required argument \"" + x + "\" not specified.")
def spec_dump(myspec):
    """Print every key/value pair of `myspec` to stdout, one per line."""
    for x in myspec.keys():
        print(x + ": " + repr(myspec[x]))
def touch(myfile):
    """Create (or truncate) `myfile`; raise CatalystError if it can't be written."""
    try:
        myf = open(myfile, "w")
        myf.close()
    except IOError:
        raise CatalystError("Could not touch " + myfile + ".")
def countdown(secs=5, doing="Starting"):
    """Pause for `secs` seconds, printing a visible abortable countdown.

    secs=0 returns immediately with no output."""
    if secs:
        print(">>> Waiting %d seconds before starting..." % secs)
        # trailing-comma-style prompt: stay on one line while counting down
        sys.stdout.write(">>> (Control-C to abort)...\n" + doing + " in: ")
        for sec in reversed(range(secs)):
            sys.stdout.write(str(sec + 1) + " ")
            sys.stdout.flush()
            time.sleep(1)
        print("")
790 def normpath(mypath):
792 if mypath[-1] == "/":
794 newpath = os.path.normpath(mypath)
796 if newpath[:2] == "//":
797 newpath = newpath[1:]