1 # Copyright 1999-2005 Gentoo Foundation
2 # Distributed under the terms of the GNU General Public License v2
3 # $Header: /var/cvsroot/gentoo/src/catalyst/modules/catalyst_support.py,v 1.61 2005/12/02 19:37:02 wolf31o2 Exp $
5 import sys,string,os,types,re,signal,traceback,md5,sha,time
# Runtime capability flags and limits used by the spawn helpers below.
6 selinux_capable = False
# NOTE(review): both capability probes are commented out here; presumably set
# elsewhere or dead -- confirm against the full file.
7 #userpriv_capable = (os.getuid() == 0)
8 #fakeroot_capable = False
# Shell binary used by spawn_bash().
9 BASH_BINARY = "/bin/bash"
# Max open file descriptors; spawn() closes fds 0..max_fd_limit in the child.
# NOTE(review): RLIMIT_NOFILE is unqualified (resource.RLIMIT_NOFILE expected);
# the guarding try/except for a missing resource module is elided from this
# listing -- confirm.
13 max_fd_limit=resource.getrlimit(RLIMIT_NOFILE)
17 # hokay, no resource module.
20 # pids this process knows of.
28 def cleanup(pids,block_exceptions=True):
29 """function to go through and reap the list of pids passed to it"""
# (loop setup elided from this listing)
# Ask politely first; if waitpid(WNOHANG) reports it still running, SIGKILL.
35 os.kill(x,signal.SIGTERM)
36 if os.waitpid(x,os.WNOHANG)[1] == 0:
37 # feisty bugger, still alive.
38 os.kill(x,signal.SIGKILL)
# errno 10 (ECHILD) / 3 (ESRCH): child already reaped or gone -- benign.
44 if oe.errno not in (10,3):
# Drop the pid from the module-level registry once dealt with.
# NOTE(review): BUG? list.remove raises ValueError, not IndexError, when the
# item is absent -- this except clause likely never fires; confirm intent.
51 try: spawned_pids.remove(x)
52 except IndexError: pass
56 # a function to turn a string of non-printable characters into a string of
# hex digits (two per input byte); the def line and loop header are elided.
59 hexStr = string.hexdigits
# Append high nibble then low nibble of byte value i.
63 r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
67 # A function to calculate the md5 sum of a file
# NOTE(review): `file` shadows the builtin; the md5 module used here is
# long-deprecated in favor of hashlib.md5 -- confirm before modernizing.
68 def calc_md5(file,verbose=False):
# Feeds the file line by line into the digest (open/update lines elided).
71 for line in f.readlines():
74 md5sum = hexify(m.digest())
76 print "MD5 (%s) = %s" % (file, md5sum)
# Same shape as calc_md5 but with the (deprecated) sha module.
80 def calc_sha(file,verbose=False):
83 for line in f.readlines():
86 shaval = hexify(m.digest())
88 print "SHA (%s) = %s" % (file, shaval)
91 def read_from_clst(file):
# Concatenates all lines of the file with newlines stripped; apparently
# returns the accumulated string (open/close/return lines elided) -- confirm.
98 #raise CatalystError, "Could not open file "+file
99 for line in myf.readlines():
100 line = string.replace(line, "\n", "") # drop newline
101 myline = myline + line
# these should never be touched
required_build_targets = ["generic_target", "generic_stage_target"]

# new build types should be added here
valid_build_targets = [
	"stage1_target", "stage2_target", "stage3_target", "stage4_target",
	"grp_target", "livecd_stage1_target", "livecd_stage2_target",
	"embedded_target", "tinderbox_target", "snapshot_target",
	"netboot_target",
]
# Keys every catalyst config file must define.
required_config_file_values=["storedir","sharedir","distdir","portdir"]

# Everything a config file may define: the required keys plus the optional
# feature/cache flags.  Built with one extend() instead of seventeen repeated
# append() calls; list order matches the original append sequence exactly.
valid_config_file_values=required_config_file_values[:]
valid_config_file_values.extend([
	"PKGCACHE", "KERNCACHE", "CCACHE", "DISTCC", "ENVSCRIPT", "AUTORESUME",
	"SHA", "MD5", "FETCH", "CLEAR_AUTORESUME", "options", "DEBUG",
	"VERBOSE", "PURGE", "SNAPCACHE", "snapshot_cache", "SEEDCACHE",
])
# Quote each element of a string-or-list for safe hand-off to bash.
136 def list_bashify(mylist):
# Normalization of a bare string into a list is elided from this listing.
137 if type(mylist)==types.StringType:
141 for x in range(0,len(mypack)):
142 # surround args with quotes for passing to bash,
143 # allows things like "<" to remain intact
144 mypack[x]="'"+mypack[x]+"'"
# Join the quoted args into one space-separated string (returned below).
145 mypack=string.join(mypack)
# Flatten a string-or-list into one space-separated string.
148 def list_to_string(mylist):
149 if type(mylist)==types.StringType:
153 for x in range(0,len(mypack)):
154 # surround args with quotes for passing to bash,
155 # allows things like "<" to remain intact
# NOTE(review): unlike list_bashify, no quoting statement is visible here
# (line 156 elided); the comment above may be a stale copy-paste -- confirm.
157 mypack=string.join(mypack)
# Generic catalyst failure: prints the active traceback and a banner message.
160 class CatalystError(Exception):
161 def __init__(self, message):
# Capture any in-flight exception; `type` shadows the builtin here.
163 (type,value)=sys.exc_info()[:2]
# NOTE(review): traceback.print_exc() already writes to stdout and returns
# None, so this statement additionally prints "None" -- confirm intent.
166 print traceback.print_exc(file=sys.stdout)
168 print "!!! catalyst: "+message
# Raised when another catalyst process holds the lock file.
171 class LockInUse(Exception):
172 def __init__(self, message):
# Traceback printing deliberately disabled (commented out), unlike CatalystError.
174 #(type,value)=sys.exc_info()[:2]
177 #kprint traceback.print_exc(file=sys.stdout)
179 print "!!! catalyst lock file in use: "+message
# Tail of an error-reporting helper (die()/warn(); its def line is elided).
187 print "!!! catalyst: "+msg
190 def find_binary(myc):
191 """look through the environmental path for an executable file named whatever myc is"""
# (PATH retrieval elided) -- p presumably holds os.environ["PATH"]; confirm.
196 for x in p.split(":"):
197 #if it exists, and is executable
# NOTE(review): 0x0248 == 0o1110 = sticky + owner-exec + group-exec bits; a
# plain exec-bit test would be 0o111 (0x49). Looks like a typo'd mask -- the
# safe fix would be os.access(path, os.X_OK); confirm against full file.
198 if os.path.exists("%s/%s" % (x,myc)) and os.stat("%s/%s" % (x,myc))[0] & 0x0248:
199 return "%s/%s" % (x,myc)
# NOTE(review): mutable default env={} is shared across calls and is mutated
# below (env["BASH_ENV"]=...) -- a classic Python pitfall; confirm callers.
203 def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
204 """spawn mycommand as an arguement to bash"""
# Default the displayed process name to the first command word.
207 opt_name=mycommand.split()[0]
# Point BASH_ENV at a nonexistent path so bash sources no startup environment.
208 if not env.has_key("BASH_ENV"):
209 env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
# (args list construction, incl. BASH_BINARY and -c, is elided here.)
213 args.append(mycommand)
214 return spawn(args,env=env,opt_name=opt_name,**keywords)
216 #def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
217 # collect_fds=[1],fd_pipes=None,**keywords):
# NOTE(review): mutable default collect_fds=[1] -- shared across calls.
218 def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
219 collect_fds=[1],fd_pipes=None,**keywords):
220 """call spawn, collecting the output to fd's specified in collect_fds list
221 emulate_gso is a compatability hack to emulate commands.getstatusoutput's return, minus the
222 requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
223 'lets let log only stdin and let stderr slide by'.
225 emulate_gso was deprecated from the day it was added, so convert your code over.
226 spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
227 global selinux_capable
230 #if type(spawn_type) not in [types.FunctionType, types.MethodType]:
231 # s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
# Route each requested fd into the write end of a pipe (pipe setup elided).
238 for x in collect_fds:
240 keywords["returnpid"]=True
242 mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
# spawn returns a pid list on success; anything else means exec failure.
244 if type(mypid) != types.ListType:
246 return [mypid, "%s: No such file or directory" % mycommand.split()[0]]
# Drain the read end, then join lines and strip one trailing newline (gso
# emulation); the surrounding emulate_gso branch lines are elided here.
249 mydata=fd.readlines()
252 mydata=string.join(mydata)
253 if len(mydata) and mydata[-1] == "\n":
# Reap the main child and return [status, output]; raw vs interpreted exit
# code is selected by raw_exit_code (branch lines partially elided).
255 retval=os.waitpid(mypid[0],0)[1]
258 return [retval,mydata]
259 retval=process_exit_code(retval)
260 return [retval, mydata]
263 # base spawn function
# NOTE(review): mutable default env={} is shared across calls -- confirm no
# caller relies on mutation.
264 def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
265 uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
266 selinux_context=None, raise_signals=False, func_call=False):
267 """base fork/execve function.
268 mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
269 environment, use the appropriate spawn call. This is a straight fork/exec code path.
270 Can either have a tuple, or a string passed in. If uid/gid/groups/umask specified, it changes
271 the forked process to said value. If path_lookup is on, a non-absolute command will be converted
272 to an absolute command, otherwise it returns None.
274 selinux_context is the desired context, dependant on selinux being available.
275 opt_name controls the name the processor goes by.
276 fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
277 current fd's raw fd #, desired #.
279 func_call is a boolean for specifying to execute a python function- use spawn_func instead.
280 raise_signals is questionable. Basically throw an exception if signal'd. No exception is thrown
283 logfile overloads the specified fd's to write to a tee process which logs to logfile
284 returnpid returns the relevant pids (a list, including the logging process if logfile is on).
286 non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
287 raw_exit_code controls whether the actual waitpid result is returned, or intrepretted."""
# Normalize a command string into an argv list.
292 if type(mycommand)==types.StringType:
293 mycommand=mycommand.split()
# Resolve a non-executable/non-absolute command via PATH (guard lines elided).
295 if not os.access(myc, os.X_OK):
298 myc = find_binary(myc)
# logfile mode: spawn a tee child that reads the pipe and appends to logfile;
# pipe creation (pr/pw) is elided from this listing.
304 mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
305 retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
310 return process_exit_code(retval)
319 opt_name = mycommand[0]
321 myargs.extend(mycommand[1:])
# Fork: child continues into the fd shuffle / exec below, parent records pid.
323 mypid.append(os.fork())
326 spawned_pids.extend(mypid)
332 # this may look ugly, but basically it moves file descriptors around to ensure no
333 # handles that are needed are accidentally closed during the final dup2 calls.
335 if type(fd_pipes)==types.DictType:
340 #build list of which fds will be where, and where they are at currently
343 src_fd.append(fd_pipes[x])
345 # run through said list dup'ing descriptors so that they won't be waxed
346 # by other dup calls.
347 for x in range(0,len(trg_fd)):
348 if trg_fd[x] == src_fd[x]:
# If a target fd is still needed as a source later, park it above max(src_fd).
350 if trg_fd[x] in src_fd[x+1:]:
351 new=os.dup2(trg_fd[x],max(src_fd) + 1)
355 src_fd[s.index(trg_fd[x])]=new
356 except SystemExit, e:
361 # transfer the fds to their final pre-exec position.
362 for x in range(0,len(trg_fd)):
363 if trg_fd[x] != src_fd[x]:
364 os.dup2(src_fd[x], trg_fd[x])
368 # wax all open descriptors that weren't requested be left open.
369 for x in range(0,max_fd_limit):
373 except SystemExit, e:
378 # note this order must be preserved- can't change gid/groups if you change uid first.
379 if selinux_capable and selinux_context:
381 selinux.setexec(selinux_context)
392 #print "execing", myc, myargs
394 # either use a passed in func for interpretting the results, or return if no exception.
395 # note the passed in list, and dict are expanded.
# func_call path: mycommand is (func, args, kwargs[, result-interpreter]).
396 if len(mycommand) == 4:
397 os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
399 mycommand[0](*mycommand[1],**mycommand[2])
401 print "caught exception",e," in forked func",mycommand[0]
404 os.execvp(myc,myargs)
405 #os.execve(myc,myargs,env)
406 except SystemExit, e:
# NOTE(review): raising a str is a py2-only "string exception", removed in
# later Pythons -- confirm whether this branch is reachable/intended.
410 raise str(e)+":\n "+myc+" "+string.join(myargs)
411 print "func call failed"
413 # If the execve fails, we need to report it, and exit
414 # *carefully* --- report error here
417 return # should never get reached
419 # if we were logging, kill the pipes.
427 # loop through pids (typically one, unless logging), either waiting on their death, or waxing them
428 # if the main pid (mycommand) returned badly.
430 retval=os.waitpid(mypid[-1],0)[1]
432 cleanup(mypid[0:-1],block_exceptions=False)
433 # at this point we've killed all other kid pids generated via this call.
437 return process_exit_code(retval,throw_signals=raise_signals)
# Run mycmd through bash; on bad exit, raise CatalystError with message myexc
# (the exit-status check between these lines is elided from this listing).
443 def cmd(mycmd,myexc=""):
446 retval=spawn_bash(mycmd)
448 raise CatalystError,myexc
452 def process_exit_code(retval,throw_signals=False):
453 """process a waitpid returned exit code, returning exit code if it exit'd, or the
454 signal if it died from signalling
455 if throw_signals is on, it raises a SystemExit if the process was signaled.
456 This is intended for usage with threads, although at the moment you can't signal individual
457 threads in python, only the master thread, so it's a questionable option."""
# waitpid status encoding: low byte = terminating signal (0 if normal exit),
# high byte = exit code.
458 if (retval & 0xff)==0:
459 return retval >> 8 # return exit code
462 #use systemexit, since portage is stupid about exception catching.
# Signal deaths are shifted into the high byte so they can't collide with
# ordinary exit codes (throw_signals branch elided from this listing).
464 return (retval & 0xff) << 8 # interrupted by signal
# Verify that each settings[key] in filelist names an existing file, expanding
# relative paths against the current directory when expand is true.
467 def file_locate(settings,filelist,expand=1):
468 #if expand=1, non-absolute paths will be accepted and
469 # expanded to os.getcwd()+"/"+localpath if file exists
470 for myfile in filelist:
471 if not settings.has_key(myfile):
472 #filenames such as cdtar are optional, so we don't assume the variable is defined.
475 if len(settings[myfile])==0:
476 raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
# Absolute paths must exist as-is; relative ones are rewritten in place.
477 if settings[myfile][0]=="/":
478 if not os.path.exists(settings[myfile]):
479 raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
480 elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
481 settings[myfile]=os.getcwd()+"/"+settings[myfile]
# Fallthrough (else branch line elided): neither form located the file.
483 raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"
487 The spec file format is a very simple and easy-to-use format for storing data. Here's an example
497 This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
498 the string value "value1". Item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
499 would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
500 that the order of multiple-value items is preserved, but the order in which the items themselves are
501 defined is not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
502 "item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
# Parse spec-file lines into a dict: "key: value" -> string, "key: a b c" ->
# list of strings; continuation lines extend the most recent key.
505 def parse_spec(mylines):
508 colon=re.compile(":")
509 trailing_comment=re.compile("#.*\n")
510 newline=re.compile("\n")
511 leading_white_space=re.compile("^\s+")
512 white_space=re.compile("\s+")
513 while pos<len(mylines):
514 # Force the line to be clean
515 # Remove Comments ( anything following # )
516 mylines[pos]=trailing_comment.sub("",mylines[pos])
518 # Remove newline character \n
519 mylines[pos]=newline.sub("",mylines[pos])
521 # Remove leading white space
522 mylines[pos]=leading_white_space.sub("",mylines[pos])
524 # Skip any blank lines
# NOTE(review): <=1 also silently skips one-character lines, not just empties.
525 if len(mylines[pos])<=1:
528 msearch=colon.search(mylines[pos])
# NOTE(review): the two comments below say "semicolon" but the regex is a
# colon (":"); the code is right, the comments are stale.
530 # If semicolon found assume its a new key
531 # This may cause problems if : are used for key values but works for now
533 # Split on the first semicolon creating two strings in the array mobjs
534 mobjs = colon.split(mylines[pos],1)
535 # Start a new array using the first element of mobjs
538 # split on white space creating additional array elements
539 subarray=mobjs[1].split()
543 # Store as a string if only one element is found.
544 # this is to keep with original catalyst behavior
545 # eventually this may go away if catalyst just works
547 newarray.append(subarray[0])
549 newarray.append(mobjs[1].split())
551 # Else add on to the last key we were working on
553 mobjs = white_space.split(mylines[pos])
# Commit the accumulated key: single value stays a string, several become a
# list (surrounding len checks elided from this listing).
559 myspec[newarray[0]]=newarray[1]
561 myspec[newarray[0]]=newarray[1:]
# Post-pass: flatten nested one-element lists back into strings.
563 for x in myspec.keys():
564 # Convert myspec[x] to an array of strings
566 if type(myspec[x])!=types.StringType:
568 if type(y)==types.ListType:
569 newarray.append(y[0])
570 if type(y)==types.StringType:
573 # Delete empty key pairs
574 if len(myspec[x])==0:
575 print "\n\tWARNING: No value set for key: "+x
576 print "\tdeleting key: "+x+"\n"
# Parse make.conf-style lines into a dict of VAR -> value (quotes stripped).
581 def parse_makeconf(mylines):
584 pat=re.compile("([a-zA-Z_]*)=(.*)")
585 while pos<len(mylines):
# Skip blank/one-character lines (NOTE: <=1, same quirk as parse_spec).
586 if len(mylines[pos])<=1:
590 if mylines[pos][0] in ["#"," ","\t"]:
591 #skip indented lines, comments
596 mobj=pat.match(myline)
# Strip every double-quote character from the value before storing.
599 clean_string = re.sub(r"\"",r"",mobj.group(2))
600 mymakeconf[mobj.group(1)]=clean_string
# Open a spec file and hand its lines to parse_spec; wraps open errors in
# CatalystError (the try/except scaffolding is elided from this listing).
603 def read_spec(myspecfile):
605 myf=open(myspecfile,"r")
607 raise CatalystError, "Could not open spec file "+myspecfile
608 mylines=myf.readlines()
610 return parse_spec(mylines)
# Read and parse a make.conf file; returns the parsed dict.
612 def read_makeconf(mymakeconffile):
613 if os.path.exists(mymakeconffile):
615 myf=open(mymakeconffile,"r")
616 mylines=myf.readlines()
618 return parse_makeconf(mylines)
# NOTE(review): BUG -- `myspecfile` is not defined in this function; the
# message should use `mymakeconffile`. As written this line raises NameError
# instead of the intended CatalystError. (Cannot fix here: the surrounding
# try/except lines are elided from this listing.)
620 raise CatalystError, "Could not open make.conf file "+myspecfile
# Print mymsg only when the module-level `verbosity` meets verblevel
# (the print line itself is elided from this listing).
625 def msg(mymsg,verblevel=1):
626 if verbosity>=verblevel:
# Compare two paths after normalizing // and trailing /; the comparison and
# return statements are elided from this listing (presumably returns 1/0).
629 def pathcompare(path1,path2):
630 # Change double slashes to slash
631 path1 = re.sub(r"//",r"/",path1)
632 path2 = re.sub(r"//",r"/",path2)
633 # Removing ending slash
634 path1 = re.sub("/$","",path1)
635 path2 = re.sub("/$","",path2)
# Body of an ismount-style helper (def line elided): falls back to scanning
# /proc/mounts (?) and comparing each mount point via pathcompare -- confirm.
642 "enhanced to handle bind mounts"
643 if os.path.ismount(path):
646 mylines=a.readlines()
650 if pathcompare(path,mysplit[2]):
# Parse key=value command-line args into a dict; malformed args are fatal.
654 def arg_parse(cmdline):
655 #global required_config_file_values
# Split on the first "=" only, so values may themselves contain "=".
658 foo=string.split(x,"=",1)
660 raise CatalystError, "Invalid arg syntax: "+x
663 mydict[foo[0]]=foo[1]
665 # if all is well, we should return (we should have bailed before here if not)
668 def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
669 "helper function to help targets parse additional arguments"
670 global valid_config_file_values
# Reject any key not in the target's validspec or the global config whitelist.
671 for x in addlargs.keys():
672 if x not in validspec and x not in valid_config_file_values:
673 raise CatalystError, "Argument \""+x+"\" not recognized."
675 myspec[x]=addlargs[x]
# Every required key must have been supplied by the caller.
676 for x in requiredspec:
677 if not myspec.has_key(x):
678 raise CatalystError, "Required argument \""+x+"\" not specified."
def spec_dump(myspec):
	"""Print every entry of a parsed spec as one "key: repr(value)" line."""
	for key, value in myspec.items():
		print(key + ": " + repr(value))
# Tail of touch() (def and try lines elided): raised when creating/touching
# myfile fails.
689 raise CatalystError, "Could not touch "+myfile+"."
# Interactive delay: announce, then count down `secs` seconds so the user can
# Ctrl-C out (the sleep loop itself is elided from this listing).
691 def countdown(secs=5, doing="Starting"):
693 print ">>> Waiting",secs,"seconds before starting..."
694 print ">>> (Control-C to abort)...\n"+doing+" in: ",
698 sys.stdout.write(str(sec+1)+" ")
# Normalize a path (drop one trailing slash, run os.path.normpath); the slash
# stripping and return lines are partially elided from this listing.
703 def normpath(mypath):
705 if mypath[-1] == "/":
707 newpath = os.path.normpath(mypath)
# POSIX allows normpath to keep a leading "//"; collapse it to a single "/".
709 if newpath[:2] == "//":
710 newpath = newpath[1:]