2 import sys,string,os,types,re,signal,traceback,time
# Module-wide capability flags and the shell binary used by spawn_bash().
# NOTE(review): selinux_capable is presumably set True elsewhere when the
# selinux module imports successfully -- not visible in this chunk.
4 selinux_capable = False
5 #userpriv_capable = (os.getuid() == 0)
6 #fakeroot_capable = False
7 BASH_BINARY = "/bin/bash"
# Upper bound of file descriptors spawn() closes in the child process.
# NOTE(review): the `resource` import and RLIMIT_NOFILE name, plus the
# try/except fallback for platforms without `resource`, are in lines not
# shown in this chunk -- confirm before editing.
11 max_fd_limit=resource.getrlimit(RLIMIT_NOFILE)
15 # hokay, no resource module.
18 # pids this process knows of.
26 def cleanup(pids,block_exceptions=True):
27 """function to go through and reap the list of pids passed to it"""
# Polite shutdown first: SIGTERM, then check with a non-blocking waitpid.
33 os.kill(x,signal.SIGTERM)
# waitpid(WNOHANG) returns status 0 both when the child is still running
# (pid 0) and when it exited cleanly; either way we escalate to SIGKILL.
# A SIGKILL on an already-reaped pid raises OSError, handled below.
34 if os.waitpid(x,os.WNOHANG)[1] == 0:
35 # feisty bugger, still alive.
36 os.kill(x,signal.SIGKILL)
# errno 10 (ECHILD) / 3 (ESRCH): child already reaped or process gone --
# those are the expected races and are ignored.
42 if oe.errno not in (10,3):
# NOTE(review): list.remove() raises ValueError, not IndexError, when the
# pid is absent from spawned_pids -- this except clause never fires and a
# missing pid would propagate.  Likely a bug; fix needs the hidden lines.
49 try: spawned_pids.remove(x)
50 except IndexError: pass
54 # a function to turn a string of non-printable characters into a string of
# Body fragment of the hexifying helper (its def line is not visible here).
# string.hexdigits is "0123456789abcdefABCDEF"; indices 0-15 yield the
# lowercase digits used below.
57 hexStr = string.hexdigits
# Two hex digits per byte: high nibble first, then low nibble.
61 r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
65 def generate_contents(file,contents_function="auto",verbose=False):
# "auto" sniffs the archive type from the filename extension and rewrites
# `_` (the working copy of contents_function) to a contents_map key.
68 if _ == 'auto' and file.endswith('.iso'):
70 if (_ in ['tar-tv','auto']):
71 if file.endswith('.tgz') or file.endswith('.tar.gz'):
73 elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
75 elif file.endswith('.tar'):
79 warn('File %r has unknown type for automatic detection.' % (file, ))
# Dispatch: contents_map maps key -> [function, command template]; call
# the function with the template and verbosity.
83 _ = contents_map[contents_function]
84 return _[0](file,_[1],verbose)
# Error text re-raised as a CatalystError on failure (the raise statement
# itself is in lines not shown in this chunk).
87 "Error generating contents, is appropriate utility (%s) installed on your system?" \
88 % (contents_function, )
90 def calc_contents(file,cmd,verbose):
# Runs the interpolated listing command and captures its output lines
# (the command execution and mylines assignment are in hidden lines);
# the joined output is the contents listing returned to generate_contents.
96 result="".join(mylines)
101 # This hash map must be defined after the function calc_contents
102 # It is possible to call different functions from this but they must be defined
# Key -> [handler function, shell command template]; %(file)s is filled in
# by the handler before execution.
106 # 'find' is disabled because it requires the source path, which is not
108 #"find" :[calc_contents,"find %(path)s"],
109 "tar-tv":[calc_contents,"tar tvf %(file)s"],
110 "tar-tvz":[calc_contents,"tar tvzf %(file)s"],
111 "tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
112 "isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
113 # isoinfo-f should be a last resort only
114 "isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
117 def generate_hash(file,hash_function="crc32",verbose=False):
# Dispatch through hash_map: [function, cmd, cmd_args, id_string].
119 return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
120 hash_map[hash_function][3],verbose)
# Python 2 raise-with-comma syntax; any dispatch failure is converted to
# a CatalystError for the caller.
122 raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"
124 def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
# NOTE(review): os.popen's exit status is never checked, so a missing
# hash utility produces empty output here instead of an error.
125 a=os.popen(cmd+" "+cmd_args+" "+file)
126 mylines=a.readlines()
# First whitespace-separated token of the first output line is the digest.
128 mylines=mylines[0].split()
# Python 2 print statement; emitted only in verbose mode (guard hidden).
131 print id_string+" (%s) = %s" % (file, result)
134 def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
# Same popen caveat as calc_hash: exit status is ignored.
135 a=os.popen(cmd+" "+cmd_args+" "+file)
# Expects "HASH  /path/to/file"-style output on the first line.
137 mylines=a.readline().split()
# Strip the directory part so only the basename appears in the result.
139 short_file=os.path.split(mylines[1])[1]
# Result is the shash-style "ID (file) = digest" record plus newline.
141 result=header+hash+" "+short_file+"\n"
143 print header+" (%s) = %s" % (short_file, result)
146 # This hash map must be defined after the function calc_hash
147 # It is possible to call different functions from this but they must be defined
149 # Key,function,cmd,cmd_args,Print string
# Every algorithm is delegated to the external `shash` utility via
# calc_hash2; the last element is the label printed in digest files.
151 "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],\
152 "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],\
153 "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],\
154 "gost":[calc_hash2,"shash","-a GOST","GOST"],\
155 "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],\
156 "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],\
157 "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],\
158 "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],\
159 "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],\
160 "md2":[calc_hash2,"shash","-a MD2","MD2"],\
161 "md4":[calc_hash2,"shash","-a MD4","MD4"],\
162 "md5":[calc_hash2,"shash","-a MD5","MD5"],\
163 "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],\
164 "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],\
165 "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],\
166 "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],\
167 "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],\
168 "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],\
169 "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],\
170 "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],\
171 "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],\
172 "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],\
173 "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],\
174 "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],\
175 "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],\
176 "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],\
177 "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],\
180 def read_from_clst(file):
# Open failure appears to be tolerated (the raise is commented out);
# the surrounding try/except is in lines not shown here.
187 #raise CatalystError, "Could not open file "+file
# Accumulate the whole file into one string, newlines preserved
# (the line-stripping variant below was deliberately disabled).
188 for line in myf.readlines():
189 #line = string.replace(line, "\n", "") # drop newline
190 myline = myline + line
195 # these should never be touched
# Target class names that the build machinery itself depends on.
196 required_build_targets=["generic_target","generic_stage_target"]
198 # new build types should be added here
199 valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
200 "livecd_stage1_target","livecd_stage2_target","embedded_target",
201 "tinderbox_target","snapshot_target","netboot_target","netboot2_target"]
# Keys that MUST appear in the catalyst config file ...
203 required_config_file_values=["storedir","sharedir","distdir","portdir"]
# ... plus the optional keys accepted without complaint (copy, so the
# required list itself stays untouched by the appends below).
204 valid_config_file_values=required_config_file_values[:]
205 valid_config_file_values.append("PKGCACHE")
206 valid_config_file_values.append("KERNCACHE")
207 valid_config_file_values.append("CCACHE")
208 valid_config_file_values.append("DISTCC")
209 valid_config_file_values.append("ICECREAM")
210 valid_config_file_values.append("ENVSCRIPT")
211 valid_config_file_values.append("AUTORESUME")
212 valid_config_file_values.append("FETCH")
213 valid_config_file_values.append("CLEAR_AUTORESUME")
214 valid_config_file_values.append("options")
215 valid_config_file_values.append("DEBUG")
216 valid_config_file_values.append("VERBOSE")
217 valid_config_file_values.append("PURGE")
218 valid_config_file_values.append("PURGEONLY")
219 valid_config_file_values.append("SNAPCACHE")
220 valid_config_file_values.append("snapshot_cache")
221 valid_config_file_values.append("hash_function")
222 valid_config_file_values.append("digests")
223 valid_config_file_values.append("contents")
224 valid_config_file_values.append("SEEDCACHE")
228 def list_bashify(mylist):
# Python 2 type check: accept a bare string as well as a list.
229 if type(mylist)==types.StringType:
233 for x in range(0,len(mypack)):
234 # surround args with quotes for passing to bash,
235 # allows things like "<" to remain intact
# NOTE(review): naive single-quoting -- an argument containing a single
# quote would break the generated bash command line.
236 mypack[x]="'"+mypack[x]+"'"
# string.join defaults to a single-space separator (Python 2 only).
237 mypack=string.join(mypack)
240 def list_to_string(mylist):
# Same string-or-list normalization as list_bashify.
241 if type(mylist)==types.StringType:
245 for x in range(0,len(mypack)):
246 # surround args with quotes for passing to bash,
247 # allows things like "<" to remain intact
# NOTE(review): unlike list_bashify, no quoting line is visible here --
# the inherited comment above may simply be stale; verify against the
# hidden line 248 before relying on it.
249 mypack=string.join(mypack)
252 class CatalystError(Exception):
# Exception that also dumps any in-flight traceback when constructed.
253 def __init__(self, message):
# Capture the currently-handled exception, if any (shadows builtin
# `type` -- NOTE(review): rename when this block can be edited).
255 (type,value)=sys.exc_info()[:2]
# NOTE(review): traceback.print_exc() writes to stdout itself and
# returns None, so this print emits a stray "None" line.
258 print traceback.print_exc(file=sys.stdout)
260 print "!!! catalyst: "+message
263 class LockInUse(Exception):
# Raised when another catalyst instance holds the lock file; unlike
# CatalystError the traceback dump is deliberately disabled.
264 def __init__(self, message):
266 #(type,value)=sys.exc_info()[:2]
269 #kprint traceback.print_exc(file=sys.stdout)
271 print "!!! catalyst lock file in use: "+message
279 print "!!! catalyst: "+msg
281 def find_binary(myc):
282 """look through the environmental path for an executable file named whatever myc is"""
# `p` is presumably os.environ["PATH"] -- its assignment is in a hidden
# line; each colon-separated component is probed in order.
287 for x in p.split(":"):
288 #if it exists, and is executable
# 0x0248 == 0o1110: tests the 0o1000 bit plus owner-exec (0o100) and
# group-exec (0o10).  NOTE(review): other-exec (0o1) is NOT tested, so
# a binary executable only by "other" would be skipped.
289 if os.path.exists("%s/%s" % (x,myc)) and os.stat("%s/%s" % (x,myc))[0] & 0x0248:
290 return "%s/%s" % (x,myc)
# NOTE(review): mutable default env={} is shared across calls and is
# written to below -- classic Python pitfall; fix needs the hidden lines.
293 def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
294 """spawn mycommand as an arguement to bash"""
# Default the displayed process name to the first word of the command.
297 opt_name=mycommand.split()[0]
# Point BASH_ENV at a nonexistent file so bash sources nothing implicit.
298 if "BASH_ENV" not in env:
299 env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
# args is [BASH_BINARY, ... flags ...] assembled in hidden lines; the
# command string becomes bash's -c-style argument.
303 args.append(mycommand)
304 return spawn(args,env=env,opt_name=opt_name,**keywords)
306 #def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
307 # collect_fds=[1],fd_pipes=None,**keywords):
309 def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
310 collect_fds=[1],fd_pipes=None,**keywords):
311 """call spawn, collecting the output to fd's specified in collect_fds list
312 emulate_gso is a compatability hack to emulate commands.getstatusoutput's return, minus the
313 requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
314 'lets let log only stdin and let stderr slide by'.
316 emulate_gso was deprecated from the day it was added, so convert your code over.
317 spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
318 global selinux_capable
321 #if type(spawn_type) not in [types.FunctionType, types.MethodType]:
322 # s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
# Route every requested child fd into the capture pipe (pipe setup is in
# hidden lines); returnpid makes spawn hand back the pid list instead of
# blocking.
329 for x in collect_fds:
331 keywords["returnpid"]=True
333 mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
# spawn returns a non-list on failure to exec; mimic the shell's
# "No such file or directory" diagnostic in that case.
335 if type(mypid) != types.ListType:
337 return [mypid, "%s: No such file or directory" % mycommand.split()[0]]
# Drain the read side of the capture pipe.
340 mydata=fd.readlines()
# emulate_gso path: collapse to a single string and drop the trailing
# newline, matching commands.getstatusoutput().
343 mydata=string.join(mydata)
344 if len(mydata) and mydata[-1] == "\n":
# Block for the real exit status of the primary child.
346 retval=os.waitpid(mypid[0],0)[1]
349 return [retval,mydata]
350 retval=process_exit_code(retval)
351 return [retval, mydata]
353 # base spawn function
# NOTE(review): mutable default env={} shared across calls -- pitfall.
354 def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
355 uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
356 selinux_context=None, raise_signals=False, func_call=False):
357 """base fork/execve function.
358 mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
359 environment, use the appropriate spawn call. This is a straight fork/exec code path.
360 Can either have a tuple, or a string passed in. If uid/gid/groups/umask specified, it changes
361 the forked process to said value. If path_lookup is on, a non-absolute command will be converted
362 to an absolute command, otherwise it returns None.
364 selinux_context is the desired context, dependant on selinux being available.
365 opt_name controls the name the processor goes by.
366 fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
367 current fd's raw fd #, desired #.
369 func_call is a boolean for specifying to execute a python function- use spawn_func instead.
370 raise_signals is questionable. Basically throw an exception if signal'd. No exception is thrown
373 logfile overloads the specified fd's to write to a tee process which logs to logfile
374 returnpid returns the relevant pids (a list, including the logging process if logfile is on).
376 non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
377 raw_exit_code controls whether the actual waitpid result is returned, or intrepretted."""
# Normalize a string command to an argv list.
381 if type(mycommand)==types.StringType:
382 mycommand=mycommand.split()
# Resolve non-executable/non-absolute commands via PATH (find_binary).
384 if not os.access(myc, os.X_OK):
387 myc = find_binary(myc)
# Logging mode: start a `tee -i -a logfile` child reading from the pipe,
# and confirm it did not die immediately.
393 mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
394 retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
399 return process_exit_code(retval)
408 opt_name = mycommand[0]
410 myargs.extend(mycommand[1:])
# Fork; child continues below, parent records the pid globally.
412 mypid.append(os.fork())
415 spawned_pids.extend(mypid)
421 # this may look ugly, but basically it moves file descriptors around to ensure no
422 # handles that are needed are accidentally closed during the final dup2 calls.
424 if type(fd_pipes)==types.DictType:
429 #build list of which fds will be where, and where they are at currently
432 src_fd.append(fd_pipes[x])
434 # run through said list dup'ing descriptors so that they won't be waxed
435 # by other dup calls.
436 for x in range(0,len(trg_fd)):
437 if trg_fd[x] == src_fd[x]:
# If a target fd number is still needed as a source later, park a
# duplicate of it above every fd currently in play.
439 if trg_fd[x] in src_fd[x+1:]:
440 new=os.dup2(trg_fd[x],max(src_fd) + 1)
444 src_fd[s.index(trg_fd[x])]=new
# Python 2 except syntax; SystemExit must not be swallowed in the child.
445 except SystemExit, e:
450 # transfer the fds to their final pre-exec position.
451 for x in range(0,len(trg_fd)):
452 if trg_fd[x] != src_fd[x]:
453 os.dup2(src_fd[x], trg_fd[x])
457 # wax all open descriptors that weren't requested be left open.
458 for x in range(0,max_fd_limit):
462 except SystemExit, e:
467 # note this order must be preserved- can't change gid/groups if you change uid first.
468 if selinux_capable and selinux_context:
470 selinux.setexec(selinux_context)
483 #print "execing", myc, myargs
485 # either use a passed in func for interpretting the results, or return if no exception.
486 # note the passed in list, and dict are expanded.
# func_call protocol: [func, args, kwargs] or [func, args, kwargs,
# result-interpreter]; the interpreter's return becomes the exit code.
487 if len(mycommand) == 4:
488 os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
490 mycommand[0](*mycommand[1],**mycommand[2])
492 print "caught exception",e," in forked func",mycommand[0]
495 #os.execvp(myc,myargs)
# Replace the child image; only reached when func_call is False.
496 os.execve(myc,myargs,env)
497 except SystemExit, e:
# NOTE(review): raising a str is Python-2-only (and was deprecated even
# there); under any modern interpreter this line is a TypeError.
501 raise str(e)+":\n "+myc+" "+string.join(myargs)
502 print "func call failed"
504 # If the execve fails, we need to report it, and exit
505 # *carefully* --- report error here
508 return # should never get reached
510 # if we were logging, kill the pipes.
518 # loop through pids (typically one, unless logging), either waiting on their death, or waxing them
519 # if the main pid (mycommand) returned badly.
# Parent: block on the last pid (the tee when logging, else the command).
521 retval=os.waitpid(mypid[-1],0)[1]
523 cleanup(mypid[0:-1],block_exceptions=False)
524 # at this point we've killed all other kid pids generated via this call.
528 return process_exit_code(retval,throw_signals=raise_signals)
# Convenience wrapper: run via bash, raise CatalystError(myexc) on any
# nonzero exit.  NOTE(review): mutable default env={} -- same pitfall as
# spawn/spawn_bash.
534 def cmd(mycmd,myexc="",env={}):
537 retval=spawn_bash(mycmd,env)
539 raise CatalystError,myexc
543 def process_exit_code(retval,throw_signals=False):
544 """process a waitpid returned exit code, returning exit code if it exit'd, or the
545 signal if it died from signalling
546 if throw_signals is on, it raises a SystemExit if the process was signaled.
547 This is intended for usage with threads, although at the moment you can't signal individual
548 threads in python, only the master thread, so it's a questionable option."""
# waitpid status encoding: low byte is the terminating signal (0 if the
# process exited normally), high byte is the exit code.
549 if (retval & 0xff)==0:
550 return retval >> 8 # return exit code
553 #use systemexit, since portage is stupid about exception catching.
# Signal deaths are shifted into the high byte so callers can still
# treat any nonzero return as failure.
555 return (retval & 0xff) << 8 # interrupted by signal
557 def file_locate(settings,filelist,expand=1):
558 #if expand=1, non-absolute paths will be accepted and
559 # expanded to os.getcwd()+"/"+localpath if file exists
# Validates (and optionally absolutizes, in place) each settings entry
# named in filelist; raises CatalystError on any unresolvable path.
560 for myfile in filelist:
561 if myfile not in settings:
562 #filenames such as cdtar are optional, so we don't assume the variable is defined.
# Present but empty is an error, unlike absent (optional) keys.
565 if len(settings[myfile])==0:
566 raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
# Absolute paths must exist as-is ...
567 if settings[myfile][0]=="/":
568 if not os.path.exists(settings[myfile]):
569 raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
# ... relative paths are retried under the current working directory.
570 elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
571 settings[myfile]=os.getcwd()+"/"+settings[myfile]
573 raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"
577 The spec file format is a very simple and easy-to-use format for storing data. Here's an example
587 This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
588 the string value "value1". Item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
589 would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
590 that the order of multiple-value items is preserved, but the order that the items themselves are
591 defined is not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
592 "item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
595 def parse_makeconf(mylines):
# Fallback make.conf parser: scan VAR=VALUE lines with a simple regex.
598 pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
599 while pos<len(mylines):
# Skip empty/whitespace-only lines ...
600 if len(mylines[pos])<=1:
# ... and comments or indented continuation lines.
604 if mylines[pos][0] in ["#"," ","\t"]:
605 #skip indented lines, comments
610 mobj=pat.match(myline)
# Strip every double quote from the value; no escaping is honored, so
# quoted-within-quoted values are flattened.
613 clean_string = re.sub(r"\"",r"",mobj.group(2))
614 mymakeconf[mobj.group(1)]=clean_string
617 def read_makeconf(mymakeconffile):
618 if os.path.exists(mymakeconffile):
# Preference order (each guarded by try/except in hidden lines):
# 1) snakeoil's bash-dict reader, which honors `source`;
621 import snakeoil.fileutils
622 return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
# 2) modern portage.util.getconfig;
626 return portage.util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
# 3) the legacy portage_util module name;
630 return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
# 4) the naive regex parser above as a last resort.
632 myf=open(mymakeconffile,"r")
633 mylines=myf.readlines()
635 return parse_makeconf(mylines)
637 raise CatalystError, "Could not parse make.conf file "+mymakeconffile
# Verbosity-gated message printer; `verbosity` is a module-level global
# whose assignment is not visible in this chunk.
642 def msg(mymsg,verblevel=1):
643 if verbosity>=verblevel:
646 def pathcompare(path1,path2):
647 # Change double slashes to slash
# Canonicalize both paths (collapse "//" and drop a trailing "/") before
# comparing; the actual comparison/return is in hidden lines.
648 path1 = re.sub(r"//",r"/",path1)
649 path2 = re.sub(r"//",r"/",path2)
650 # Removing ending slash
651 path1 = re.sub("/$","",path1)
652 path2 = re.sub("/$","",path2)
659 "enhanced to handle bind mounts"
# Fragment of a mount-detection helper (its def line is hidden).  Fast
# path: trust os.path.ismount; otherwise scan mount-table lines (the
# source `a` is presumably /proc/mounts or `mount` output -- TODO confirm)
# and compare the mountpoint field against the queried path.
660 if os.path.ismount(path):
663 mylines=a.readlines()
667 if pathcompare(path,mysplit[2]):
671 def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
672 "helper function to help targets parse additional arguments"
673 global valid_config_file_values
# Accept any key that is target-valid, globally-valid, or required;
# collect complaints instead of failing fast so the user sees them all.
676 for x in addlargs.keys():
677 if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
678 messages.append("Argument \""+x+"\" not recognized.")
# Recognized keys are copied into the target's spec dict in place.
680 myspec[x]=addlargs[x]
682 for x in requiredspec:
684 messages.append("Required argument \""+x+"\" not specified.")
# All accumulated problems are reported in a single CatalystError.
687 raise CatalystError, '\n\tAlso: '.join(messages)
694 raise CatalystError, "Could not touch "+myfile+"."
696 def countdown(secs=5, doing="Starting"):
# Interactive grace period: announce, then tick down one number per
# second on the same line (the sleep loop is in hidden lines).
698 print ">>> Waiting",secs,"seconds before starting..."
699 print ">>> (Control-C to abort)...\n"+doing+" in: ",
703 sys.stdout.write(str(sec+1)+" ")
708 def normpath(mypath):
# Remember whether the caller passed a trailing slash (restored in
# hidden lines), then normalize.
710 if mypath[-1] == "/":
712 newpath = os.path.normpath(mypath)
# POSIX permits os.path.normpath to keep a leading "//"; collapse it to
# a single slash for consistency.
714 if newpath[:2] == "//":
715 newpath = newpath[1:]