# modules/catalyst_support.py

import sys,string,os,types,re,signal,traceback,time
#import md5,sha
selinux_capable = False
#userpriv_capable = (os.getuid() == 0)
#fakeroot_capable = False
BASH_BINARY             = "/bin/bash"

try:
        import resource
        # use the soft limit on open file descriptors reported by the OS
        max_fd_limit=resource.getrlimit(resource.RLIMIT_NOFILE)[0]
except SystemExit, e:
        raise
except:
        # okay, no resource module.
        max_fd_limit=256

# pids this process knows of.
spawned_pids = []

try:
        import urllib
except SystemExit, e:
        raise

def cleanup(pids,block_exceptions=True):
        """function to go through and reap the list of pids passed to it"""
        global spawned_pids
        if type(pids) == int:
                pids = [pids]
        for x in pids:
                try:
                        os.kill(x,signal.SIGTERM)
                        if os.waitpid(x,os.WNOHANG)[1] == 0:
                                # feisty bugger, still alive.
                                os.kill(x,signal.SIGKILL)
                                os.waitpid(x,0)

                except OSError, oe:
                        # errno 3 (ESRCH) and 10 (ECHILD) just mean the pid is
                        # already gone; anything else is only fatal when the
                        # caller asked for exceptions.
                        if not block_exceptions and oe.errno not in (10,3):
                                raise oe
                except SystemExit:
                        raise
                except Exception:
                        if not block_exceptions:
                                raise
                try:    spawned_pids.remove(x)
                except ValueError:      pass


# a function to turn a string of non-printable characters into a string of
# hex characters
def hexify(str):
        hexStr = string.hexdigits
        r = ''
        for ch in str:
                i = ord(ch)
                r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
        return r
# hexify()

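# Illustrative usage sketch (comment only, not executed at import time): hexify
# maps each byte of its argument to two lowercase hex digits, so
#
#   hexify("\x0a\xff")   # -> "0aff"
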
def generate_contents(file,contents_function="auto",verbose=False):
        try:
                _ = contents_function
                if _ == 'auto' and file.endswith('.iso'):
                        _ = 'isoinfo-l'
                if (_ in ['tar-tv','auto']):
                        if file.endswith('.tgz') or file.endswith('.tar.gz'):
                                _ = 'tar-tvz'
                        elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
                                _ = 'tar-tvj'
                        elif file.endswith('.tar'):
                                _ = 'tar-tv'

                if _ == 'auto':
                        warn('File %r has unknown type for automatic detection.' % (file, ))
                        return None
                else:
                        contents_function = _
                        _ = contents_map[contents_function]
                        return _[0](file,_[1],verbose)
        except:
                raise CatalystError,\
                        "Error generating contents, is the appropriate utility (%s) installed on your system?" \
                        % (contents_function, )

def calc_contents(file,cmd,verbose):
        args={ 'file': file }
        cmd=cmd % dict(args)
        a=os.popen(cmd)
        mylines=a.readlines()
        a.close()
        result="".join(mylines)
        if verbose:
                print result
        return result

# This contents_map must be defined after the function calc_contents
# It is possible to call different functions from this but they must be defined
# before contents_map
# Key,function,cmd
contents_map={
        # 'find' is disabled because it requires the source path, which is not
        # always available
        #"find"         :[calc_contents,"find %(path)s"],
        "tar-tv":[calc_contents,"tar tvf %(file)s"],
        "tar-tvz":[calc_contents,"tar tvzf %(file)s"],
        "tar-tvj":[calc_contents,"tar -I lbzip2 -tvf %(file)s"],
        "isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
        # isoinfo-f should be a last resort only
        "isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
}

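# Illustrative usage sketch (comment only; the tarball path is hypothetical and
# the external tar/isoinfo tools must be installed for the mapped commands to run):
#
#   listing = generate_contents("/var/tmp/catalyst/builds/stage3.tar.bz2")
#   # "auto" picks "tar-tvj" from contents_map based on the .tar.bz2 suffix.
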
def generate_hash(file,hash_function="crc32",verbose=False):
        try:
                return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
                        hash_map[hash_function][3],verbose)
        except:
                raise CatalystError,"Error generating hash, is the appropriate utility installed on your system?"

def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
        a=os.popen(cmd+" "+cmd_args+" "+file)
        mylines=a.readlines()
        a.close()
        mylines=mylines[0].split()
        result=mylines[0]
        if verbose:
                print id_string+" (%s) = %s" % (file, result)
        return result

def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
        a=os.popen(cmd+" "+cmd_args+" "+file)
        header=a.readline()
        mylines=a.readline().split()
        hash=mylines[0]
        short_file=os.path.split(mylines[1])[1]
        a.close()
        result=header+hash+"  "+short_file+"\n"
        if verbose:
                print header+" (%s) = %s" % (short_file, result)
        return result

# This hash_map must be defined after the function calc_hash
# It is possible to call different functions from this but they must be defined
# before hash_map
# Key,function,cmd,cmd_args,Print string
hash_map={
        "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],
        "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],
        "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],
        "gost":[calc_hash2,"shash","-a GOST","GOST"],
        "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],
        "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],
        "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],
        "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],
        "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],
        "md2":[calc_hash2,"shash","-a MD2","MD2"],
        "md4":[calc_hash2,"shash","-a MD4","MD4"],
        "md5":[calc_hash2,"shash","-a MD5","MD5"],
        "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],
        "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],
        "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],
        "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],
        "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],
        "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],
        "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],
        "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],
        "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],
        "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],
        "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],
        "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],
        "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],
        "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],
        "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],
}

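# Illustrative usage sketch (comment only; assumes the external "shash" utility
# is installed, since every hash_map entry shells out to it; the path is hypothetical):
#
#   digest = generate_hash("/var/tmp/catalyst/builds/stage3.tar.bz2",
#                          hash_function="sha512", verbose=True)
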
def read_from_clst(file):
        line = ''
        myline = ''
        try:
                myf=open(file,"r")
        except:
                return -1
                #raise CatalystError, "Could not open file "+file
        for line in myf.readlines():
                #line = string.replace(line, "\n", "") # drop newline
                myline = myline + line
        myf.close()
        return myline
# read_from_clst

# these should never be touched
required_build_targets=["generic_target","generic_stage_target"]

# new build types should be added here
valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
                        "livecd_stage1_target","livecd_stage2_target","embedded_target",
                        "tinderbox_target","snapshot_target","netboot_target","netboot2_target"]

required_config_file_values=["storedir","sharedir","distdir","portdir"]
valid_config_file_values=required_config_file_values[:]
valid_config_file_values.append("PKGCACHE")
valid_config_file_values.append("KERNCACHE")
valid_config_file_values.append("CCACHE")
valid_config_file_values.append("DISTCC")
valid_config_file_values.append("ICECREAM")
valid_config_file_values.append("ENVSCRIPT")
valid_config_file_values.append("AUTORESUME")
valid_config_file_values.append("FETCH")
valid_config_file_values.append("CLEAR_AUTORESUME")
valid_config_file_values.append("options")
valid_config_file_values.append("DEBUG")
valid_config_file_values.append("VERBOSE")
valid_config_file_values.append("PURGE")
valid_config_file_values.append("PURGEONLY")
valid_config_file_values.append("SNAPCACHE")
valid_config_file_values.append("snapshot_cache")
valid_config_file_values.append("hash_function")
valid_config_file_values.append("digests")
valid_config_file_values.append("contents")
valid_config_file_values.append("SEEDCACHE")

verbosity=1

def list_bashify(mylist):
        if type(mylist)==types.StringType:
                mypack=[mylist]
        else:
                mypack=mylist[:]
        for x in range(0,len(mypack)):
                # surround args with quotes for passing to bash,
                # allows things like "<" to remain intact
                mypack[x]="'"+mypack[x]+"'"
        mypack=string.join(mypack)
        return mypack

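# Illustrative example (comment only): each element is single-quoted so shell
# metacharacters survive the trip through bash.
#
#   list_bashify(["emerge", "--usepkg", ">=sys-apps/portage-2.1"])
#   # -> "'emerge' '--usepkg' '>=sys-apps/portage-2.1'"
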
def list_to_string(mylist):
        if type(mylist)==types.StringType:
                mypack=[mylist]
        else:
                mypack=mylist[:]
        # unlike list_bashify, no quoting is done here; the elements are simply
        # joined with spaces
        mypack=string.join(mypack)
        return mypack

class CatalystError(Exception):
        def __init__(self, message):
                if message:
                        (type,value)=sys.exc_info()[:2]
                        if value!=None:
                                print
                                traceback.print_exc(file=sys.stdout)
                        print
                        print "!!! catalyst: "+message
                        print

class LockInUse(Exception):
        def __init__(self, message):
                if message:
                        #(type,value)=sys.exc_info()[:2]
                        #if value!=None:
                        #        print
                        #        traceback.print_exc(file=sys.stdout)
                        print
                        print "!!! catalyst lock file in use: "+message
                        print

def die(msg=None):
        if msg:
                warn(msg)
        sys.exit(1)

def warn(msg):
        print "!!! catalyst: "+msg

def find_binary(myc):
        """look through the environment PATH for an executable file named whatever myc is"""
        # this sucks. badly.
        p=os.getenv("PATH")
        if p == None:
                return None
        for x in p.split(":"):
                #if it exists, and is executable
                candidate = "%s/%s" % (x,myc)
                if os.path.exists(candidate) and os.access(candidate, os.X_OK):
                        return candidate
        return None

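# Illustrative example (comment only): find_binary walks the PATH entries in
# order and returns the first executable match, or None if nothing is found.
#
#   bash_path = find_binary("bash")   # e.g. "/bin/bash" on most systems
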
def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
        """spawn mycommand as an argument to bash"""
        args=[BASH_BINARY]
        if not opt_name:
                opt_name=mycommand.split()[0]
        if "BASH_ENV" not in env:
                env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
        if debug:
                args.append("-x")
        args.append("-c")
        args.append(mycommand)
        return spawn(args,env=env,opt_name=opt_name,**keywords)

#def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
#        collect_fds=[1],fd_pipes=None,**keywords):

def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
        collect_fds=[1],fd_pipes=None,**keywords):
        """call spawn, collecting the output to fd's specified in collect_fds list
        emulate_gso is a compatibility hack to emulate commands.getstatusoutput's return, minus the
        requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
        'only log stdout and let stderr slide by' behaviour.

        emulate_gso was deprecated from the day it was added, so convert your code over.
        spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
        global selinux_capable
        pr,pw=os.pipe()

        #if type(spawn_type) not in [types.FunctionType, types.MethodType]:
        #        s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
        #        raise Exception,s

        if fd_pipes==None:
                fd_pipes={}
                fd_pipes[0] = 0

        for x in collect_fds:
                fd_pipes[x] = pw
        keywords["returnpid"]=True

        mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
        os.close(pw)
        if type(mypid) != types.ListType:
                os.close(pr)
                return [mypid, "%s: No such file or directory" % mycommand.split()[0]]

        fd=os.fdopen(pr,"r")
        mydata=fd.readlines()
        fd.close()
        if emulate_gso:
                mydata=string.join(mydata)
                if len(mydata) and mydata[-1] == "\n":
                        mydata=mydata[:-1]
        retval=os.waitpid(mypid[0],0)[1]
        cleanup(mypid)
        if raw_exit_code:
                return [retval,mydata]
        retval=process_exit_code(retval)
        return [retval, mydata]

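# Illustrative usage sketch (comment only): collect the stdout of a bash
# command, getting back [exit_code, output] in the commands.getstatusoutput style.
#
#   retcode, output = spawn_get_output("ls /tmp")   # the path is arbitrary
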
# base spawn function
def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
         uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
         selinux_context=None, raise_signals=False, func_call=False):
        """base fork/execve function.
        mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
        environment, use the appropriate spawn call.  This is a straight fork/exec code path.
        Can either have a tuple, or a string passed in.  If uid/gid/groups/umask specified, it changes
        the forked process to said value.  If path_lookup is on, a non-absolute command will be converted
        to an absolute command, otherwise it returns None.

        selinux_context is the desired context, dependent on selinux being available.
        opt_name controls the name the process goes by.
        fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
        current fd's raw fd #, desired #.

        func_call is a boolean for specifying to execute a python function- use spawn_func instead.
        raise_signals is questionable.  Basically throw an exception if signal'd.  No exception is thrown
        if raw_exit_code is on.

        logfile overloads the specified fd's to write to a tee process which logs to logfile
        returnpid returns the relevant pids (a list, including the logging process if logfile is on).

        non-returnpid calls to spawn will block till the process has exited, returning the exitcode/signal
        raw_exit_code controls whether the actual waitpid result is returned, or interpreted."""

        myc=''
        if not func_call:
                if type(mycommand)==types.StringType:
                        mycommand=mycommand.split()
                myc = mycommand[0]
                if not os.access(myc, os.X_OK):
                        if not path_lookup:
                                return None
                        myc = find_binary(myc)
                        if myc == None:
                                return None
        mypid=[]
        if logfile:
                pr,pw=os.pipe()
                mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
                retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
                if retval != 0:
                        # he's dead jim.
                        if raw_exit_code:
                                return retval
                        return process_exit_code(retval)

                if fd_pipes == None:
                        fd_pipes={}
                        fd_pipes[0] = 0
                fd_pipes[1]=pw
                fd_pipes[2]=pw

        if not opt_name:
                opt_name = mycommand[0]
        myargs=[opt_name]
        myargs.extend(mycommand[1:])
        global spawned_pids
        mypid.append(os.fork())
        if mypid[-1] != 0:
                #log the bugger.
                spawned_pids.extend(mypid)

        if mypid[-1] == 0:
                if func_call:
                        spawned_pids = []

                # this may look ugly, but basically it moves file descriptors around to ensure no
                # handles that are needed are accidentally closed during the final dup2 calls.
                trg_fd=[]
                if type(fd_pipes)==types.DictType:
                        src_fd=[]
                        k=fd_pipes.keys()
                        k.sort()

                        #build list of which fds will be where, and where they are at currently
                        for x in k:
                                trg_fd.append(x)
                                src_fd.append(fd_pipes[x])

                        # run through said list dup'ing descriptors so that they won't be waxed
                        # by other dup calls.
                        for x in range(0,len(trg_fd)):
                                if trg_fd[x] == src_fd[x]:
                                        continue
                                if trg_fd[x] in src_fd[x+1:]:
                                        # copy the descriptor out of the way before it gets
                                        # clobbered, then point every later source at the copy.
                                        new=max(src_fd) + 1
                                        os.dup2(trg_fd[x],new)
                                        os.close(trg_fd[x])
                                        try:
                                                while True:
                                                        src_fd[src_fd.index(trg_fd[x])]=new
                                        except SystemExit, e:
                                                raise
                                        except ValueError:
                                                pass

                        # transfer the fds to their final pre-exec position.
                        for x in range(0,len(trg_fd)):
                                if trg_fd[x] != src_fd[x]:
                                        os.dup2(src_fd[x], trg_fd[x])
                else:
                        trg_fd=[0,1,2]

                # wax all open descriptors that weren't requested be left open.
                for x in range(0,max_fd_limit):
                        if x not in trg_fd:
                                try:
                                        os.close(x)
                                except SystemExit, e:
                                        raise
                                except:
                                        pass

                # note this order must be preserved- can't change gid/groups if you change uid first.
                if selinux_capable and selinux_context:
                        import selinux
                        selinux.setexec(selinux_context)
                if gid:
                        os.setgid(gid)
                if groups:
                        os.setgroups(groups)
                if uid:
                        os.setuid(uid)
                if umask:
                        os.umask(umask)
                else:
                        os.umask(022)

                try:
                        #print "execing", myc, myargs
                        if func_call:
                                # either use a passed in func for interpreting the results, or return if no exception.
                                # note the passed in list, and dict are expanded.
                                if len(mycommand) == 4:
                                        os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
                                try:
                                        mycommand[0](*mycommand[1],**mycommand[2])
                                except Exception,e:
                                        print "caught exception",e," in forked func",mycommand[0]
                                sys.exit(0)

                        #os.execvp(myc,myargs)
                        os.execve(myc,myargs,env)
                except SystemExit, e:
                        raise
                except Exception, e:
                        if not func_call:
                                raise CatalystError, str(e)+":\n   "+myc+" "+string.join(myargs)
                        print "func call failed"

                # If the execve fails, we need to report it, and exit
                # *carefully* --- report error here
                os._exit(1)
                sys.exit(1)
                return # should never get reached

        # if we were logging, kill the pipes.
        if logfile:
                os.close(pr)
                os.close(pw)

        if returnpid:
                return mypid

        # loop through pids (typically one, unless logging), either waiting on their death, or waxing them
        # if the main pid (mycommand) returned badly.
        while len(mypid):
                retval=os.waitpid(mypid[-1],0)[1]
                if retval != 0:
                        cleanup(mypid[0:-1],block_exceptions=False)
                        # at this point we've killed all other kid pids generated via this call.
                        # return now.
                        if raw_exit_code:
                                return retval
                        return process_exit_code(retval,throw_signals=raise_signals)
                else:
                        mypid.pop(-1)
        cleanup(mypid)
        return 0

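# Illustrative usage sketch (comment only; both paths are hypothetical): a plain
# fork/exec with the output teed to a log file, returning the processed exit
# code (0 on success).
#
#   result = spawn("tar tvjf /var/tmp/stage3.tar.bz2",
#                  logfile="/var/tmp/contents.log")
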
def cmd(mycmd,myexc="",env={}):
        try:
                sys.stdout.flush()
                retval=spawn_bash(mycmd,env)
                if retval != 0:
                        raise CatalystError,myexc
        except:
                raise

def process_exit_code(retval,throw_signals=False):
        """process a waitpid returned exit code, returning exit code if it exit'd, or the
        signal if it died from signalling
        if throw_signals is on, it raises a SystemExit if the process was signaled.
        This is intended for usage with threads, although at the moment you can't signal individual
        threads in python, only the master thread, so it's a questionable option."""
        if (retval & 0xff)==0:
                return retval >> 8 # return exit code
        else:
                if throw_signals:
                        #use systemexit, since portage is stupid about exception catching.
                        raise SystemExit()
                return (retval & 0xff) << 8 # interrupted by signal

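# Illustrative example (comment only): a waitpid status of 0x100 means the child
# exited normally with code 1, while 0x000f means it was killed by signal 15.
#
#   process_exit_code(0x100)    # -> 1
#   process_exit_code(0x000f)   # -> 0x0f00 (the signal, shifted into the high byte)
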
def file_locate(settings,filelist,expand=1):
        #if expand=1, non-absolute paths will be accepted and
        # expanded to os.getcwd()+"/"+localpath if file exists
        for myfile in filelist:
                if myfile not in settings:
                        #filenames such as cdtar are optional, so we don't assume the variable is defined.
                        pass
                else:
                        if len(settings[myfile])==0:
                                raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
                        if settings[myfile][0]=="/":
                                if not os.path.exists(settings[myfile]):
                                        raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
                        elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
                                settings[myfile]=os.getcwd()+"/"+settings[myfile]
                        else:
                                raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"
574 """
575 Spec file format:
576
577 The spec file format is a very simple and easy-to-use format for storing data. Here's an example
578 file:
579
580 item1: value1
581 item2: foo bar oni
582 item3:
583         meep
584         bark
585         gleep moop
586
587 This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
588 the string value "value1". Item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
589 would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
590 that the order of multiple-value items is preserved, but the order that the items themselves are
591 defined are not preserved. In other words, "foo", "bar", "oni" ordering is preserved but "item1"
592 "item2" "item3" ordering is not, as the item strings are stored in a dictionary (hash).
593 """
594
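# Illustrative sketch (comment only) of how the example spec above would land in
# a parsed dictionary (key order is not guaranteed, value order is):
#
#   {"item1": "value1",
#    "item2": ["foo", "bar", "oni"],
#    "item3": ["meep", "bark", "gleep", "moop"]}
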
def parse_makeconf(mylines):
        mymakeconf={}
        pos=0
        pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
        while pos<len(mylines):
                if len(mylines[pos])<=1:
                        #skip blanks
                        pos += 1
                        continue
                if mylines[pos][0] in ["#"," ","\t"]:
                        #skip indented lines, comments
                        pos += 1
                        continue
                else:
                        myline=mylines[pos]
                        mobj=pat.match(myline)
                        pos += 1
                        # ignore lines that don't look like VAR=value at all
                        if mobj and mobj.group(2):
                                clean_string = re.sub(r"\"",r"",mobj.group(2))
                                mymakeconf[mobj.group(1)]=clean_string
        return mymakeconf

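# Illustrative example (comment only): feeding make.conf-style lines returns a
# dict with the surrounding quotes stripped from each value.
#
#   parse_makeconf(['CFLAGS="-O2 -pipe"\n', '# a comment\n', 'CHOST="x86_64-pc-linux-gnu"\n'])
#   # -> {"CFLAGS": "-O2 -pipe", "CHOST": "x86_64-pc-linux-gnu"}
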
def read_makeconf(mymakeconffile):
        if os.path.exists(mymakeconffile):
                try:
                        try:
                                import snakeoil.fileutils
                                return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
                        except ImportError:
                                try:
                                        import portage.util
                                        return portage.util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
                                except:
                                        try:
                                                import portage_util
                                                return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
                                        except ImportError:
                                                myf=open(mymakeconffile,"r")
                                                mylines=myf.readlines()
                                                myf.close()
                                                return parse_makeconf(mylines)
                except:
                        raise CatalystError, "Could not parse make.conf file "+mymakeconffile
        else:
                makeconf={}
                return makeconf

def msg(mymsg,verblevel=1):
        if verbosity>=verblevel:
                print mymsg

def pathcompare(path1,path2):
        # Change double slashes to slash
        path1 = re.sub(r"//",r"/",path1)
        path2 = re.sub(r"//",r"/",path2)
        # Removing ending slash
        path1 = re.sub("/$","",path1)
        path2 = re.sub("/$","",path2)

        if path1 == path2:
                return 1
        return 0

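# Illustrative example (comment only): pathcompare treats doubled and trailing
# slashes as equivalent, e.g. pathcompare("/var/tmp/", "/var//tmp") returns 1.
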
def ismount(path):
        "enhanced to handle bind mounts"
        if os.path.ismount(path):
                return 1
        a=os.popen("mount")
        mylines=a.readlines()
        a.close()
        for line in mylines:
                mysplit=line.split()
                if pathcompare(path,mysplit[2]):
                        return 1
        return 0

def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
        "helper function to help targets parse additional arguments"
        global valid_config_file_values

        messages = []
        for x in addlargs.keys():
                if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
                        messages.append("Argument \""+x+"\" not recognized.")
                else:
                        myspec[x]=addlargs[x]

        for x in requiredspec:
                if x not in myspec:
                        messages.append("Required argument \""+x+"\" not specified.")

        if messages:
                raise CatalystError, '\n\tAlso: '.join(messages)

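# Illustrative usage sketch (comment only; the spec keys shown are just examples):
#
#   myspec = {}
#   addl_arg_parse(myspec, {"target": "stage3", "rel_type": "default"},
#                  ["target"], ["target", "rel_type"])
#   # copies recognized keys into myspec, raising CatalystError on unknown or
#   # missing required arguments.
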
def touch(myfile):
        try:
                myf=open(myfile,"w")
                myf.close()
        except IOError:
                raise CatalystError, "Could not touch "+myfile+"."

def countdown(secs=5, doing="Starting"):
        if secs:
                print ">>> Waiting",secs,"seconds before starting..."
                print ">>> (Control-C to abort)...\n"+doing+" in: ",
                ticks=range(secs)
                ticks.reverse()
                for sec in ticks:
                        sys.stdout.write(str(sec+1)+" ")
                        sys.stdout.flush()
                        time.sleep(1)
                print

def normpath(mypath):
        TrailingSlash=False
        if mypath and mypath[-1] == "/":
                TrailingSlash=True
        newpath = os.path.normpath(mypath)
        if len(newpath) > 1:
                if newpath[:2] == "//":
                        newpath = newpath[1:]
        if TrailingSlash:
                newpath=newpath+'/'
        return newpath
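
# Illustrative example (comment only): normpath collapses redundant separators
# but, unlike os.path.normpath, preserves a single trailing slash,
# e.g. normpath("/tmp//foo/") -> "/tmp/foo/".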