
import sys,string,os,types,re,signal,traceback,time
#import md5,sha
selinux_capable = False
#userpriv_capable = (os.getuid() == 0)
#fakeroot_capable = False
BASH_BINARY = "/bin/bash"

try:
	import resource
	max_fd_limit=resource.getrlimit(resource.RLIMIT_NOFILE)[0]
except SystemExit, e:
	raise
except:
	# hokay, no resource module.
	max_fd_limit=256

# pids this process knows of.
spawned_pids = []

try:
	import urllib
except SystemExit, e:
	raise

def cleanup(pids,block_exceptions=True):
	"""function to go through and reap the list of pids passed to it"""
	global spawned_pids
	if type(pids) == int:
		pids = [pids]
	for x in pids:
		try:
			os.kill(x,signal.SIGTERM)
			if os.waitpid(x,os.WNOHANG)[1] == 0:
				# feisty bugger, still alive.
				os.kill(x,signal.SIGKILL)
				os.waitpid(x,0)

		except OSError, oe:
			# errno 3 (ESRCH) and 10 (ECHILD) just mean the pid is already
			# gone; anything else is only raised when the caller asked for it.
			if not block_exceptions and oe.errno not in (10,3):
				raise oe
		except SystemExit:
			raise
		except Exception:
			if block_exceptions:
				pass
		try:			spawned_pids.remove(x)
		except ValueError:	pass


# a function to turn a string of non-printable characters into a string of
# hex characters
def hexify(mystr):
	hexStr = string.hexdigits
	r = ''
	for ch in mystr:
		i = ord(ch)
		r = r + hexStr[(i >> 4) & 0xF] + hexStr[i & 0xF]
	return r
# hexify()
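# Hedged usage sketch (not executed on import): hexify turns each byte of a
# string into two hex digits, so a two-byte input yields four characters.
#
#   hexify("\x00\xff")   ->  "00ff"
#   hexify("AB")         ->  "4142"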

def generate_contents(file,contents_function="auto",verbose=False):
	try:
		_ = contents_function
		if _ == 'auto' and file.endswith('.iso'):
			_ = 'isoinfo-l'
		if (_ in ['tar-tv','auto']):
			if file.endswith('.tgz') or file.endswith('.tar.gz'):
				_ = 'tar-tvz'
			elif file.endswith('.tbz2') or file.endswith('.tar.bz2'):
				_ = 'tar-tvj'
			elif file.endswith('.tar'):
				_ = 'tar-tv'

		if _ == 'auto':
			warn('File %r has unknown type for automatic detection.' % (file, ))
			return None
		else:
			contents_function = _
			_ = contents_map[contents_function]
			return _[0](file,_[1],verbose)
	except:
		raise CatalystError,\
			"Error generating contents, is appropriate utility (%s) installed on your system?" \
			% (contents_function, )

def calc_contents(file,cmd,verbose):
	args={ 'file': file }
	cmd=cmd % dict(args)
	a=os.popen(cmd)
	mylines=a.readlines()
	a.close()
	result="".join(mylines)
	if verbose:
		print result
	return result

# This contents map must be defined after the function calc_contents.
# It is possible to call different functions from this, but they must be
# defined before contents_map.
# Key:[function,cmd]
contents_map={
	# 'find' is disabled because it requires the source path, which is not
	# always available
	#"find"		:[calc_contents,"find %(path)s"],
	"tar-tv":[calc_contents,"tar tvf %(file)s"],
	"tar-tvz":[calc_contents,"tar tvzf %(file)s"],
	"tar-tvj":[calc_contents,"tar tvjf %(file)s"],
	"isoinfo-l":[calc_contents,"isoinfo -l -i %(file)s"],
	# isoinfo-f should be a last resort only
	"isoinfo-f":[calc_contents,"isoinfo -f -i %(file)s"],
}
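
# Hedged usage sketch (not executed on import): asking generate_contents for
# an automatic listing of a stage tarball.  The path below is hypothetical.
#
#   listing = generate_contents("/var/tmp/catalyst/stage3.tar.bz2",
#           contents_function="auto", verbose=False)
#   # "auto" resolves to "tar-tvj" for a .tar.bz2, so this shells out to
#   # "tar tvjf /var/tmp/catalyst/stage3.tar.bz2" and returns its output.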

def generate_hash(file,hash_function="crc32",verbose=False):
	try:
		return hash_map[hash_function][0](file,hash_map[hash_function][1],hash_map[hash_function][2],\
			hash_map[hash_function][3],verbose)
	except:
		raise CatalystError,"Error generating hash, is appropriate utility installed on your system?"

def calc_hash(file,cmd,cmd_args,id_string="MD5",verbose=False):
	a=os.popen(cmd+" "+cmd_args+" "+file)
	mylines=a.readlines()
	a.close()
	mylines=mylines[0].split()
	result=mylines[0]
	if verbose:
		print id_string+" (%s) = %s" % (file, result)
	return result

def calc_hash2(file,cmd,cmd_args,id_string="MD5",verbose=False):
	a=os.popen(cmd+" "+cmd_args+" "+file)
	header=a.readline()
	mylines=a.readline().split()
	hash=mylines[0]
	short_file=os.path.split(mylines[1])[1]
	a.close()
	result=header+hash+"  "+short_file+"\n"
	if verbose:
		print header+" (%s) = %s" % (short_file, result)
	return result

# This hash map must be defined after the functions calc_hash and calc_hash2.
# It is possible to call different functions from this, but they must be
# defined before hash_map.
# Key:[function,cmd,cmd_args,id_string]
hash_map={
	 "adler32":[calc_hash2,"shash","-a ADLER32","ADLER32"],
	 "crc32":[calc_hash2,"shash","-a CRC32","CRC32"],
	 "crc32b":[calc_hash2,"shash","-a CRC32B","CRC32B"],
	 "gost":[calc_hash2,"shash","-a GOST","GOST"],
	 "haval128":[calc_hash2,"shash","-a HAVAL128","HAVAL128"],
	 "haval160":[calc_hash2,"shash","-a HAVAL160","HAVAL160"],
	 "haval192":[calc_hash2,"shash","-a HAVAL192","HAVAL192"],
	 "haval224":[calc_hash2,"shash","-a HAVAL224","HAVAL224"],
	 "haval256":[calc_hash2,"shash","-a HAVAL256","HAVAL256"],
	 "md2":[calc_hash2,"shash","-a MD2","MD2"],
	 "md4":[calc_hash2,"shash","-a MD4","MD4"],
	 "md5":[calc_hash2,"shash","-a MD5","MD5"],
	 "ripemd128":[calc_hash2,"shash","-a RIPEMD128","RIPEMD128"],
	 "ripemd160":[calc_hash2,"shash","-a RIPEMD160","RIPEMD160"],
	 "ripemd256":[calc_hash2,"shash","-a RIPEMD256","RIPEMD256"],
	 "ripemd320":[calc_hash2,"shash","-a RIPEMD320","RIPEMD320"],
	 "sha1":[calc_hash2,"shash","-a SHA1","SHA1"],
	 "sha224":[calc_hash2,"shash","-a SHA224","SHA224"],
	 "sha256":[calc_hash2,"shash","-a SHA256","SHA256"],
	 "sha384":[calc_hash2,"shash","-a SHA384","SHA384"],
	 "sha512":[calc_hash2,"shash","-a SHA512","SHA512"],
	 "snefru128":[calc_hash2,"shash","-a SNEFRU128","SNEFRU128"],
	 "snefru256":[calc_hash2,"shash","-a SNEFRU256","SNEFRU256"],
	 "tiger":[calc_hash2,"shash","-a TIGER","TIGER"],
	 "tiger128":[calc_hash2,"shash","-a TIGER128","TIGER128"],
	 "tiger160":[calc_hash2,"shash","-a TIGER160","TIGER160"],
	 "whirlpool":[calc_hash2,"shash","-a WHIRLPOOL","WHIRLPOOL"],
	 }
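
# Hedged usage sketch (not executed on import): every entry above shells out
# to the external "shash" utility, so it must be installed.  The path below
# is hypothetical.
#
#   digest = generate_hash("/var/tmp/catalyst/stage3.tar.bz2",
#           hash_function="sha512", verbose=False)
#   # runs "shash -a SHA512 /var/tmp/catalyst/stage3.tar.bz2" and returns
#   # calc_hash2's formatted "header + hash + '  ' + filename" string.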

def read_from_clst(file):
	line = ''
	myline = ''
	try:
		myf=open(file,"r")
	except:
		return -1
		#raise CatalystError, "Could not open file "+file
	for line in myf.readlines():
		#line = string.replace(line, "\n", "") # drop newline
		myline = myline + line
	myf.close()
	return myline
# read_from_clst

# these should never be touched
required_build_targets=["generic_target","generic_stage_target"]

# new build types should be added here
valid_build_targets=["stage1_target","stage2_target","stage3_target","stage4_target","grp_target",
			"livecd_stage1_target","livecd_stage2_target","embedded_target",
			"tinderbox_target","snapshot_target","netboot_target","netboot2_target"]

required_config_file_values=["storedir","sharedir","distdir","portdir"]
valid_config_file_values=required_config_file_values[:]
valid_config_file_values.append("PKGCACHE")
valid_config_file_values.append("KERNCACHE")
valid_config_file_values.append("CCACHE")
valid_config_file_values.append("DISTCC")
valid_config_file_values.append("ICECREAM")
valid_config_file_values.append("ENVSCRIPT")
valid_config_file_values.append("AUTORESUME")
valid_config_file_values.append("FETCH")
valid_config_file_values.append("CLEAR_AUTORESUME")
valid_config_file_values.append("options")
valid_config_file_values.append("DEBUG")
valid_config_file_values.append("VERBOSE")
valid_config_file_values.append("PURGE")
valid_config_file_values.append("PURGEONLY")
valid_config_file_values.append("SNAPCACHE")
valid_config_file_values.append("snapshot_cache")
valid_config_file_values.append("hash_function")
valid_config_file_values.append("digests")
valid_config_file_values.append("contents")
valid_config_file_values.append("SEEDCACHE")

verbosity=1

def list_bashify(mylist):
	if type(mylist)==types.StringType:
		mypack=[mylist]
	else:
		mypack=mylist[:]
	for x in range(0,len(mypack)):
		# surround args with quotes for passing to bash,
		# allows things like "<" to remain intact
		mypack[x]="'"+mypack[x]+"'"
	mypack=string.join(mypack)
	return mypack

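# Hedged illustration (not executed on import): list_bashify single-quotes
# each argument and joins them with spaces so characters like "<" survive
# the trip through bash.
#
#   list_bashify(["emerge", "--usepkg", "<sys-apps/portage-3"])
#   #  ->  "'emerge' '--usepkg' '<sys-apps/portage-3'"
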
def list_to_string(mylist):
	if type(mylist)==types.StringType:
		mypack=[mylist]
	else:
		mypack=mylist[:]
	# unlike list_bashify, no quoting is needed here- just join with spaces.
	return string.join(mypack)

class CatalystError(Exception):
	def __init__(self, message):
		if message:
			(type,value)=sys.exc_info()[:2]
			if value!=None:
				print
				traceback.print_exc(file=sys.stdout)
			print
			print "!!! catalyst: "+message
			print

class LockInUse(Exception):
	def __init__(self, message):
		if message:
			#(type,value)=sys.exc_info()[:2]
			#if value!=None:
			#	print
			#	traceback.print_exc(file=sys.stdout)
			print
			print "!!! catalyst lock file in use: "+message
			print

def die(msg=None):
	warn(msg)
	sys.exit(1)

def warn(msg):
	print "!!! catalyst: "+msg


def find_binary(myc):
	"""look through the environmental path for an executable file named whatever myc is"""
	# this sucks. badly.
	p=os.getenv("PATH")
	if p == None:
		return None
	for x in p.split(":"):
		#if it exists, and is executable
		candidate="%s/%s" % (x,myc)
		if os.path.exists(candidate) and os.access(candidate, os.X_OK):
			return candidate
	return None


def spawn_bash(mycommand,env={},debug=False,opt_name=None,**keywords):
	"""spawn mycommand as an argument to bash"""
	args=[BASH_BINARY]
	if not opt_name:
		opt_name=mycommand.split()[0]
	if not env.has_key("BASH_ENV"):
		env["BASH_ENV"] = "/etc/spork/is/not/valid/profile.env"
	if debug:
		args.append("-x")
	args.append("-c")
	args.append(mycommand)
	return spawn(args,env=env,opt_name=opt_name,**keywords)

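# Hedged usage sketch (not executed on import): spawn_bash hands the whole
# command string to "/bin/bash -c", so shell syntax is available.  The
# environment value below is hypothetical.
#
#   retval = spawn_bash("env-update && source /etc/profile",
#           env={"ROOT": "/"}, debug=False)
#   if retval != 0:
#           raise CatalystError, "env-update failed"
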
#def spawn_get_output(mycommand,spawn_type=spawn,raw_exit_code=False,emulate_gso=True, \
#        collect_fds=[1],fd_pipes=None,**keywords):
def spawn_get_output(mycommand,raw_exit_code=False,emulate_gso=True, \
	collect_fds=[1],fd_pipes=None,**keywords):
	"""call spawn, collecting the output to fd's specified in collect_fds list
	emulate_gso is a compatibility hack to emulate commands.getstatusoutput's return, minus the
	requirement it always be a bash call (spawn_type controls the actual spawn call), and minus the
	'only log stdout and let stderr slide by' behaviour.

	emulate_gso was deprecated from the day it was added, so convert your code over.
	spawn_type is the passed in function to call- typically spawn_bash, spawn, spawn_sandbox, or spawn_fakeroot"""
	global selinux_capable
	pr,pw=os.pipe()

	#if type(spawn_type) not in [types.FunctionType, types.MethodType]:
	#	s="spawn_type must be passed a function, not",type(spawn_type),spawn_type
	#	raise Exception,s

	if fd_pipes==None:
		fd_pipes={}
		fd_pipes[0] = 0

	for x in collect_fds:
		fd_pipes[x] = pw
	keywords["returnpid"]=True

	mypid=spawn_bash(mycommand,fd_pipes=fd_pipes,**keywords)
	os.close(pw)
	if type(mypid) != types.ListType:
		os.close(pr)
		return [mypid, "%s: No such file or directory" % mycommand.split()[0]]

	fd=os.fdopen(pr,"r")
	mydata=fd.readlines()
	fd.close()
	if emulate_gso:
		mydata=string.join(mydata)
		if len(mydata) and mydata[-1] == "\n":
			mydata=mydata[:-1]
	retval=os.waitpid(mypid[0],0)[1]
	cleanup(mypid)
	if raw_exit_code:
		return [retval,mydata]
	retval=process_exit_code(retval)
	return [retval, mydata]


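# Hedged usage sketch (not executed on import): spawn_get_output returns a
# [exit_code, output] pair, roughly like commands.getstatusoutput().
#
#   retval, out = spawn_get_output("portageq envvar PORTDIR")
#   if retval == 0:
#           print "PORTDIR is", out
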
# base spawn function
def spawn(mycommand,env={},raw_exit_code=False,opt_name=None,fd_pipes=None,returnpid=False,\
	 uid=None,gid=None,groups=None,umask=None,logfile=None,path_lookup=True,\
	 selinux_context=None, raise_signals=False, func_call=False):
	"""base fork/execve function.
	mycommand is the desired command- if you need a command to execute in a bash/sandbox/fakeroot
	environment, use the appropriate spawn call.  This is a straight fork/exec code path.
	Can either have a tuple, or a string passed in.  If uid/gid/groups/umask specified, it changes
	the forked process to said value.  If path_lookup is on, a non-absolute command will be converted
	to an absolute command, otherwise it returns None.

	selinux_context is the desired context, dependent on selinux being available.
	opt_name controls the name the process goes by.
	fd_pipes controls which file descriptor numbers are left open in the forked process- it's a dict of
	current fd's raw fd #, desired #.

	func_call is a boolean for specifying to execute a python function- use spawn_func instead.
	raise_signals is questionable.  Basically throw an exception if signal'd.  No exception is thrown
	if raw_exit_code is on.

	logfile overloads the specified fd's to write to a tee process which logs to logfile
	returnpid returns the relevant pids (a list, including the logging process if logfile is on).

	non-returnpid calls to spawn will block until the process has exited, returning the exitcode/signal
	raw_exit_code controls whether the actual waitpid result is returned, or interpreted."""


	myc=''
	if not func_call:
		if type(mycommand)==types.StringType:
			mycommand=mycommand.split()
		myc = mycommand[0]
		if not os.access(myc, os.X_OK):
			if not path_lookup:
				return None
			myc = find_binary(myc)
			if myc == None:
				return None
	mypid=[]
	if logfile:
		pr,pw=os.pipe()
		mypid.extend(spawn(('tee','-i','-a',logfile),returnpid=True,fd_pipes={0:pr,1:1,2:2}))
		retval=os.waitpid(mypid[-1],os.WNOHANG)[1]
		if retval != 0:
			# he's dead jim.
			if raw_exit_code:
				return retval
			return process_exit_code(retval)

		if fd_pipes == None:
			fd_pipes={}
			fd_pipes[0] = 0
		fd_pipes[1]=pw
		fd_pipes[2]=pw

	if not opt_name:
		opt_name = mycommand[0]
	myargs=[opt_name]
	myargs.extend(mycommand[1:])
	global spawned_pids
	mypid.append(os.fork())
	if mypid[-1] != 0:
		#log the bugger.
		spawned_pids.extend(mypid)

	if mypid[-1] == 0:
		if func_call:
			spawned_pids = []

		# this may look ugly, but basically it moves file descriptors around to ensure no
		# handles that are needed are accidentally closed during the final dup2 calls.
		trg_fd=[]
		if type(fd_pipes)==types.DictType:
			src_fd=[]
			k=fd_pipes.keys()
			k.sort()

			#build list of which fds will be where, and where they are at currently
			for x in k:
				trg_fd.append(x)
				src_fd.append(fd_pipes[x])

			# run through said list dup'ing descriptors so that they won't be waxed
			# by other dup calls.
			for x in range(0,len(trg_fd)):
				if trg_fd[x] == src_fd[x]:
					continue
				if trg_fd[x] in src_fd[x+1:]:
					# this target fd is still needed as a source later on;
					# park it on an unused descriptor and update the list.
					new=max(src_fd+trg_fd) + 1
					os.dup2(trg_fd[x],new)
					os.close(trg_fd[x])
					try:
						while True:
							src_fd[src_fd.index(trg_fd[x])]=new
					except ValueError:
						# no more references to the old fd
						pass

			# transfer the fds to their final pre-exec position.
			for x in range(0,len(trg_fd)):
				if trg_fd[x] != src_fd[x]:
					os.dup2(src_fd[x], trg_fd[x])
		else:
			trg_fd=[0,1,2]

		# wax all open descriptors that weren't requested be left open.
		for x in range(0,max_fd_limit):
			if x not in trg_fd:
				try:
					os.close(x)
				except SystemExit, e:
					raise
				except:
					pass

		# note this order must be preserved- can't change gid/groups if you change uid first.
		if selinux_capable and selinux_context:
			import selinux
			selinux.setexec(selinux_context)
		if gid:
			os.setgid(gid)
		if groups:
			os.setgroups(groups)
		if uid:
			os.setuid(uid)
		if umask:
			os.umask(umask)
		else:
			os.umask(022)

		try:
			#print "execing", myc, myargs
			if func_call:
				# either use a passed in func for interpreting the results, or return if no exception.
				# note the passed in list, and dict are expanded.
				if len(mycommand) == 4:
					os._exit(mycommand[3](mycommand[0](*mycommand[1],**mycommand[2])))
				try:
					mycommand[0](*mycommand[1],**mycommand[2])
				except Exception,e:
					print "caught exception",e," in forked func",mycommand[0]
				sys.exit(0)

			#os.execvp(myc,myargs)
			os.execve(myc,myargs,env)
		except SystemExit, e:
			raise
		except Exception, e:
			if not func_call:
				raise CatalystError, str(e)+":\n   "+myc+" "+string.join(myargs)
			print "func call failed"

		# If the execve fails, we need to report it, and exit
		# *carefully* --- report error here
		os._exit(1)
		sys.exit(1)
		return # should never get reached

	# if we were logging, kill the pipes.
	if logfile:
		os.close(pr)
		os.close(pw)

	if returnpid:
		return mypid

	# loop through pids (typically one, unless logging), either waiting on their death, or waxing them
	# if the main pid (mycommand) returned badly.
	while len(mypid):
		retval=os.waitpid(mypid[-1],0)[1]
		if retval != 0:
			cleanup(mypid[0:-1],block_exceptions=False)
			# at this point we've killed all other kid pids generated via this call.
			# return now.
			if raw_exit_code:
				return retval
			return process_exit_code(retval,throw_signals=raise_signals)
		else:
			mypid.pop(-1)
	cleanup(mypid)
	return 0

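# Hedged usage sketch (not executed on import): calling spawn directly with a
# logfile duplicates stdout/stderr into a tee process.  The paths below are
# hypothetical.
#
#   retval = spawn("tar xjpf /tmp/stage3.tar.bz2 -C /tmp/chroot",
#           env={"PATH": "/bin:/usr/bin"}, logfile="/tmp/catalyst-unpack.log")
#   if retval != 0:
#           raise CatalystError, "untar failed"
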
def cmd(mycmd,myexc="",env={}):
	try:
		sys.stdout.flush()
		retval=spawn_bash(mycmd,env)
		if retval != 0:
			raise CatalystError,myexc
	except:
		raise

def process_exit_code(retval,throw_signals=False):
	"""process a waitpid returned exit code, returning the exit code if the process exited, or the
	signal if it died from signalling.
	if throw_signals is on, it raises a SystemExit if the process was signaled.
	This is intended for usage with threads, although at the moment you can't signal individual
	threads in python, only the master thread, so it's a questionable option."""
	if (retval & 0xff)==0:
		return retval >> 8 # return exit code
	else:
		if throw_signals:
			#use systemexit, since portage is stupid about exception catching.
			raise SystemExit()
		return (retval & 0xff) << 8 # interrupted by signal


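# Hedged illustration (not executed on import): decoding a raw waitpid status.
#
#   process_exit_code(0x0100)  ->  1        (child exited with code 1)
#   process_exit_code(0x0009)  ->  0x0900   (child killed by SIGKILL, signal 9
#                                             shifted into the high byte)
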
def file_locate(settings,filelist,expand=1):
	#if expand=1, non-absolute paths will be accepted and
	# expanded to os.getcwd()+"/"+localpath if file exists
	for myfile in filelist:
		if not settings.has_key(myfile):
			#filenames such as cdtar are optional, so we don't assume the variable is defined.
			pass
		else:
			if len(settings[myfile])==0:
				raise CatalystError, "File variable \""+myfile+"\" has a length of zero (not specified.)"
			if settings[myfile][0]=="/":
				if not os.path.exists(settings[myfile]):
					raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]
			elif expand and os.path.exists(os.getcwd()+"/"+settings[myfile]):
				settings[myfile]=os.getcwd()+"/"+settings[myfile]
			else:
				raise CatalystError, "Cannot locate specified "+myfile+": "+settings[myfile]+" (2nd try)"

"""
Spec file format:

The spec file format is a very simple and easy-to-use format for storing data. Here's an example
file:

item1: value1
item2: foo bar oni
item3:
	meep
	bark
	gleep moop

This file would be interpreted as defining three items: item1, item2 and item3. item1 would contain
the string value "value1". item2 would contain an ordered list [ "foo", "bar", "oni" ]. item3
would contain an ordered list as well: [ "meep", "bark", "gleep", "moop" ]. It's important to note
that the order of multiple-value items is preserved, but the order in which the items themselves are
defined is not preserved; the item strings are stored in a dictionary (hash).
"""

def parse_makeconf(mylines):
	mymakeconf={}
	pos=0
	pat=re.compile("([0-9a-zA-Z_]*)=(.*)")
	while pos<len(mylines):
		if len(mylines[pos])<=1:
			#skip blanks
			pos += 1
			continue
		if mylines[pos][0] in ["#"," ","\t"]:
			#skip indented lines, comments
			pos += 1
			continue
		else:
			myline=mylines[pos]
			mobj=pat.match(myline)
			pos += 1
			if mobj and mobj.group(2):
				clean_string = re.sub(r"\"",r"",mobj.group(2))
				mymakeconf[mobj.group(1)]=clean_string
	return mymakeconf

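# Hedged usage sketch (not executed on import): parse_makeconf is the last
# resort fallback in read_makeconf below; it only handles simple KEY="value"
# lines, skipping comments and indented continuations.
#
#   lines = ['CFLAGS="-O2 -pipe"\n', '# a comment\n', 'USE="bindist"\n']
#   parse_makeconf(lines)
#   #  ->  {'CFLAGS': '-O2 -pipe', 'USE': 'bindist'}
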
def read_makeconf(mymakeconffile):
	if os.path.exists(mymakeconffile):
		try:
			try:
				import snakeoil.fileutils
				return snakeoil.fileutils.read_bash_dict(mymakeconffile, sourcing_command="source")
			except ImportError:
				try:
					import portage.util
					return portage.util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
				except:
					try:
						import portage_util
						return portage_util.getconfig(mymakeconffile, tolerant=1, allow_sourcing=True)
					except ImportError:
						myf=open(mymakeconffile,"r")
						mylines=myf.readlines()
						myf.close()
						return parse_makeconf(mylines)
		except:
			raise CatalystError, "Could not parse make.conf file "+mymakeconffile
	else:
		makeconf={}
		return makeconf

def msg(mymsg,verblevel=1):
	if verbosity>=verblevel:
		print mymsg

def pathcompare(path1,path2):
	# Change double slashes to a single slash
	path1 = re.sub(r"//",r"/",path1)
	path2 = re.sub(r"//",r"/",path2)
	# Remove trailing slash
	path1 = re.sub("/$","",path1)
	path2 = re.sub("/$","",path2)

	if path1 == path2:
		return 1
	return 0

def ismount(path):
	"""enhanced to handle bind mounts"""
	if os.path.ismount(path):
		return 1
	a=os.popen("mount")
	mylines=a.readlines()
	a.close()
	for line in mylines:
		mysplit=line.split()
		if pathcompare(path,mysplit[2]):
			return 1
	return 0

def addl_arg_parse(myspec,addlargs,requiredspec,validspec):
	"""helper function to help targets parse additional arguments"""
	global valid_config_file_values

	messages = []
	for x in addlargs.keys():
		if x not in validspec and x not in valid_config_file_values and x not in requiredspec:
			messages.append("Argument \""+x+"\" not recognized.")
		else:
			myspec[x]=addlargs[x]

	for x in requiredspec:
		if not myspec.has_key(x):
			messages.append("Required argument \""+x+"\" not specified.")

	if messages:
		raise CatalystError, '\n\tAlso: '.join(messages)

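# Hedged usage sketch (not executed on import): a target would merge its spec
# file arguments into its settings dict roughly like this; the spec keys are
# hypothetical.
#
#   myspec = {}
#   addlargs = {"target": "stage3", "rel_type": "default", "bogus_key": "x"}
#   addl_arg_parse(myspec, addlargs, ["target", "rel_type"], ["cflags"])
#   # raises CatalystError complaining that "bogus_key" is not recognized;
#   # without it, myspec would now contain "target" and "rel_type".
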
def touch(myfile):
	try:
		myf=open(myfile,"w")
		myf.close()
	except IOError:
		raise CatalystError, "Could not touch "+myfile+"."

def countdown(secs=5, doing="Starting"):
	if secs:
		print ">>> Waiting",secs,"seconds before starting..."
		print ">>> (Control-C to abort)...\n"+doing+" in: ",
		ticks=range(secs)
		ticks.reverse()
		for sec in ticks:
			sys.stdout.write(str(sec+1)+" ")
			sys.stdout.flush()
			time.sleep(1)
		print

def normpath(mypath):
	TrailingSlash=False
	if mypath[-1] == "/":
		TrailingSlash=True
	newpath = os.path.normpath(mypath)
	if len(newpath) > 1:
		if newpath[:2] == "//":
			newpath = newpath[1:]
	if TrailingSlash:
		newpath=newpath+'/'
	return newpath

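# Hedged illustration (not executed on import): unlike os.path.normpath,
# this helper keeps a single trailing slash and collapses a leading "//".
#
#   normpath("//usr//lib/")   ->  "/usr/lib/"
#   normpath("/tmp/chroot")   ->  "/tmp/chroot"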