X-Git-Url: http://git.tremily.us/?a=blobdiff_plain;f=SConstruct;h=08c7683b644b640da639a957281f57f2cc18866d;hb=53fca3d683cd298983e74a2068e18acff133fb1f;hp=226513b9586fd0eeec3ba3d3ded540aa04869c61;hpb=59e64828f020d3f654718821030a23200947bacc;p=scons.git diff --git a/SConstruct b/SConstruct index 226513b9..08c7683b 100644 --- a/SConstruct +++ b/SConstruct @@ -3,13 +3,14 @@ # # See the README file for an overview of how SCons is built and tested. # +from __future__ import generators ### KEEP FOR COMPATIBILITY FIXERS -# When this gets changed, you also need to change test/option-v.py -# so it looks for the right string. -copyright_years = '2001, 2002, 2003, 2004, 2005, 2006, 2007' +# When this gets changed, you must also change the copyright_years string +# in QMTest/TestSCons.py so the test scripts look for the right string. +copyright_years = '2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010' # This gets inserted into the man pages to reflect the month of release. -month_year = 'January 2007' +month_year = 'January 2010' # # __COPYRIGHT__ @@ -40,14 +41,15 @@ import os import os.path import re import stat -import string import sys import tempfile project = 'scons' -default_version = '0.98.0' +default_version = '1.3.0' copyright = "Copyright (c) %s The SCons Foundation" % copyright_years +platform = distutils.util.get_platform() + SConsignFile() # @@ -55,15 +57,20 @@ SConsignFile() # is available on this system. # def whereis(file): - for dir in string.split(os.environ['PATH'], os.pathsep): + exts = [''] + if platform == "win32": + exts += ['.exe'] + for dir in os.environ['PATH'].split(os.pathsep): f = os.path.join(dir, file) - if os.path.isfile(f): - try: - st = os.stat(f) - except: - continue - if stat.S_IMODE(st[stat.ST_MODE]) & 0111: - return f + for ext in exts: + f_ext = f + ext + if os.path.isfile(f_ext): + try: + st = os.stat(f_ext) + except: + continue + if stat.S_IMODE(st[stat.ST_MODE]) & 0111: + return f_ext return None # @@ -76,7 +83,8 @@ dh_builddeb = whereis('dh_builddeb') fakeroot = whereis('fakeroot') gzip = whereis('gzip') rpmbuild = whereis('rpmbuild') or whereis('rpm') -svn = whereis('svn') +hg = os.path.exists('.hg') and whereis('hg') +svn = os.path.exists('.svn') and whereis('svn') unzip = whereis('unzip') zip = whereis('zip') @@ -98,18 +106,51 @@ if not developer: build_system = ARGUMENTS.get('BUILD_SYSTEM') if not build_system: import socket - build_system = string.split(socket.gethostname(), '.')[0] + build_system = socket.gethostname().split('.')[0] version = ARGUMENTS.get('VERSION', '') if not version: version = default_version +hg_status_lines = [] +svn_status_lines = [] + +if hg: + cmd = "%s status --all 2> /dev/null" % hg + hg_status_lines = os.popen(cmd, "r").readlines() + +if svn: + cmd = "%s status --verbose 2> /dev/null" % svn + svn_status_lines = os.popen(cmd, "r").readlines() + revision = ARGUMENTS.get('REVISION', '') +def generate_build_id(revision): + return revision + +if not revision and hg: + hg_heads = os.popen("%s heads 2> /dev/null" % hg, "r").read() + cs = re.search('changeset:\s+(\S+)', hg_heads) + if cs: + revision = cs.group(1) + b = re.search('branch:\s+(\S+)', hg_heads) + if b: + revision = b.group(1) + ':' + revision + def generate_build_id(revision): + result = revision + if [l for l in hg_status_lines if l[0] in 'AMR!']: + result = result + '[MODIFIED]' + return result + if not revision and svn: svn_info = os.popen("%s info 2> /dev/null" % svn, "r").read() m = re.search('Revision: (\d+)', svn_info) if m: revision = m.group(1) + def 
generate_build_id(revision): + result = 'r' + revision + if [l for l in svn_status_lines if l[0] in 'ACDMR']: + result = result + '[MODIFIED]' + return result checkpoint = ARGUMENTS.get('CHECKPOINT', '') if checkpoint: @@ -118,34 +159,23 @@ if checkpoint: checkpoint = time.strftime('d%Y%m%d', time.localtime(time.time())) elif checkpoint == 'r': checkpoint = 'r' + revision - version = version + checkpoint - -svn_status = None -svn_status_lines = [] - -if svn: - svn_status = os.popen("%s status --verbose 2> /dev/null" % svn, "r").read() - svn_status_lines = svn_status[:-1].split('\n') + version = version + '.' + checkpoint build_id = ARGUMENTS.get('BUILD_ID') if build_id is None: if revision: - build_id = 'r' + revision - if filter(lambda l: l[0] in 'ACDMR', svn_status_lines): - build_id = build_id + '[MODIFIED]' + build_id = generate_build_id(revision) else: build_id = '' python_ver = sys.version[0:3] -platform = distutils.util.get_platform() - # Re-exporting LD_LIBRARY_PATH is necessary if the Python version was # built with the --enable-shared option. ENV = { 'PATH' : os.environ['PATH'] } for key in ['LOGNAME', 'PYTHONPATH', 'LD_LIBRARY_PATH']: - if os.environ.has_key(key): + if key in os.environ: ENV[key] = os.environ[key] build_dir = ARGUMENTS.get('BUILDDIR', 'build') @@ -250,8 +280,7 @@ runtest.py -p option to run tests against what's been actually packaged: """) -aliases = packaging_flavors + [('doc', 'The SCons documentation.')] -aliases.sort() +aliases = sorted(packaging_flavors + [('doc', 'The SCons documentation.')]) for alias, help_text in aliases: tw = textwrap.TextWrapper( @@ -335,16 +364,16 @@ def SCons_revision(target, source, env): # Note: We construct the __*__ substitution strings here # so that they don't get replaced when this file gets # copied into the tree for packaging. 
- contents = string.replace(contents, '__BUILD' + '__', env['BUILD']) - contents = string.replace(contents, '__BUILDSYS' + '__', env['BUILDSYS']) - contents = string.replace(contents, '__COPYRIGHT' + '__', env['COPYRIGHT']) - contents = string.replace(contents, '__DATE' + '__', env['DATE']) - contents = string.replace(contents, '__DEVELOPER' + '__', env['DEVELOPER']) - contents = string.replace(contents, '__FILE' + '__', str(source[0])) - contents = string.replace(contents, '__MONTH_YEAR'+ '__', env['MONTH_YEAR']) - contents = string.replace(contents, '__REVISION' + '__', env['REVISION']) - contents = string.replace(contents, '__VERSION' + '__', env['VERSION']) - contents = string.replace(contents, '__NULL' + '__', '') + contents = contents.replace('__BUILD' + '__', env['BUILD']) + contents = contents.replace('__BUILDSYS' + '__', env['BUILDSYS']) + contents = contents.replace('__COPYRIGHT' + '__', env['COPYRIGHT']) + contents = contents.replace('__DATE' + '__', env['DATE']) + contents = contents.replace('__DEVELOPER' + '__', env['DEVELOPER']) + contents = contents.replace('__FILE' + '__', str(source[0])) + contents = contents.replace('__MONTH_YEAR'+ '__', env['MONTH_YEAR']) + contents = contents.replace('__REVISION' + '__', env['REVISION']) + contents = contents.replace('__VERSION' + '__', env['VERSION']) + contents = contents.replace('__NULL' + '__', '') open(t, 'wb').write(contents) os.chmod(t, os.stat(s)[0]) @@ -377,7 +406,7 @@ def soelim(target, source, env): tfp.close() def soscan(node, env, path): - c = node.get_contents() + c = node.get_text_contents() return re.compile(r"^[\.']so\s+(\S+)", re.M).findall(c) soelimbuilder = Builder(action = Action(soelim), @@ -678,7 +707,7 @@ for p in [ scons ]: pkg_version = "%s-%s" % (pkg, version) src = 'src' - if p.has_key('src_subdir'): + if 'src_subdir' in p: src = os.path.join(src, p['src_subdir']) build = os.path.join(build_dir, pkg) @@ -691,7 +720,10 @@ for p in [ scons ]: platform_zip = os.path.join(build, 'dist', "%s.%s.zip" % (pkg_version, platform)) - win32_exe = os.path.join(build, 'dist', "%s.win32.exe" % pkg_version) + if platform == "win-amd64": + win32_exe = os.path.join(build, 'dist', "%s.win-amd64.exe" % pkg_version) + else: + win32_exe = os.path.join(build, 'dist', "%s.win32.exe" % pkg_version) # # Update the environment with the relevant information @@ -714,15 +746,14 @@ for p in [ scons ]: # destination files. # manifest_in = File(os.path.join(src, 'MANIFEST.in')).rstr() - src_files = map(lambda x: x[:-1], - open(manifest_in).readlines()) + src_files = [x[:-1] for x in open(manifest_in).readlines()] raw_files = src_files[:] dst_files = src_files[:] rpm_files = [] MANIFEST_in_list = [] - if p.has_key('subpkgs'): + if 'subpkgs' in p: # # This package includes some sub-packages. 
Read up their # MANIFEST.in files, and add them to our source and destination @@ -734,9 +765,9 @@ for p in [ scons ]: isubdir = p['subinst_dirs'][sp['pkg']] MANIFEST_in = File(os.path.join(src, ssubdir, 'MANIFEST.in')).rstr() MANIFEST_in_list.append(MANIFEST_in) - files = map(lambda x: x[:-1], open(MANIFEST_in).readlines()) + files = [x[:-1] for x in open(MANIFEST_in).readlines()] raw_files.extend(files) - src_files.extend(map(lambda x, s=ssubdir: os.path.join(s, x), files)) + src_files.extend([os.path.join(subdir, x) for x in files]) for f in files: r = os.path.join(sp['rpm_dir'], f) rpm_files.append(r) @@ -745,7 +776,7 @@ for p in [ scons ]: for f in sp.get('extra_rpm_files', []): r = os.path.join(sp['rpm_dir'], f) rpm_files.append(r) - files = map(lambda x, i=isubdir: os.path.join(i, x), files) + files = [os.path.join(isubdir, x) for x in files] dst_files.extend(files) for k, f in sp['filemap'].items(): if f: @@ -799,8 +830,8 @@ for p in [ scons ]: # # Now go through and arrange to create whatever packages we can. # - build_src_files = map(lambda x, b=build: os.path.join(b, x), src_files) - apply(Local, build_src_files, {}) + build_src_files = [os.path.join(build, x) for x in src_files] + Local(*build_src_files) distutils_formats = [] @@ -811,7 +842,7 @@ for p in [ scons ]: AddPostAction(dist_distutils_targets, Chmod(dist_distutils_targets, 0644)) if not gzip: - print "gzip not found; skipping .tar.gz package for %s." % pkg + print "gzip not found in %s; skipping .tar.gz package for %s." % (os.environ['PATH'], pkg) else: distutils_formats.append('gztar') @@ -837,9 +868,8 @@ for p in [ scons ]: # but that gives heartburn to Cygwin's tar, so work around it # with separate zcat-tar-rm commands. # - unpack_tar_gz_files = map(lambda x, u=unpack_tar_gz_dir, pv=pkg_version: - os.path.join(u, pv, x), - src_files) + unpack_tar_gz_files = [os.path.join(unpack_tar_gz_dir, pkg_version, x) + for x in src_files] env.Command(unpack_tar_gz_files, dist_tar_gz, [ Delete(os.path.join(unpack_tar_gz_dir, pkg_version)), "$ZCAT $SOURCES > .temp", @@ -860,7 +890,7 @@ for p in [ scons ]: # like this because we put a preamble in it that will chdir() # to the directory in which setup.py exists. # - dfiles = map(lambda x, d=test_tar_gz_dir: os.path.join(d, x), dst_files) + dfiles = [os.path.join(test_tar_gz_dir, x) for x in dst_files] env.Command(dfiles, unpack_tar_gz_files, [ Delete(os.path.join(unpack_tar_gz_dir, pkg_version, 'build')), Delete("$TEST_TAR_GZ_DIR"), @@ -877,21 +907,9 @@ for p in [ scons ]: env.Command(ebuild, os.path.join('gentoo', 'scons.ebuild.in'), SCons_revision) def Digestify(target, source, env): import md5 - def hexdigest(s): - """Return a signature as a string of hex characters. - """ - # NOTE: This routine is a method in the Python 2.0 interface - # of the native md5 module, but we want SCons to operate all - # the way back to at least Python 1.5.2, which doesn't have it. - h = string.hexdigits - r = '' - for c in s: - i = ord(c) - r = r + h[(i >> 4) & 0xF] + h[i & 0xF] - return r src = source[0].rfile() contents = open(str(src)).read() - sig = hexdigest(md5.new(contents).digest()) + sig = md5.new(contents).hexdigest() bytes = os.stat(str(src))[6] open(str(target[0]), 'w').write("MD5 %s %s %d\n" % (sig, src.name, @@ -918,9 +936,8 @@ for p in [ scons ]: # Unpack the zip archive created by the distutils into # build/unpack-zip/scons-{version}. 
# - unpack_zip_files = map(lambda x, u=unpack_zip_dir, pv=pkg_version: - os.path.join(u, pv, x), - src_files) + unpack_zip_files = [os.path.join(unpack_zip_dir, pkg_version, x) + for x in src_files] env.Command(unpack_zip_files, dist_zip, [ Delete(os.path.join(unpack_zip_dir, pkg_version)), @@ -940,7 +957,7 @@ for p in [ scons ]: # like this because we put a preamble in it that will chdir() # to the directory in which setup.py exists. # - dfiles = map(lambda x, d=test_zip_dir: os.path.join(d, x), dst_files) + dfiles = [os.path.join(test_zip_dir, x) for x in dst_files] env.Command(dfiles, unpack_zip_files, [ Delete(os.path.join(unpack_zip_dir, pkg_version, 'build')), Delete("$TEST_ZIP_DIR"), @@ -977,12 +994,12 @@ for p in [ scons ]: maintain multiple lists. """ c = open(str(source[0]), 'rb').read() - c = string.replace(c, '__VERSION' + '__', env['VERSION']) - c = string.replace(c, '__RPM_FILES' + '__', env['RPM_FILES']) + c = c.replace('__VERSION' + '__', env['VERSION']) + c = c.replace('__RPM_FILES' + '__', env['RPM_FILES']) open(str(target[0]), 'wb').write(c) rpm_files.sort() - rpm_files_str = string.join(rpm_files, "\n") + "\n" + rpm_files_str = "\n".join(rpm_files) + "\n" rpm_spec_env = env.Clone(RPM_FILES = rpm_files_str) rpm_spec_action = Action(spec_function, varlist=['RPM_FILES']) rpm_spec_env.Command(specfile, specfile_in, rpm_spec_action) @@ -1003,8 +1020,7 @@ for p in [ scons ]: AddPostAction(dist_noarch_rpm, Chmod(dist_noarch_rpm, 0644)) AddPostAction(dist_src_rpm, Chmod(dist_src_rpm, 0644)) - dfiles = map(lambda x, d=test_rpm_dir: os.path.join(d, 'usr', x), - dst_files) + dfiles = [os.path.join(test_rpm_dir, 'usr', x) for x in dst_files] env.Command(dfiles, dist_noarch_rpm, "$RPM2CPIO $SOURCES | (cd $TEST_RPM_DIR && cpio -id)") @@ -1027,8 +1043,7 @@ for p in [ scons ]: if s[:len(old)] == old: s = new + s[len(old):] return os.path.join('usr', s) - dfiles = map(lambda x, t=test_deb_dir: os.path.join(t, x), - map(xxx, dst_files)) + dfiles = [os.path.join(test_deb_dir, xxx(x)) for x in dst_files] env.Command(dfiles, deb, "dpkg --fsys-tarfile $SOURCES | (cd $TEST_DEB_DIR && tar -xf -)") @@ -1051,7 +1066,7 @@ for p in [ scons ]: commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY bdist_dumb -f %s" % format) commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY sdist --formats=%s" % \ - string.join(distutils_formats, ',')) + ','.join(distutils_formats)) commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY bdist_wininst") @@ -1084,11 +1099,11 @@ for p in [ scons ]: local_script = os.path.join(build_dir_local, script) commands.append(Move(local_script + '.py', local_script)) - rf = filter(lambda x: not x in scripts, raw_files) - rf = map(lambda x, slv=s_l_v: os.path.join(slv, x), rf) + rf = [x for x in raw_files if not x in scripts] + rf = [os.path.join(s_l_v, x) for x in rf] for script in scripts: rf.append("%s.py" % script) - local_targets = map(lambda x, s=build_dir_local: os.path.join(s, x), rf) + local_targets = [os.path.join(build_dir_local, x) for x in rf] env.Command(local_targets, build_src_files, commands) @@ -1107,9 +1122,7 @@ for p in [ scons ]: local_targets, "cd %s && tar czf $( ${TARGET.abspath} $) *" % build_dir_local) - unpack_targets = map(lambda x, d=test_local_tar_gz_dir: - os.path.join(d, x), - rf) + unpack_targets = [os.path.join(test_local_tar_gz_dir, x) for x in rf] commands = [Delete(test_local_tar_gz_dir), Mkdir(test_local_tar_gz_dir), "cd %s && tar xzf $( ${SOURCE.abspath} $)" % test_local_tar_gz_dir] @@ -1120,9 +1133,7 @@ for p in [ scons ]: env.Command(dist_local_zip, 
local_targets, zipit, CD = build_dir_local, PSV = '.') - unpack_targets = map(lambda x, d=test_local_zip_dir: - os.path.join(d, x), - rf) + unpack_targets = [os.path.join(test_local_zip_dir, x) for x in rf] commands = [Delete(test_local_zip_dir), Mkdir(test_local_zip_dir), unzipit] @@ -1173,22 +1184,29 @@ SConscript('doc/SConscript') # source archive from the project files and files in the change. # -if not svn_status: - "Not building in a Subversion tree; skipping building src package." +sfiles = None +if hg_status_lines: + slines = [l for l in hg_status_lines if l[0] in 'ACM'] + sfiles = [l.split()[-1] for l in slines] +elif svn_status_lines: + slines = [l for l in svn_status_lines if l[0] in ' MA'] + sentries = [l.split()[-1] for l in slines] + sfiles = list(filter(os.path.isfile, sentries)) else: - slines = filter(lambda l: l[0] in ' MA', svn_status_lines) - sentries = map(lambda l: l.split()[-1], slines) - sfiles = filter(os.path.isfile, sentries) + "Not building in a Mercurial or Subversion tree; skipping building src package." +if sfiles: remove_patterns = [ + '.hgt/*', '.svnt/*', '*.aeignore', '*.cvsignore', + '*.hgignore', 'www/*', ] for p in remove_patterns: - sfiles = filter(lambda s, p=p: not fnmatch.fnmatch(s, p), sfiles) + sfiles = [s for s in sfiles if not fnmatch.fnmatch(s, p)] if sfiles: ps = "%s-src" % project @@ -1205,7 +1223,7 @@ else: for file in sfiles: env.SCons_revision(os.path.join(b_ps, file), file) - b_ps_files = map(lambda x, d=b_ps: os.path.join(d, x), sfiles) + b_ps_files = [os.path.join(b_ps, x) for x in sfiles] cmds = [ Delete(b_psv), Copy(b_psv, b_ps), @@ -1214,7 +1232,7 @@ else: env.Command(b_psv_stamp, src_deps + b_ps_files, cmds) - apply(Local, b_ps_files, {}) + Local(*b_ps_files) if gzip: @@ -1224,9 +1242,8 @@ else: # # Unpack the archive into build/unpack/scons-{version}. # - unpack_tar_gz_files = map(lambda x, u=unpack_tar_gz_dir, psv=psv: - os.path.join(u, psv, x), - sfiles) + unpack_tar_gz_files = [os.path.join(unpack_tar_gz_dir, psv, x) + for x in sfiles] # # We'd like to replace the last three lines with the following: @@ -1255,8 +1272,7 @@ else: # like this because we put a preamble in it that will chdir() # to the directory in which setup.py exists. # - dfiles = map(lambda x, d=test_src_tar_gz_dir: os.path.join(d, x), - dst_files) + dfiles = [os.path.join(test_src_tar_gz_dir, x) for x in dst_files] scons_lib_dir = os.path.join(unpack_tar_gz_dir, psv, 'src', 'engine') ENV = env.Dictionary('ENV').copy() ENV['SCONS_LIB_DIR'] = scons_lib_dir @@ -1289,9 +1305,8 @@ else: # # Unpack the archive into build/unpack/scons-{version}. # - unpack_zip_files = map(lambda x, u=unpack_zip_dir, psv=psv: - os.path.join(u, psv, x), - sfiles) + unpack_zip_files = [os.path.join(unpack_zip_dir, psv, x) + for x in sfiles] env.Command(unpack_zip_files, src_zip, [ Delete(os.path.join(unpack_zip_dir, psv)), @@ -1311,8 +1326,7 @@ else: # like this because we put a preamble in it that will chdir() # to the directory in which setup.py exists. # - dfiles = map(lambda x, d=test_src_zip_dir: os.path.join(d, x), - dst_files) + dfiles = [os.path.join(test_src_zip_dir, x) for x in dst_files] scons_lib_dir = os.path.join(unpack_zip_dir, psv, 'src', 'engine') ENV = env.Dictionary('ENV').copy() ENV['SCONS_LIB_DIR'] = scons_lib_dir
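
A minimal standalone sketch of the build-id technique the patch above introduces: detect whether the checkout is a Mercurial or Subversion working copy, pull a revision identifier out of "hg heads" or "svn info", and append "[MODIFIED]" when the status output shows local changes. This is illustrative only, not part of the patch or of any SCons API; the helper name build_id_from_checkout() is made up, and it assumes hg/svn are on PATH and follows the same os.popen style the SConstruct uses.

    import os
    import re

    def build_id_from_checkout():
        # Hypothetical helper: derive a human-readable build id from the
        # state of the current working copy, mirroring the SConstruct logic.
        if os.path.exists('.hg'):
            # "hg heads" reports the head changeset and (optionally) branch.
            heads = os.popen('hg heads 2> /dev/null').read()
            cs = re.search(r'changeset:\s+(\S+)', heads)
            revision = cs.group(1) if cs else ''
            b = re.search(r'branch:\s+(\S+)', heads)
            if b and revision:
                revision = b.group(1) + ':' + revision
            status = os.popen('hg status --all 2> /dev/null').readlines()
            # Added, Modified, Removed, or missing files mean a dirty tree.
            modified = [l for l in status if l[:1] in 'AMR!']
        elif os.path.exists('.svn'):
            info = os.popen('svn info 2> /dev/null').read()
            m = re.search(r'Revision: (\d+)', info)
            revision = 'r' + m.group(1) if m else ''
            status = os.popen('svn status --verbose 2> /dev/null').readlines()
            modified = [l for l in status if l[:1] in 'ACDMR']
        else:
            # Not a recognized working copy; the SConstruct falls back to
            # an empty build id in this case.
            return ''
        if modified:
            revision = revision + '[MODIFIED]'
        return revision

    if __name__ == '__main__':
        print(build_id_from_checkout())

In the SConstruct itself the equivalent result is produced by the nested generate_build_id() definitions and then substituted into the packaged files via the __BUILD__ replacement in SCons_revision(), so a tarball built from a dirty tree is visibly marked as modified.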