#
# SConstruct file to build scons packages during development.
#
+# See the README file for an overview of how SCons is built and tested.
+#
+
+copyright_years = '2001, 2002, 2003, 2004'
#
-# Copyright (c) 2001, 2002 Steven Knight
+# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
import distutils.util
import os
import os.path
+import re
+import socket
import stat
import string
import sys
import time
project = 'scons'
-default_version = '0.06'
+default_version = '0.96'
+copyright = "Copyright (c) %s The SCons Foundation" % copyright_years
Default('.')
+SConsignFile()
+
#
-# An internal "whereis" routine to figure out if we have a
-# given program available. Put it in the "cons::" package
-# so subsidiary Conscript files can get at it easily, too.
+# An internal "whereis" routine to figure out if a given program
+# is available on this system.
#
-
def whereis(file):
for dir in string.split(os.environ['PATH'], os.pathsep):
f = os.path.join(dir, file)
#
# We let the presence or absence of various utilities determine
# whether or not we bother to build certain pieces of things.
-# This will allow people to still do SCons work even if they
+# This should allow people to still do SCons work even if they
# don't have Aegis or RPM installed, for example.
#
aegis = whereis('aegis')
dh_builddeb = whereis('dh_builddeb')
fakeroot = whereis('fakeroot')
gzip = whereis('gzip')
-rpm = whereis('rpm')
+rpmbuild = whereis('rpmbuild') or whereis('rpm')
unzip = whereis('unzip')
-
-# My installation on Red Hat doesn't like any debhelper version
-# beyond 2, so let's use 2 as the default on any non-Debian build.
-if os.path.isfile('/etc/debian_version'):
- dh_compat = 3
-else:
- dh_compat = 2
+zip = whereis('zip')
#
-# Now grab the information that we "build" into the files (using sed).
+# Now grab the information that we "build" into the files.
#
try:
date = ARGUMENTS['date']
except:
date = time.strftime("%Y/%m/%d %H:%M:%S", time.localtime(time.time()))
-
+
if ARGUMENTS.has_key('developer'):
developer = ARGUMENTS['developer']
elif os.environ.has_key('USERNAME'):
elif os.environ.has_key('USER'):
developer = os.environ['USER']
-try:
+if ARGUMENTS.has_key('build_system'):
+ build_system = ARGUMENTS['build_system']
+else:
+ build_system = string.split(socket.gethostname(), '.')[0]
+
+if ARGUMENTS.has_key('version'):
revision = ARGUMENTS['version']
-except:
- if aesub:
- revision = os.popen(aesub + " \\$version", "r").read()[:-1]
- else:
- revision = default_version
-
-a = string.split(revision, '.')
-arr = [a[0]]
-for s in a[1:]:
- if len(s) == 1:
- s = '0' + s
- arr.append(s)
-revision = string.join(arr, '.')
+elif aesub:
+ revision = os.popen(aesub + " \\$version", "r").read()[:-1]
+else:
+ revision = default_version
+
+# This is old (disabled) code that zero-padded each single-digit component
+# of a dotted revision number (e.g. turning 0.9.1 into 0.09.01).
+#a = string.split(revision, '.')
+#arr = [a[0]]
+#for s in a[1:]:
+# if len(s) == 1:
+# s = '0' + s
+# arr.append(s)
+#revision = string.join(arr, '.')
# Here's how we'd turn the calculated $revision into our package $version.
# This makes it difficult to coordinate with other files (debian/changelog
#version = string.join(arr, '.')
version = default_version
-try:
+build_id = string.replace(revision, version + '.', '')
+
+if ARGUMENTS.has_key('change'):
change = ARGUMENTS['change']
-except:
- if aesub:
- change = os.popen(aesub + " \\$change", "r").read()[:-1]
- else:
- change = default_version
+elif aesub:
+ change = os.popen(aesub + " \\$change", "r").read()[:-1]
+else:
+ change = default_version
python_ver = sys.version[0:3]
platform = distutils.util.get_platform()
ENV = { 'PATH' : os.environ['PATH'] }
-if os.environ.has_key('AEGIS_PROJECT'):
- ENV['AEGIS_PROJECT'] = os.environ['AEGIS_PROJECT']
-
-lib_project = os.path.join("lib", project)
-
-unpack_tar_gz_dir = os.path.join(os.getcwd(), "build", "unpack-tar-gz")
-
-unpack_zip_dir = os.path.join(os.getcwd(), "build", "unpack-zip")
-
-test_tar_gz_dir = os.path.join(os.getcwd(), "build", "test-tar-gz")
-test_src_tar_gz_dir = os.path.join(os.getcwd(), "build", "test-src-tar-gz")
+for key in ['AEGIS_PROJECT', 'LOGNAME', 'PYTHONPATH']:
+ if os.environ.has_key(key):
+ ENV[key] = os.environ[key]
-test_zip_dir = os.path.join(os.getcwd(), "build", "test-zip")
-test_src_zip_dir = os.path.join(os.getcwd(), "build", "test-src-zip")
+cwd_build = os.path.join(os.getcwd(), "build")
-test_rpm_dir = os.path.join(os.getcwd(), "build", "test-rpm")
+test_deb_dir = os.path.join(cwd_build, "test-deb")
+test_rpm_dir = os.path.join(cwd_build, "test-rpm")
+test_tar_gz_dir = os.path.join(cwd_build, "test-tar-gz")
+test_src_tar_gz_dir = os.path.join(cwd_build, "test-src-tar-gz")
+test_local_tar_gz_dir = os.path.join(cwd_build, "test-local-tar-gz")
+test_zip_dir = os.path.join(cwd_build, "test-zip")
+test_src_zip_dir = os.path.join(cwd_build, "test-src-zip")
+test_local_zip_dir = os.path.join(cwd_build, "test-local-zip")
-test_deb_dir = os.path.join(os.getcwd(), "build", "test-deb")
+unpack_tar_gz_dir = os.path.join(cwd_build, "unpack-tar-gz")
+unpack_zip_dir = os.path.join(cwd_build, "unpack-zip")
if platform == "win32":
tar_hflag = ''
- python_project_subinst_dir = None
+ python_project_subinst_dir = os.path.join("Lib", "site-packages", project)
project_script_subinst_dir = 'Scripts'
else:
tar_hflag = 'h'
- python_project_subinst_dir = lib_project
+ python_project_subinst_dir = os.path.join("lib", project)
project_script_subinst_dir = 'bin'
+
+zcat = 'gzip -d -c'
+
#
# Figure out if we can handle .zip files.
#
+zipit = None
+unzipit = None
try:
import zipfile
def zipit(env, target, source):
- print "Zipping %s:" % target
+ print "Zipping %s:" % str(target[0])
def visit(arg, dirname, names):
for name in names:
- arg.write(os.path.join(dirname, name))
- os.chdir('build')
- zf = zipfile.ZipFile(target, 'w')
- os.path.walk(env['PSV'], visit, zf)
- os.chdir('..')
+ path = os.path.join(dirname, name)
+ if os.path.isfile(path):
+ arg.write(path)
+ zf = zipfile.ZipFile(str(target[0]), 'w')
+ olddir = os.getcwd()
+ os.chdir(env['CD'])
+ try: os.path.walk(env['PSV'], visit, zf)
+ finally: os.chdir(olddir)
+ zf.close()
def unzipit(env, target, source):
- print "Unzipping %s:" % source[0]
- zf = zipfile.ZipFile(source[0], 'r')
+ print "Unzipping %s:" % str(source[0])
+ zf = zipfile.ZipFile(str(source[0]), 'r')
for name in zf.namelist():
dest = os.path.join(env['UNPACK_ZIP_DIR'], name)
- open(dest, 'w').write(zf.read(name))
+ dir = os.path.dirname(dest)
+ try:
+ os.makedirs(dir)
+ except:
+ pass
+ print dest,name
+ # if the file exists, then delete it before writing
+ # to it so that we don't end up trying to write to a symlink:
+ if os.path.isfile(dest) or os.path.islink(dest):
+ os.unlink(dest)
+ if not os.path.isdir(dest):
+ open(dest, 'w').write(zf.read(name))
except:
- if unzip:
- zipit = "cd build && $ZIP $ZIPFLAGS dist/${TARGET.file} $PSV"
+ if unzip and zip:
+ zipit = "cd $CD && $ZIP $ZIPFLAGS $( ${TARGET.abspath} $) $PSV"
unzipit = "$UNZIP $UNZIPFLAGS $SOURCES"
def SCons_revision(target, source, env):
"""Interpolate specific values from the environment into a file.
-
+
This is used to copy files into a tree that gets packaged up
into the source file package.
"""
t = str(target[0])
- s = str(source[0])
- # Note: We don't use $VERSION from the environment so that
- # this routine will change when the version number changes
- # and things will get rebuilt properly.
- global version
- print "SCons_revision() < %s > %s" % (s, t)
- inf = open(s, 'rb')
- outf = open(t, 'wb')
- for line in inf.readlines():
- # Note: We construct the __*__ substitution strings here
- # so that they don't get replaced when this file gets
- # copied into the tree for packaging.
- line = string.replace(line, '_' + '_DATE__', env['DATE'])
- line = string.replace(line, '_' + '_DEVELOPER__', env['DEVELOPER'])
- line = string.replace(line, '_' + '_FILE__', s)
- line = string.replace(line, '_' + '_REVISION__', env['REVISION'])
- line = string.replace(line, '_' + '_VERSION__', version)
- outf.write(line)
- inf.close()
- outf.close()
+ s = source[0].rstr()
+ contents = open(s, 'rb').read()
+ # Note: We construct the __*__ substitution strings here
+ # so that they don't get replaced when this file gets
+ # copied into the tree for packaging.
+ contents = string.replace(contents, '__BUILD' + '__', env['BUILD'])
+ contents = string.replace(contents, '__BUILDSYS' + '__', env['BUILDSYS'])
+ contents = string.replace(contents, '__COPYRIGHT' + '__', env['COPYRIGHT'])
+ contents = string.replace(contents, '__DATE' + '__', env['DATE'])
+ contents = string.replace(contents, '__DEVELOPER' + '__', env['DEVELOPER'])
+ contents = string.replace(contents, '__FILE' + '__', str(source[0]))
+ contents = string.replace(contents, '__REVISION' + '__', env['REVISION'])
+ contents = string.replace(contents, '__VERSION' + '__', env['VERSION'])
+ contents = string.replace(contents, '__NULL' + '__', '')
+ open(t, 'wb').write(contents)
os.chmod(t, os.stat(s)[0])
-revbuilder = Builder(name = 'SCons_revision', action = SCons_revision)
+revbuilder = Builder(action = Action(SCons_revision,
+ varlist=['COPYRIGHT', 'VERSION']))
+
+def soelim(target, source, env):
+    """
+    Interpolate files included in [gnt]roff source files using the
+    .so directive.
+
+    This behaves somewhat like the soelim(1) wrapper around groff, but
+    makes us independent of whether the actual underlying implementation
+    includes an soelim() command or the corresponding command-line option
+    to groff(1). The key behavioral difference is that this doesn't
+    recursively include .so files from the include file. Not yet, anyway.
+    """
+    t = str(target[0])
+    s = str(source[0])
+    # Included files are resolved relative to the source file's directory.
+    dir, f = os.path.split(s)
+    tfp = open(t, 'w')
+    sfp = open(s, 'r')
+    for line in sfp.readlines():
+        # A ".so file" or "'so file" directive names a file whose contents
+        # get spliced in verbatim in place of the directive line.
+        if line[:4] in ['.so ', "'so "]:
+            # line[4:-1] strips the 4-character directive prefix and the
+            # trailing newline, leaving just the included file name.
+            sofile = os.path.join(dir, line[4:-1])
+            tfp.write(open(sofile, 'r').read())
+        else:
+            tfp.write(line)
+    sfp.close()
+    tfp.close()
+
+def soscan(node, env, path):
+    # Scanner function paired with soelim() above: return the file names
+    # referenced by .so/'so directives at the start of a line, so SCons
+    # treats them as implicit dependencies of the roff source and rebuilds
+    # the target when an included file changes.
+    c = node.get_contents()
+    return re.compile(r"^[\.']so\s+(\S+)", re.M).findall(c)
+
+soelimbuilder = Builder(action = Action(soelim),
+ source_scanner = Scanner(soscan))
+
+# When copying local files from a Repository (Aegis),
+# just make copies, don't symlink them.
+SetOption('duplicate', 'copy')
env = Environment(
ENV = ENV,
-
+
+ BUILD = build_id,
+ BUILDSYS = build_system,
+ COPYRIGHT = copyright,
DATE = date,
DEVELOPER = developer,
REVISION = revision,
VERSION = version,
- DH_COMPAT = dh_compat,
+ DH_COMPAT = 2,
TAR_HFLAG = tar_hflag,
- ZIP = whereis('zip'),
+ ZIP = zip,
ZIPFLAGS = '-r',
UNZIP = unzip,
UNZIPFLAGS = '-o -d $UNPACK_ZIP_DIR',
+ ZCAT = zcat,
+
+ RPMBUILD = rpmbuild,
+ RPM2CPIO = 'rpm2cpio',
+
TEST_DEB_DIR = test_deb_dir,
TEST_RPM_DIR = test_rpm_dir,
TEST_SRC_TAR_GZ_DIR = test_src_tar_gz_dir,
UNPACK_TAR_GZ_DIR = unpack_tar_gz_dir,
UNPACK_ZIP_DIR = unpack_zip_dir,
- BUILDERS = [ revbuilder ],
+ BUILDERS = { 'SCons_revision' : revbuilder,
+ 'SOElim' : soelimbuilder },
+
+ PYTHON = sys.executable,
+ PYTHONFLAGS = '-tt',
)
+Version_values = [Value(version), Value(build_id)]
+
#
# Define SCons packages.
#
# In the original, more complicated packaging scheme, we were going
# to have separate packages for:
#
-# python-scons only the build engine
-# scons-script only the script
-# scons the script plus the build engine
+# python-scons only the build engine
+# scons-script only the script
+# scons the script plus the build engine
#
# We're now only delivering a single "scons" package, but this is still
# "built" as two sub-packages (the build engine and the script), so
'pkg' : 'python-' + project,
'src_subdir' : 'engine',
'inst_subdir' : os.path.join('lib', 'python1.5', 'site-packages'),
+ 'rpm_dir' : '/usr/lib/scons',
'debian_deps' : [
- 'debian/rules',
- 'debian/control',
'debian/changelog',
+ 'debian/control',
'debian/copyright',
- 'debian/python-scons.postinst',
- 'debian/python-scons.prerm',
+ 'debian/dirs',
+ 'debian/docs',
+ 'debian/postinst',
+ 'debian/prerm',
+ 'debian/rules',
],
'files' : [ 'LICENSE.txt',
'filemap' : {
'LICENSE.txt' : '../LICENSE.txt'
},
+
+ 'buildermap' : {},
+
+ 'explicit_deps' : {
+ 'SCons/__init__.py' : Version_values,
+ },
}
#
#python2_scons = {
# 'pkg' : 'python2-' + project,
# 'src_subdir' : 'engine',
-# 'inst_subdir' : os.path.join('lib', 'python2.1', 'site-packages'),
+# 'inst_subdir' : os.path.join('lib', 'python2.2', 'site-packages'),
#
# 'debian_deps' : [
-# 'debian/rules',
-# 'debian/control',
-# 'debian/changelog',
-# 'debian/copyright',
-# 'debian/python2-scons.postinst',
-# 'debian/python2-scons.prerm',
+# 'debian/changelog',
+# 'debian/control',
+# 'debian/copyright',
+# 'debian/dirs',
+# 'debian/docs',
+# 'debian/postinst',
+# 'debian/prerm',
+# 'debian/rules',
# ],
#
# 'files' : [
# 'filemap' : {
# 'LICENSE.txt' : '../LICENSE.txt',
# },
+# 'buildermap' : {},
#}
#
'pkg' : project + '-script',
'src_subdir' : 'script',
'inst_subdir' : 'bin',
+ 'rpm_dir' : '/usr/bin',
'debian_deps' : [
- 'debian/rules',
- 'debian/control',
'debian/changelog',
+ 'debian/control',
'debian/copyright',
- 'debian/python-scons.postinst',
- 'debian/python-scons.prerm',
+ 'debian/dirs',
+ 'debian/docs',
+ 'debian/postinst',
+ 'debian/prerm',
+ 'debian/rules',
],
'files' : [
],
'filemap' : {
- 'LICENSE.txt' : '../LICENSE.txt',
- 'scons' : 'scons.py',
- }
+ 'LICENSE.txt' : '../LICENSE.txt',
+ 'scons' : 'scons.py',
+ 'sconsign' : 'sconsign.py',
+ },
+
+ 'buildermap' : {},
+
+ 'extra_rpm_files' : [
+ 'scons-' + version,
+ 'sconsign-' + version,
+ ],
+
+ 'explicit_deps' : {
+ 'scons' : Version_values,
+ 'sconsign' : Version_values,
+ },
}
scons = {
'pkg' : project,
- 'debian_deps' : [
- 'debian/rules',
- 'debian/control',
+ 'debian_deps' : [
'debian/changelog',
+ 'debian/control',
'debian/copyright',
- 'debian/scons.postinst',
- 'debian/scons.prerm',
+ 'debian/dirs',
+ 'debian/docs',
+ 'debian/postinst',
+ 'debian/prerm',
+ 'debian/rules',
],
- 'files' : [
+ 'files' : [
'CHANGES.txt',
'LICENSE.txt',
'README.txt',
'RELEASE.txt',
'os_spawnv_fix.diff',
'scons.1',
+ 'sconsign.1',
'script/scons.bat',
'setup.cfg',
'setup.py',
],
'filemap' : {
- 'scons.1' : '../doc/man/scons.1',
+ 'scons.1' : '../build/doc/man/scons.1',
+ 'sconsign.1' : '../build/doc/man/sconsign.1',
+ },
+
+ 'buildermap' : {
+ 'scons.1' : env.SOElim,
+ 'sconsign.1' : env.SOElim,
},
- 'subpkgs' : [ python_scons, scons_script ],
+ 'subpkgs' : [ python_scons, scons_script ],
- 'subinst_dirs' : {
+ 'subinst_dirs' : {
'python-' + project : python_project_subinst_dir,
project + '-script' : project_script_subinst_dir,
},
}
+scripts = ['scons', 'sconsign']
+
src_deps = []
src_files = []
# like this because we put a preamble in it that will chdir()
# to the directory in which setup.py exists.
#
- env.Update(PKG = pkg,
- PKG_VERSION = pkg_version,
- SETUP_PY = os.path.join(build, 'setup.py'))
+ setup_py = os.path.join(build, 'setup.py')
+ env.Replace(PKG = pkg,
+ PKG_VERSION = pkg_version,
+ SETUP_PY = setup_py)
+ Local(setup_py)
#
# Read up the list of source files from our MANIFEST.in.
# README.txt, or setup.py. Make a copy of the list for the
# destination files.
#
+ manifest_in = File(os.path.join(src, 'MANIFEST.in')).rstr()
src_files = map(lambda x: x[:-1],
- open(os.path.join(src, 'MANIFEST.in')).readlines())
+ open(manifest_in).readlines())
+ raw_files = src_files[:]
dst_files = src_files[:]
+ rpm_files = []
+
+ MANIFEST_in_list = []
if p.has_key('subpkgs'):
#
for sp in p['subpkgs']:
ssubdir = sp['src_subdir']
isubdir = p['subinst_dirs'][sp['pkg']]
- f = map(lambda x: x[:-1],
- open(os.path.join(src, ssubdir, 'MANIFEST.in')).readlines())
- src_files.extend(map(lambda x, s=ssubdir: os.path.join(s, x), f))
- if isubdir:
- f = map(lambda x, i=isubdir: os.path.join(i, x), f)
- dst_files.extend(f)
- for k in sp['filemap'].keys():
- f = sp['filemap'][k]
+ MANIFEST_in = File(os.path.join(src, ssubdir, 'MANIFEST.in')).rstr()
+ MANIFEST_in_list.append(MANIFEST_in)
+ files = map(lambda x: x[:-1], open(MANIFEST_in).readlines())
+ raw_files.extend(files)
+ src_files.extend(map(lambda x, s=ssubdir: os.path.join(s, x), files))
+ for f in files:
+ r = os.path.join(sp['rpm_dir'], f)
+ rpm_files.append(r)
+ if f[-3:] == ".py":
+ rpm_files.append(r + 'c')
+ for f in sp.get('extra_rpm_files', []):
+ r = os.path.join(sp['rpm_dir'], f)
+ rpm_files.append(r)
+ files = map(lambda x, i=isubdir: os.path.join(i, x), files)
+ dst_files.extend(files)
+ for k, f in sp['filemap'].items():
if f:
- k = os.path.join(sp['src_subdir'], k)
- p['filemap'][k] = os.path.join(sp['src_subdir'], f)
+ k = os.path.join(ssubdir, k)
+ p['filemap'][k] = os.path.join(ssubdir, f)
+ for f, deps in sp['explicit_deps'].items():
+ f = os.path.join(build, ssubdir, f)
+ env.Depends(f, deps)
#
# Now that we have the "normal" source files, add those files
# concocted to expand __FILE__, __VERSION__, etc.
#
for b in src_files:
- s = p['filemap'].get(b, b)
- env.SCons_revision(os.path.join(build, b), os.path.join(src, s))
+ s = p['filemap'].get(b, b)
+ builder = p['buildermap'].get(b, env.SCons_revision)
+ x = builder(os.path.join(build, b), os.path.join(src, s))
+ Local(x)
#
# NOW, finally, we can create the MANIFEST, which we do
# MANIFEST itself to the array, of course.
#
src_files.append("MANIFEST")
+ MANIFEST_in_list.append(os.path.join(src, 'MANIFEST.in'))
- def copy(target, source, **kw):
+ def write_src_files(target, source, **kw):
global src_files
- src_files.sort()
+ src_files.sort()
f = open(str(target[0]), 'wb')
for file in src_files:
f.write(file + "\n")
f.close()
return 0
env.Command(os.path.join(build, 'MANIFEST'),
- os.path.join(src, 'MANIFEST.in'),
- copy)
+ MANIFEST_in_list,
+ write_src_files)
#
# Now go through and arrange to create whatever packages we can.
#
build_src_files = map(lambda x, b=build: os.path.join(b, x), src_files)
+ apply(Local, build_src_files, {})
distutils_formats = []
#
# We'd like to replace the last three lines with the following:
#
- # tar zxf $SOURCES -C $UNPACK_TAR_GZ_DIR
+ # tar zxf $SOURCES -C $UNPACK_TAR_GZ_DIR
#
# but that gives heartburn to Cygwin's tar, so work around it
# with separate zcat-tar-rm commands.
os.path.join(u, pv, x),
src_files)
env.Command(unpack_tar_gz_files, tar_gz, [
- "rm -rf %s" % os.path.join(unpack_tar_gz_dir, pkg_version),
- "zcat $SOURCES > .temp",
+ Delete(os.path.join(unpack_tar_gz_dir, pkg_version)),
+ "$ZCAT $SOURCES > .temp",
"tar xf .temp -C $UNPACK_TAR_GZ_DIR",
- "rm -f .temp",
+ Delete(".temp"),
])
-
+
#
# Run setup.py in the unpacked subdirectory to "install" everything
# into our build/test subdirectory. The runtest.py script will set
#
dfiles = map(lambda x, d=test_tar_gz_dir: os.path.join(d, x), dst_files)
env.Command(dfiles, unpack_tar_gz_files, [
- "rm -rf %s" % os.path.join(unpack_tar_gz_dir, pkg_version, 'build'),
- "rm -rf $TEST_TAR_GZ_DIR",
- "python %s install --prefix=$TEST_TAR_GZ_DIR" % \
+ Delete(os.path.join(unpack_tar_gz_dir, pkg_version, 'build')),
+ Delete("$TEST_TAR_GZ_DIR"),
+ '$PYTHON $PYTHONFLAGS "%s" install "--prefix=$TEST_TAR_GZ_DIR" --standalone-lib' % \
os.path.join(unpack_tar_gz_dir, pkg_version, 'setup.py'),
])
+ #
+ # Generate portage files for submission to Gentoo Linux.
+ #
+ gentoo = os.path.join('build', 'gentoo')
+ ebuild = os.path.join(gentoo, 'scons-%s.ebuild' % version)
+ digest = os.path.join(gentoo, 'files', 'digest-scons-%s' % version)
+ env.Command(ebuild, os.path.join('gentoo', 'scons.ebuild.in'), SCons_revision)
+    def Digestify(target, source, env):
+        """Write a Gentoo portage digest file describing source[0].
+
+        The target receives a single line of the form
+        "MD5 <hex-signature> <file-name> <size-in-bytes>", computed from
+        the Repository-resolved source file (rfile()).
+        """
+        import md5
+        def hexdigest(s):
+            """Return a signature as a string of hex characters.
+            """
+            # NOTE: This routine is a method in the Python 2.0 interface
+            # of the native md5 module, but we want SCons to operate all
+            # the way back to at least Python 1.5.2, which doesn't have it.
+            h = string.hexdigits
+            r = ''
+            for c in s:
+                i = ord(c)
+                r = r + h[(i >> 4) & 0xF] + h[i & 0xF]
+            return r
+        src = source[0].rfile()
+        contents = open(str(src)).read()
+        sig = hexdigest(md5.new(contents).digest())
+        # os.stat() tuple index 6 is st_size: the file length in bytes.
+        bytes = os.stat(str(src))[6]
+        open(str(target[0]), 'w').write("MD5 %s %s %d\n" % (sig,
+                                                            src.name,
+                                                            bytes))
+ env.Command(digest, tar_gz, Digestify)
+
if zipit:
distutils_formats.append('zip')
os.path.join(u, pv, x),
src_files)
- env.Command(unpack_zip_files, zip, unzipit)
+ env.Command(unpack_zip_files, zip, [
+ Delete(os.path.join(unpack_zip_dir, pkg_version)),
+ unzipit,
+ ])
#
# Run setup.py in the unpacked subdirectory to "install" everything
#
dfiles = map(lambda x, d=test_zip_dir: os.path.join(d, x), dst_files)
env.Command(dfiles, unpack_zip_files, [
- "rm -rf %s" % os.path.join(unpack_zip_dir, pkg_version, 'build'),
- "rm -rf $TEST_ZIP_DIR",
- "python %s install --prefix=$TEST_ZIP_DIR" % \
+ Delete(os.path.join(unpack_zip_dir, pkg_version, 'build')),
+ Delete("$TEST_ZIP_DIR"),
+ '$PYTHON $PYTHONFLAGS "%s" install "--prefix=$TEST_ZIP_DIR" --standalone-lib' % \
os.path.join(unpack_zip_dir, pkg_version, 'setup.py'),
])
- if rpm:
+ if rpmbuild:
topdir = os.path.join(os.getcwd(), build, 'build',
'bdist.' + platform, 'rpm')
- BUILDdir = os.path.join(topdir, 'BUILD', pkg + '-' + version)
- RPMSdir = os.path.join(topdir, 'RPMS', 'noarch')
- SOURCESdir = os.path.join(topdir, 'SOURCES')
- SPECSdir = os.path.join(topdir, 'SPECS')
- SRPMSdir = os.path.join(topdir, 'SRPMS')
+ buildroot = os.path.join(os.getcwd(), 'build', 'rpm-buildroot')
+
+ BUILDdir = os.path.join(topdir, 'BUILD', pkg + '-' + version)
+ RPMSdir = os.path.join(topdir, 'RPMS', 'noarch')
+ SOURCESdir = os.path.join(topdir, 'SOURCES')
+ SPECSdir = os.path.join(topdir, 'SPECS')
+ SRPMSdir = os.path.join(topdir, 'SRPMS')
+ specfile_in = os.path.join('rpm', "%s.spec.in" % pkg)
specfile = os.path.join(SPECSdir, "%s-1.spec" % pkg_version)
sourcefile = os.path.join(SOURCESdir, "%s.tar.gz" % pkg_version);
- rpm = os.path.join(RPMSdir, "%s-1.noarch.rpm" % pkg_version)
+ noarch_rpm = os.path.join(RPMSdir, "%s-1.noarch.rpm" % pkg_version)
src_rpm = os.path.join(SRPMSdir, "%s-1.src.rpm" % pkg_version)
- env.InstallAs(specfile, os.path.join('rpm', "%s.spec" % pkg))
+    def spec_function(target, source, env):
+        """Generate the RPM .spec file from the template file.
+
+        This fills in the %files portion of the .spec file with a
+        list generated from our MANIFEST(s), so we don't have to
+        maintain multiple lists.
+        """
+        # The file list arrives via the RPM_FILES construction variable;
+        # it is also in the Action's varlist, so the .spec file is
+        # regenerated whenever the list of packaged files changes.
+        c = open(str(source[0]), 'rb').read()
+        c = string.replace(c, '__RPM_FILES__', env['RPM_FILES'])
+        open(str(target[0]), 'wb').write(c)
+
+ rpm_files.sort()
+ rpm_files_str = string.join(rpm_files, "\n") + "\n"
+ rpm_spec_env = env.Copy(RPM_FILES = rpm_files_str)
+ rpm_spec_action = Action(spec_function, varlist=['RPM_FILES'])
+ rpm_spec_env.Command(specfile, specfile_in, rpm_spec_action)
+
env.InstallAs(sourcefile, tar_gz)
+ Local(sourcefile)
- targets = [ rpm, src_rpm ]
- cmd = "rpm --define '_topdir $(%s$)' -ba $SOURCES" % topdir
+ targets = [ noarch_rpm, src_rpm ]
+ cmd = "$RPMBUILD --define '_topdir $(%s$)' --buildroot %s -ba $SOURCES" % (topdir, buildroot)
if not os.path.isdir(BUILDdir):
cmd = ("$( mkdir -p %s; $)" % BUILDdir) + cmd
env.Command(targets, specfile, cmd)
dfiles = map(lambda x, d=test_rpm_dir: os.path.join(d, 'usr', x),
dst_files)
env.Command(dfiles,
- rpm,
- "rpm2cpio $SOURCES | (cd $TEST_RPM_DIR && cpio -id)")
+ noarch_rpm,
+ "$RPM2CPIO $SOURCES | (cd $TEST_RPM_DIR && cpio -id)")
if dh_builddeb and fakeroot:
# Our Debian packaging builds directly into build/dist,
# so we don't need to add the .debs to install_targets.
deb = os.path.join('build', 'dist', "%s_%s-1_all.deb" % (pkg, version))
+ for d in p['debian_deps']:
+ b = env.SCons_revision(os.path.join(build, d), d)
+ env.Depends(deb, b)
+ Local(b)
env.Command(deb, build_src_files, [
- "fakeroot make -f debian/rules VERSION=$VERSION DH_COMPAT=$DH_COMPAT ENVOKED_BY_CONSTRUCT=1 binary-$PKG",
- "env DH_COMPAT=$DH_COMPAT dh_clean"
+ "cd %s && fakeroot make -f debian/rules PYTHON=$PYTHON BUILDDEB_OPTIONS=--destdir=../../build/dist binary" % build,
])
- env.Depends(deb, p['debian_deps'])
- dfiles = map(lambda x, d=test_deb_dir: os.path.join(d, 'usr', x),
- dst_files)
+ old = os.path.join('lib', 'scons', '')
+ new = os.path.join('lib', 'python2.2', 'site-packages', '')
+    def xxx(s, old=old, new=new):
+        # Map a prefix-relative install path to the path the .deb actually
+        # unpacks to: files under lib/scons/ (old) are relocated to
+        # lib/python2.2/site-packages/ (new), and everything is rooted
+        # under usr/.  old/new are bound as defaults at definition time.
+        if s[:len(old)] == old:
+            s = new + s[len(old):]
+        return os.path.join('usr', s)
+ dfiles = map(lambda x, t=test_deb_dir: os.path.join(t, x),
+ map(xxx, dst_files))
env.Command(dfiles,
deb,
"dpkg --fsys-tarfile $SOURCES | (cd $TEST_DEB_DIR && tar -xf -)")
+
#
# Use the Python distutils to generate the appropriate packages.
#
commands = [
- "rm -rf %s %s" % (os.path.join(build, 'build', 'lib'),
- os.path.join(build, 'build', 'scripts'))
+ Delete(os.path.join(build, 'build', 'lib')),
+ Delete(os.path.join(build, 'build', 'scripts')),
]
if distutils_formats:
+ commands.append(Delete(os.path.join(build,
+ 'build',
+ 'bdist.' + platform,
+ 'dumb')))
for format in distutils_formats:
- commands.append("python $SETUP_PY bdist_dumb -f %s" % format)
+ commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY bdist_dumb -f %s" % format)
- commands.append("python $SETUP_PY sdist --formats=%s" % \
+ commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY sdist --formats=%s" % \
string.join(distutils_formats, ','))
- commands.append("python $SETUP_PY bdist_wininst")
+ commands.append("$PYTHON $PYTHONFLAGS $SETUP_PY bdist_wininst")
env.Command(distutils_targets, build_src_files, commands)
+ #
+ # Now create local packages for people who want to let people
+ # build their SCons-buildable packages without having to
+ # install SCons.
+ #
+ s_l_v = '%s-local-%s' % (pkg, version)
+
+ local = os.path.join('build', pkg + '-local')
+ cwd_local = os.path.join(os.getcwd(), local)
+ cwd_local_slv = os.path.join(os.getcwd(), local, s_l_v)
+
+ local_tar_gz = os.path.join('build', 'dist', "%s.tar.gz" % s_l_v)
+ local_zip = os.path.join('build', 'dist', "%s.zip" % s_l_v)
+
+ commands = [
+ Delete(local),
+ '$PYTHON $PYTHONFLAGS $SETUP_PY install "--install-script=%s" "--install-lib=%s" --no-install-man --no-compile --standalone-lib --no-version-script' % \
+ (cwd_local, cwd_local_slv),
+ ]
+
+ for script in scripts:
+ #commands.append("mv %s/%s %s/%s.py" % (local, script, local, script))
+ local_script = os.path.join(local, script)
+ commands.append(Move(local_script + '.py', local_script))
+
+ rf = filter(lambda x: not x in scripts, raw_files)
+ rf = map(lambda x, slv=s_l_v: os.path.join(slv, x), rf)
+ for script in scripts:
+ rf.append("%s.py" % script)
+ local_targets = map(lambda x, s=local: os.path.join(s, x), rf)
+
+ env.Command(local_targets, build_src_files, commands)
+
+ scons_LICENSE = os.path.join(local, 'scons-LICENSE')
+ env.SCons_revision(scons_LICENSE, 'LICENSE-local')
+ local_targets.append(scons_LICENSE)
+
+ scons_README = os.path.join(local, 'scons-README')
+ env.SCons_revision(scons_README, 'README-local')
+ local_targets.append(scons_README)
+
+ if gzip:
+ env.Command(local_tar_gz,
+ local_targets,
+ "cd %s && tar czf $( ${TARGET.abspath} $) *" % local)
+
+ unpack_targets = map(lambda x, d=test_local_tar_gz_dir:
+ os.path.join(d, x),
+ rf)
+ commands = [Delete(test_local_tar_gz_dir),
+ Mkdir(test_local_tar_gz_dir),
+ "cd %s && tar xzf $( ${SOURCE.abspath} $)" % test_local_tar_gz_dir]
+
+ env.Command(unpack_targets, local_tar_gz, commands)
+
+ if zipit:
+ env.Command(local_zip, local_targets, zipit,
+ CD = local, PSV = '.')
+
+ unpack_targets = map(lambda x, d=test_local_zip_dir:
+ os.path.join(d, x),
+ rf)
+ commands = [Delete(test_local_zip_dir),
+ Mkdir(test_local_zip_dir),
+ unzipit]
+
+ env.Command(unpack_targets, local_zip, unzipit,
+ UNPACK_ZIP_DIR = test_local_zip_dir)
+
#
# And, lastly, install the appropriate packages in the
# appropriate subdirectory.
#
- env.Install(os.path.join('build', 'dist'), install_targets)
+ b_d_files = env.Install(os.path.join('build', 'dist'), install_targets)
+ Local(b_d_files)
#
#
#
# Documentation.
#
-BuildDir('build/doc', 'doc')
-
Export('env', 'whereis')
-SConscript('build/doc/SConscript')
+SConscript('doc/SConscript')
#
# If we're running in the actual Aegis project, pack up a complete
for line in map(lambda x: x[:-1], os.popen(cmd, "r").readlines()):
a = string.split(line)
if a[1] == "remove":
- if a[3][0] == '(':
- df.append(a[4])
- else:
- df.append(a[3])
+ df.append(a[-1])
cmd = "aegis -list -terse pf 2>/dev/null"
pf = map(lambda x: x[:-1], os.popen(cmd, "r").readlines())
- cmd = "aegis -list -terse cf 2>/dev/null"
+ cmd = "aegis -list -terse -c %s cf 2>/dev/null" % change
cf = map(lambda x: x[:-1], os.popen(cmd, "r").readlines())
u = {}
for f in pf + cf:
u[f] = 1
for f in df:
- del u[f]
- sfiles = filter(lambda x: x[-9:] != '.aeignore' and x[-9:] != '.sconsign',
+ try:
+ del u[f]
+ except KeyError:
+ pass
+ sfiles = filter(lambda x: x[-9:] != '.aeignore' and
+ x[-9:] != '.sconsign' and
+ x[-10:] != '.cvsignore',
u.keys())
if sfiles:
src_tar_gz = os.path.join('build', 'dist', '%s.tar.gz' % psv)
src_zip = os.path.join('build', 'dist', '%s.zip' % psv)
+ Local(src_tar_gz, src_zip)
+
for file in sfiles:
env.SCons_revision(os.path.join(b_ps, file), file)
b_ps_files = map(lambda x, d=b_ps: os.path.join(d, x), sfiles)
cmds = [
- "rm -rf %s" % b_psv,
- "cp -rp %s %s" % (b_ps, b_psv),
- "find %s -name .sconsign -exec rm {} \\;" % b_psv,
- "touch $TARGET",
+ Delete(b_psv),
+ Copy(b_psv, b_ps),
+ Touch("$TARGET"),
]
env.Command(b_psv_stamp, src_deps + b_ps_files, cmds)
+ apply(Local, b_ps_files, {})
+
if gzip:
env.Command(src_tar_gz, b_psv_stamp,
"tar cz${TAR_HFLAG} -f $TARGET -C build %s" % psv)
-
+
#
# Unpack the archive into build/unpack/scons-{version}.
#
unpack_tar_gz_files = map(lambda x, u=unpack_tar_gz_dir, psv=psv:
os.path.join(u, psv, x),
sfiles)
-
+
#
# We'd like to replace the last three lines with the following:
#
- # tar zxf $SOURCES -C $UNPACK_TAR_GZ_DIR
+ # tar zxf $SOURCES -C $UNPACK_TAR_GZ_DIR
#
# but that gives heartburn to Cygwin's tar, so work around it
# with separate zcat-tar-rm commands.
env.Command(unpack_tar_gz_files, src_tar_gz, [
- "rm -rf %s" % os.path.join(unpack_tar_gz_dir, psv),
- "zcat $SOURCES > .temp",
+ Delete(os.path.join(unpack_tar_gz_dir, psv)),
+ "$ZCAT $SOURCES > .temp",
"tar xf .temp -C $UNPACK_TAR_GZ_DIR",
- "rm -f .temp",
+ Delete(".temp"),
])
-
+
#
# Run setup.py in the unpacked subdirectory to "install" everything
# into our build/test subdirectory. The runtest.py script will set
#
dfiles = map(lambda x, d=test_src_tar_gz_dir: os.path.join(d, x),
dst_files)
- ENV = env.Dictionary('ENV')
- ENV['SCONS_LIB_DIR'] = os.path.join(unpack_tar_gz_dir, psv, 'src', 'engine')
+ scons_lib_dir = os.path.join(unpack_tar_gz_dir, psv, 'src', 'engine')
+ ENV = env.Dictionary('ENV').copy()
+ ENV['SCONS_LIB_DIR'] = scons_lib_dir
ENV['USERNAME'] = developer
- env.Copy(ENV = ENV).Command(dfiles, unpack_tar_gz_files, [
- "rm -rf %s" % os.path.join(unpack_tar_gz_dir,
- psv,
- 'build',
- 'scons',
- 'build'),
- "rm -rf $TEST_SRC_TAR_GZ_DIR",
- "cd %s && python %s %s" % \
+ env.Command(dfiles, unpack_tar_gz_files,
+ [
+ Delete(os.path.join(unpack_tar_gz_dir,
+ psv,
+ 'build',
+ 'scons',
+ 'build')),
+ Delete("$TEST_SRC_TAR_GZ_DIR"),
+ 'cd "%s" && $PYTHON $PYTHONFLAGS "%s" "%s"' % \
(os.path.join(unpack_tar_gz_dir, psv),
os.path.join('src', 'script', 'scons.py'),
os.path.join('build', 'scons')),
- "python %s install --prefix=$TEST_SRC_TAR_GZ_DIR" % \
+ '$PYTHON $PYTHONFLAGS "%s" install "--prefix=$TEST_SRC_TAR_GZ_DIR" --standalone-lib' % \
os.path.join(unpack_tar_gz_dir,
psv,
'build',
'scons',
'setup.py'),
- ])
+ ],
+ ENV = ENV)
if zipit:
- env.Copy(PSV = psv).Command(src_zip, b_psv_stamp, zipit)
-
+ env.Command(src_zip, b_psv_stamp, zipit, CD = 'build', PSV = psv)
+
#
# Unpack the archive into build/unpack/scons-{version}.
#
unpack_zip_files = map(lambda x, u=unpack_zip_dir, psv=psv:
os.path.join(u, psv, x),
sfiles)
-
- env.Command(unpack_zip_files, src_zip, unzipit)
-
+
+ env.Command(unpack_zip_files, src_zip, [
+ Delete(os.path.join(unpack_zip_dir, psv)),
+ unzipit
+ ])
+
#
# Run setup.py in the unpacked subdirectory to "install" everything
# into our build/test subdirectory. The runtest.py script will set
#
dfiles = map(lambda x, d=test_src_zip_dir: os.path.join(d, x),
dst_files)
- ENV = env.Dictionary('ENV')
- ENV['SCONS_LIB_DIR'] = os.path.join(unpack_zip_dir, psv, 'src', 'engine')
+ scons_lib_dir = os.path.join(unpack_zip_dir, psv, 'src', 'engine')
+ ENV = env.Dictionary('ENV').copy()
+ ENV['SCONS_LIB_DIR'] = scons_lib_dir
ENV['USERNAME'] = developer
- env.Copy(ENV = ENV).Command(dfiles, unpack_zip_files, [
- "rm -rf %s" % os.path.join(unpack_zip_dir,
- psv,
- 'build',
- 'scons',
- 'build'),
- "rm -rf $TEST_SRC_ZIP_DIR",
- "cd %s && python %s %s" % \
+ env.Command(dfiles, unpack_zip_files,
+ [
+ Delete(os.path.join(unpack_zip_dir,
+ psv,
+ 'build',
+ 'scons',
+ 'build')),
+ Delete("$TEST_SRC_ZIP_DIR"),
+ 'cd "%s" && $PYTHON $PYTHONFLAGS "%s" "%s"' % \
(os.path.join(unpack_zip_dir, psv),
os.path.join('src', 'script', 'scons.py'),
os.path.join('build', 'scons')),
- "python %s install --prefix=$TEST_SRC_ZIP_DIR" % \
+ '$PYTHON $PYTHONFLAGS "%s" install "--prefix=$TEST_SRC_ZIP_DIR" --standalone-lib' % \
os.path.join(unpack_zip_dir,
psv,
'build',
'scons',
'setup.py'),
- ])
+ ],
+ ENV = ENV)