5 # Permission is hereby granted, free of charge, to any person obtaining
6 # a copy of this software and associated documentation files (the
7 # "Software"), to deal in the Software without restriction, including
8 # without limitation the rights to use, copy, modify, merge, publish,
9 # distribute, sublicense, and/or sell copies of the Software, and to
10 # permit persons to whom the Software is furnished to do so, subject to
11 # the following conditions:
13 # The above copyright notice and this permission notice shall be included
14 # in all copies or substantial portions of the Software.
16 # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
17 # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
18 # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19 # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20 # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21 # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22 # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"

# Path to the Python interpreter under test, as quoted by the harness.
_python_ = TestSCons._python_

# Harness instance that drives every SCons invocation below.
test = TestSCons.TestSCons()
# Fixture script: sleep for argv[1] seconds, then concatenate the files
# named in argv[3:] into the output file named by argv[2].  The sleep
# gives each build command a measurable, controllable execution time.
test.write('sleep_cat.py', """\
time.sleep(int(sys.argv[1]))
fp = open(sys.argv[2], 'wb')
for arg in sys.argv[3:]:
    fp.write(open(arg, 'rb').read())
# SConstruct fixture: every target is built by sleep_cat.py, sleeping
# $SECONDS (settable from the command line as SLEEP=<n>) before
# concatenating its sources into its target.
test.write('SConstruct', """
env = Environment(PYTHON = r'%(_python_)s',
SLEEP_CAT = r'sleep_cat.py',
CATCOM = '$PYTHON $SLEEP_CAT $SECONDS $TARGET $SOURCES',
SECONDS = ARGUMENTS.get('SLEEP', '0'))
f1 = env.Command('f1.out', 'f1.in', '$CATCOM')
f2 = env.Command('f2.out', 'f2.in', '$CATCOM')
f3 = env.Command('f3.out', 'f3.in', '$CATCOM')
f4 = env.Command('f4.out', 'f4.in', '$CATCOM')
env.Command('output', [f1, f2, f3, f4], '$CATCOM')
# Create the four one-line source files consumed by the SConstruct.
for name in ('f1', 'f2', 'f3', 'f4'):
    test.write(name + '.in', name + '.in\n')
# Before anything else, make sure we get valid --debug=time results
# when just running the help option.
# (A crash here would invalidate every timing check that follows.)
test.run(arguments = "-h --debug=time")
74 return float(re.search(match, s).group(1))
def within_tolerance(expected, actual, tolerance):
    """Return True if expected is within the given fractional tolerance
    of actual (e.g. tolerance=0.01 means "within 1% of actual").

    Written as a multiplication, |expected - actual| <= tolerance * |actual|,
    instead of the division abs((expected-actual)/actual), which is
    algebraically identical for any non-zero actual but no longer raises
    ZeroDivisionError when actual == 0.0 (a real possibility for the
    command-time figures compared below).
    """
    return abs(expected - actual) <= tolerance * abs(actual)
def get_total_time(output):
    """Extract the reported 'Total build time' from --debug=time output."""
    return num(output, r'Total build time: (\d+\.\d+) seconds')
def get_sconscript_time(output):
    """Extract the reported SConscript-execution time from --debug=time output."""
    return num(output, r'Total SConscript file execution time: (\d+\.\d+) seconds')
def get_scons_time(output):
    """Extract the reported SCons-execution time from --debug=time output."""
    return num(output, r'Total SCons execution time: (\d+\.\d+) seconds')
def get_command_time(output):
    """Extract the reported command-execution time from --debug=time output."""
    return num(output, r'Total command execution time: (\d+\.\d+) seconds')
# Try to make our results a little more accurate and repeatable by
# measuring Python overhead executing a minimal file, and reading the
# scons.py script itself from disk so that it's already been cached.
test.write('pass.py', "pass\n")
test.read(test.program)

# Time a bare interpreter run of the trivial script; this approximates
# the fixed Python startup cost baked into every SCons invocation, which
# we subtract from the end-to-end wall-clock measurement below.
start_time = time.time()
test.run(program=TestSCons.python, arguments=test.workpath('pass.py'))
overhead = time.time() - start_time
# Full serial (-j1) build, timed end-to-end with no artificial sleep.
start_time = time.time()
test.run(arguments = "-j1 --debug=time . SLEEP=0")
complete_time = time.time() - start_time

# Our independent estimate of what SCons should report as the total
# build time: wall clock minus the measured interpreter startup cost.
expected_total_time = complete_time - overhead

# Sum the per-command execution times SCons printed; this should match
# the reported "Total command execution time".  A list comprehension
# plus the sum() builtin replaces the original
# reduce(lambda x, y: x + y, times, 0.0), which did the same thing less
# idiomatically (and reduce is no longer a builtin on Python 3).
pattern = r'Command execution time: (\d+\.\d+) seconds'
times = [float(t) for t in re.findall(pattern, test.stdout())]
expected_command_time = sum(times, 0.0)
stdout = test.stdout()

# Pull the four summary figures SCons reports under --debug=time so
# they can be cross-checked against our independent measurements.
total_time = get_total_time(stdout)
sconscript_time = get_sconscript_time(stdout)
scons_time = get_scons_time(stdout)
command_time = get_command_time(stdout)
128 if not within_tolerance(expected_command_time, command_time, 0.01):
130 SCons -j1 reported a total command execution time of %(command_time)s,
131 but command execution times really totalled %(expected_command_time)s,
132 outside of the 1%% tolerance.
135 added_times = sconscript_time+scons_time+command_time
136 if not within_tolerance(total_time, added_times, 0.01):
138 SCons -j1 reported a total build time of %(total_time)s,
139 but the various execution times actually totalled %(added_times)s,
140 outside of the 1%% tolerance.
143 if not within_tolerance(total_time, expected_total_time, 0.15):
144 # This tolerance check seems empirically to work fine if there's
145 # a light load on the system, but on a heavily loaded system the
146 # timings get screwy and it can fail frequently. Some obvious
147 # attempts to work around the problem didn't, so just treat it as
150 Warning: SCons -j1 reported total build time of %(total_time)s,
151 but the actual measured build time was %(expected_total_time)s
152 (end-to-end time of %(complete_time)s less Python overhead of %(overhead)s),
153 outside of the 15%% tolerance.
# Dump the captured SCons output plus any accumulated failure/warning
# messages.  str.join replaces the long-deprecated string.join(), and
# the parenthesized single-argument print behaves identically under
# both Python 2 and Python 3.
if failures or warnings:
    print('\n'.join([test.stdout()] + failures + warnings))
# An up-to-date rebuild executes no commands, so the reported total
# command execution time must be exactly 0.0 (printed as such, so a
# float equality comparison is safe here).
test.run(arguments = "--debug=time . SLEEP=0")

command_time = get_command_time(test.stdout())
if command_time != 0.0:
    # Parenthesized single-argument print works identically under
    # Python 2 and Python 3.
    print("Up-to-date run received non-zero command time of %s" % command_time)
# Clean all targets, then rebuild in parallel (-j4) with each command
# sleeping one second, to exercise the timing report under -j.
test.run(arguments = "-c")

test.run(arguments = "-j4 --debug=time . SLEEP=1")
stdout = test.stdout()

# Extract the same four summary figures for the parallel (-j4) run.
total_time = get_total_time(stdout)
sconscript_time = get_sconscript_time(stdout)
scons_time = get_scons_time(stdout)
command_time = get_command_time(stdout)
185 added_times = sconscript_time+scons_time+command_time
186 if not within_tolerance(total_time, added_times, 0.01):
188 SCons -j4 reported a total build time of %(total_time)s,
189 but the various execution times actually totalled %(added_times)s,
190 outside of the 1%% tolerance.
194 print string.join([test.stdout()] + failures, '\n')
# The parallel up-to-date rebuild must likewise report zero total
# command execution time.
test.run(arguments = "-j4 --debug=time . SLEEP=1")

command_time = get_command_time(test.stdout())
if command_time != 0.0:
    # Parenthesized single-argument print works identically under
    # Python 2 and Python 3.
    print("Up-to-date run received non-zero command time of %s" % command_time)