Wed, 31 Dec 2014 06:09:35 +0100
Cloned upstream origin tor-browser at tor-browser-31.3.0esr-4.5-1-build1,
revision ID fc1c9ff7c1b2defdbc039f12214767608f46423f, for hacking purposes.
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import math, os, posixpath, shlex, shutil, subprocess, sys, traceback

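# The helper below assumes this script sits in a directory directly under
# js/src (checked via the jsapi.h assertion) and prepends js/src/lib and
# js/src/tests/lib to sys.path, so that the jittests and tests modules
# imported further down resolve against the in-tree copies.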
def add_libdir_to_path():
    from os.path import dirname, exists, join, realpath
    js_src_dir = dirname(dirname(realpath(sys.argv[0])))
    assert exists(join(js_src_dir, 'jsapi.h'))
    sys.path.insert(0, join(js_src_dir, 'lib'))
    sys.path.insert(0, join(js_src_dir, 'tests', 'lib'))

add_libdir_to_path()

import jittests
from tests import TBPL_FLAGS

def main(argv):

    # If no multiprocessing is available, fall back to serial test execution.
    max_jobs_default = 1
    if jittests.HAVE_MULTIPROCESSING:
        try:
            max_jobs_default = jittests.cpu_count()
        except NotImplementedError:
            pass

    # The [TESTS] optional arguments are paths of test files relative
    # to the jit-test/tests directory.

    from optparse import OptionParser
    op = OptionParser(usage='%prog [options] JS_SHELL [TESTS]')
    op.add_option('-s', '--show-cmd', dest='show_cmd', action='store_true',
                  help='show js shell command run')
    op.add_option('-f', '--show-failed-cmd', dest='show_failed',
                  action='store_true', help='show command lines of failed tests')
    op.add_option('-o', '--show-output', dest='show_output', action='store_true',
                  help='show output from js shell')
    op.add_option('-x', '--exclude', dest='exclude', action='append',
                  help='exclude given test dir or path')
    op.add_option('--no-slow', dest='run_slow', action='store_false',
                  help='do not run tests marked as slow')
    op.add_option('-t', '--timeout', dest='timeout', type=float, default=150.0,
                  help='set test timeout in seconds')
    op.add_option('--no-progress', dest='hide_progress', action='store_true',
                  help='hide progress bar')
    op.add_option('--tinderbox', dest='tinderbox', action='store_true',
                  help='Tinderbox-parseable output format')
    op.add_option('--args', dest='shell_args', default='',
                  help='extra args to pass to the JS shell')
    op.add_option('-w', '--write-failures', dest='write_failures', metavar='FILE',
                  help='Write a list of failed tests to [FILE]')
    op.add_option('-r', '--read-tests', dest='read_tests', metavar='FILE',
                  help='Run test files listed in [FILE]')
    op.add_option('-R', '--retest', dest='retest', metavar='FILE',
                  help='Retest using test list file [FILE]')
    op.add_option('-g', '--debug', dest='debug', action='store_true',
                  help='Run test in gdb')
    op.add_option('--valgrind', dest='valgrind', action='store_true',
                  help='Enable the |valgrind| flag, if valgrind is in $PATH.')
    op.add_option('--valgrind-all', dest='valgrind_all', action='store_true',
                  help='Run all tests with valgrind, if valgrind is in $PATH.')
    op.add_option('--jitflags', dest='jitflags', default='',
                  help='Example: --jitflags=m,mn to run each test with "-m" and "-m -n" [default="%default"]. ' +
                       'Long flags, such as "--ion-eager", should be set using --args.')
    op.add_option('--avoid-stdio', dest='avoid_stdio', action='store_true',
                  help='Use js-shell file indirection instead of piping stdio.')
    op.add_option('--write-failure-output', dest='write_failure_output', action='store_true',
                  help='With --write-failures=FILE, additionally write the output of failed tests to [FILE]')
    op.add_option('--ion', dest='ion', action='store_true',
                  help='Run tests once with --ion-eager and once with --baseline-eager (ignores --jitflags)')
    op.add_option('--tbpl', dest='tbpl', action='store_true',
                  help='Run tests with all IonMonkey option combinations (ignores --jitflags)')
    op.add_option('-j', '--worker-count', dest='max_jobs', type=int, default=max_jobs_default,
                  help='Number of tests to run in parallel (default %default)')
    op.add_option('--remote', action='store_true',
                  help='Run tests on a remote device')
    op.add_option('--deviceIP', action='store',
                  type='string', dest='device_ip',
                  help='IP address of remote device to test')
    op.add_option('--devicePort', action='store',
                  type=int, dest='device_port', default=20701,
                  help='port of remote device to test')
    op.add_option('--deviceSerial', action='store',
                  type='string', dest='device_serial', default=None,
                  help='ADB device serial number of remote device to test')
    op.add_option('--deviceTransport', action='store',
                  type='string', dest='device_transport', default='sut',
                  help='The transport to use to communicate with device: [adb|sut]; default=sut')
    op.add_option('--remoteTestRoot', dest='remote_test_root', action='store',
                  type='string', default='/data/local/tests',
                  help='The remote directory to use as test root (e.g. /data/local/tests)')
    op.add_option('--localLib', dest='local_lib', action='store',
                  type='string',
                  help='The location of libraries to push -- preferably stripped')
    op.add_option('--repeat', type=int, default=1,
                  help='Repeat tests the given number of times.')
    op.add_option('--this-chunk', type=int, default=1,
                  help='The test chunk to run.')
    op.add_option('--total-chunks', type=int, default=1,
                  help='The total number of test chunks.')

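    # A few hypothetical invocations, for illustration only (the shell path
    # and test names below are placeholders, not defaults of this script):
    #   jit_test.py /path/to/js                # run the whole suite
    #   jit_test.py /path/to/js basic          # run only tests under tests/basic
    #   jit_test.py --tbpl -j 8 /path/to/js    # all TBPL flag variants, 8 workers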
    options, args = op.parse_args(argv)
    if len(args) < 1:
        op.error('missing JS_SHELL argument')
    # We need to make sure we are using backslashes on Windows.
    test_args = args[1:]

    if jittests.stdio_might_be_broken():
        # Prefer erring on the side of caution and not using stdio if
        # it might be broken on this platform. The file-redirect
        # fallback should work on any platform, so at worst by
        # guessing wrong we might have slowed down the tests a bit.
        #
        # XXX technically we could check for broken stdio, but it
        # really seems like overkill.
        options.avoid_stdio = True

    if options.retest:
        options.read_tests = options.retest
        options.write_failures = options.retest

    test_list = []
    read_all = True

    if test_args:
        read_all = False
        for arg in test_args:
            test_list += jittests.find_tests(arg)

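    # A --read-tests/--retest file is plain text with one test path per line,
    # relative to jit-test/tests (each line is joined onto jittests.TEST_DIR).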
    if options.read_tests:
        read_all = False
        try:
            f = open(options.read_tests)
            for line in f:
                test_list.append(os.path.join(jittests.TEST_DIR, line.strip('\n')))
            f.close()
        except IOError:
            if options.retest:
                read_all = True
            else:
                sys.stderr.write("Exception thrown trying to read test file '%s'\n" %
                                 options.read_tests)
                traceback.print_exc()
                sys.stderr.write('---\n')

    if read_all:
        test_list = jittests.find_tests()

    if options.exclude:
        exclude_list = []
        for exclude in options.exclude:
            exclude_list += jittests.find_tests(exclude)
        test_list = [test for test in test_list if test not in set(exclude_list)]

    if not test_list:
        print >> sys.stderr, "No tests found matching command line arguments."
        sys.exit(0)

    test_list = [jittests.Test.from_file(_, options) for _ in test_list]

    if not options.run_slow:
        test_list = [_ for _ in test_list if not _.slow]

    # If chunking is enabled, determine which tests are part of this chunk.
    # This code was adapted from testing/mochitest/runtestsremote.py.
    if options.total_chunks > 1:
        total_tests = len(test_list)
        tests_per_chunk = math.ceil(total_tests / float(options.total_chunks))
        start = int(round((options.this_chunk - 1) * tests_per_chunk))
        end = int(round(options.this_chunk * tests_per_chunk))
        test_list = test_list[start:end]
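        # Worked example: 10 tests with --total-chunks=3 gives
        # tests_per_chunk = ceil(10/3) = 4.0, so chunk 1 runs tests[0:4],
        # chunk 2 runs tests[4:8], and chunk 3 runs the rest, tests[8:10]
        # (the computed slice end of 12 is clipped to the list length).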

    # The full test list is ready. Now create copies for each JIT configuration.
    job_list = []
    if options.tbpl:
        # Running all bits would take forever. Instead, we test a few interesting combinations.
        for test in test_list:
            for variant in TBPL_FLAGS:
                new_test = test.copy()
                new_test.jitflags.extend(variant)
                job_list.append(new_test)
    elif options.ion:
        flags = [['--baseline-eager'], ['--ion-eager', '--ion-parallel-compile=off']]
        for test in test_list:
            for variant in flags:
                new_test = test.copy()
                new_test.jitflags.extend(variant)
                job_list.append(new_test)
    else:
        jitflags_list = jittests.parse_jitflags(options)
        for test in test_list:
            for jitflags in jitflags_list:
                new_test = test.copy()
                new_test.jitflags.extend(jitflags)
                job_list.append(new_test)

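    # Each test is queued once per flag variant; e.g. with --jitflags=m,mn
    # (parsed by jittests.parse_jitflags) every test appears twice in
    # job_list, once to run with "-m" and once with "-m -n".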
    prefix = [os.path.abspath(args[0])] + shlex.split(options.shell_args)
    prolog = os.path.join(jittests.LIB_DIR, 'prolog.js')
    if options.remote:
        prolog = posixpath.join(options.remote_test_root, 'jit-tests', 'jit-tests', 'lib', 'prolog.js')

    prefix += ['-f', prolog]
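    # prefix is now roughly [<abs path to JS shell>, <tokens from --args>, '-f', <prolog.js>];
    # the per-job jitflags and test path are presumably appended later inside
    # jittests (see tc.command() in the --debug branch below).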

    # Avoid racing on the cache by having the js shell create a new cache
    # subdir for each process. The js shell takes care of deleting these
    # subdirs when the process exits.
    if options.max_jobs > 1 and jittests.HAVE_MULTIPROCESSING:
        prefix += ['--js-cache-per-process']

    # Clean up any remnants from previous crashes, etc.
    shutil.rmtree(jittests.JS_CACHE_DIR, ignore_errors=True)
    os.mkdir(jittests.JS_CACHE_DIR)

    if options.debug:
        if len(job_list) > 1:
            print 'Multiple tests match command line arguments, debugger can only run one'
            for tc in job_list:
                print ' %s' % tc.path
            sys.exit(1)

        tc = job_list[0]
        cmd = ['gdb', '--args'] + tc.command(prefix, jittests.LIB_DIR)
        subprocess.call(cmd)
        sys.exit()

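    # Exit status: 2 if any test fails, 1 if the shell binary does not exist
    # (or, above, if --debug matched more than one test), 0 otherwise.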
    try:
        ok = None
        if options.remote:
            ok = jittests.run_tests_remote(job_list, prefix, options)
        elif options.max_jobs > 1 and jittests.HAVE_MULTIPROCESSING:
            ok = jittests.run_tests_parallel(job_list, prefix, options)
        else:
            ok = jittests.run_tests(job_list, prefix, options)
        if not ok:
            sys.exit(2)
    except OSError:
        if not os.path.exists(prefix[0]):
            print >> sys.stderr, "JS shell argument: file does not exist: '%s'" % prefix[0]
            sys.exit(1)
        else:
            raise

if __name__ == '__main__':
    main(sys.argv[1:])