testing/mochitest/tests/SimpleTest/TestRunner.js

author:      Michael Schloh von Bennewitz <michael@schloh.com>
date:        Thu, 22 Jan 2015 13:21:57 +0100
branch:      TOR_BUG_9701
changeset:   15:b8a032363ba2
permissions: -rw-r--r--

Incorporate requested changes from Mozilla in review:
https://bugzilla.mozilla.org/show_bug.cgi?id=1123480#c6

/* -*- js-indent-level: 4 -*- */
/*
 * e10s event dispatcher from content->chrome
 *
 * type = eventName (QuitApplication)
 * data = json object {"filename":filename} <- for LoggerInit
 */
function getElement(id) {
    return ((typeof(id) == "string") ?
        document.getElementById(id) : id);
}

this.$ = this.getElement;

function contentDispatchEvent(type, data, sync) {
    if (typeof(data) == "undefined") {
        data = {};
    }

    var event = new CustomEvent("contentEvent", {
        bubbles: true,
        detail: {
            "sync": sync,
            "type": type,
            "data": JSON.stringify(data)
        }
    });
    document.dispatchEvent(event);
}

function contentAsyncEvent(type, data) {
    contentDispatchEvent(type, data, 0);
}
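
// Usage sketch (illustration only, not part of the original file): a mochitest
// content page could notify the chrome side through the dispatcher above,
// assuming the harness installs a chrome-side listener for the "contentEvent"
// CustomEvent. The event names follow the comment at the top of this file;
// the log filename below is purely hypothetical.
//
//     contentAsyncEvent("LoggerInit", {"filename": "/tmp/mochitest.log"});
//     contentAsyncEvent("QuitApplication");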

/* Helper Function */
function extend(obj, /* optional */ skip) {
    // Extend an array with an array-like object starting
    // from the skip index
    if (!skip) {
        skip = 0;
    }
    // Declare the result outside the guard so a falsy |obj| yields an empty
    // array instead of undefined.
    var ret = [];
    if (obj) {
        var l = obj.length;
        for (var i = skip; i < l; i++) {
            ret.push(obj[i]);
        }
    }
    return ret;
}

function flattenArguments(lst/* ...*/) {
    var res = [];
    var args = extend(arguments);
    while (args.length) {
        var o = args.shift();
        if (o && typeof(o) == "object" && typeof(o.length) == "number") {
            for (var i = o.length - 1; i >= 0; i--) {
                args.unshift(o[i]);
            }
        } else {
            res.push(o);
        }
    }
    return res;
}
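
// Behaviour sketch for the two helpers above (comments only; the values shown
// are what the code computes, not harness output):
//
//     extend([10, 20, 30], 1);             // -> [20, 30]
//     flattenArguments(1, [2, [3, 4]], 5); // -> [1, 2, 3, 4, 5]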

/**
 * TestRunner: A test runner for SimpleTest
 * TODO:
 *
 *  * Avoid moving iframes: That causes reloads on mozilla and opera.
 *
 *
 **/
var TestRunner = {};
TestRunner.logEnabled = false;
TestRunner._currentTest = 0;
TestRunner._lastTestFinished = -1;
TestRunner._loopIsRestarting = false;
TestRunner.currentTestURL = "";
TestRunner.originalTestURL = "";
TestRunner._urls = [];
TestRunner._lastAssertionCount = 0;
TestRunner._expectedMinAsserts = 0;
TestRunner._expectedMaxAsserts = 0;

TestRunner.timeout = 5 * 60 * 1000; // 5 minutes.
TestRunner.maxTimeouts = 4; // halt testing after too many timeouts
TestRunner.runSlower = false;
TestRunner.dumpOutputDirectory = "";
TestRunner.dumpAboutMemoryAfterTest = false;
TestRunner.dumpDMDAfterTest = false;
TestRunner.quiet = false;
TestRunner.slowestTestTime = 0;
TestRunner.slowestTestURL = "";

TestRunner._expectingProcessCrash = false;

/**
 * Make sure the tests don't hang indefinitely.
 **/
TestRunner._numTimeouts = 0;
TestRunner._currentTestStartTime = new Date().valueOf();
TestRunner._timeoutFactor = 1;

TestRunner._checkForHangs = function() {
    function reportError(win, msg) {
        if ("SimpleTest" in win) {
            win.SimpleTest.ok(false, msg);
        } else if ("W3CTest" in win) {
            win.W3CTest.logFailure(msg);
        }
    }

    function killTest(win) {
        if ("SimpleTest" in win) {
            win.SimpleTest.finish();
        } else if ("W3CTest" in win) {
            win.W3CTest.timeout();
        }
    }

    if (TestRunner._currentTest < TestRunner._urls.length) {
        var runtime = new Date().valueOf() - TestRunner._currentTestStartTime;
        if (runtime >= TestRunner.timeout * TestRunner._timeoutFactor) {
            var frameWindow = $('testframe').contentWindow.wrappedJSObject ||
                              $('testframe').contentWindow;
            reportError(frameWindow, "Test timed out.");

            // If we have too many timeouts, give up. We don't want to wait hours
            // for results if some bug causes lots of tests to time out.
            if (++TestRunner._numTimeouts >= TestRunner.maxTimeouts) {
                TestRunner._haltTests = true;

                TestRunner.currentTestURL = "(SimpleTest/TestRunner.js)";
                reportError(frameWindow, TestRunner.maxTimeouts + " test timeouts, giving up.");
                var skippedTests = TestRunner._urls.length - TestRunner._currentTest;
                reportError(frameWindow, "Skipping " + skippedTests + " remaining tests.");
            }

            // Add a little (1 second) delay to ensure automation.py has time to notice
            // the "Test timed out" log and process it (= take a screenshot).
            setTimeout(function delayedKillTest() { killTest(frameWindow); }, 1000);

            if (TestRunner._haltTests)
                return;
        }

        setTimeout(TestRunner._checkForHangs, 30000);
    }
};

TestRunner.requestLongerTimeout = function(factor) {
    TestRunner._timeoutFactor = factor;
};
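
// Illustrative use (not from the original file): a test that legitimately
// needs more than the default five minutes can raise the hang watchdog's
// budget. With a factor of 2 the check in _checkForHangs allows roughly ten
// minutes before the test is reported as timed out; tests would typically
// reach this through the SimpleTest wrapper rather than calling TestRunner
// directly.
//
//     TestRunner.requestLongerTimeout(2);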

/**
 * This is used to loop tests
 **/
TestRunner.repeat = 0;
TestRunner._currentLoop = 1;

TestRunner.expectAssertions = function(min, max) {
    if (typeof(max) == "undefined") {
        max = min;
    }
    if (typeof(min) != "number" || typeof(max) != "number" ||
        min < 0 || max < min) {
        throw "bad parameter to expectAssertions";
    }
    TestRunner._expectedMinAsserts = min;
    TestRunner._expectedMaxAsserts = max;
};
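
// Illustrative calls (not from the original file); the declared range is
// compared against the debug-build assertion counter in testUnloaded() below:
//
//     TestRunner.expectAssertions(2);    // exactly two assertions expected
//     TestRunner.expectAssertions(0, 1); // at most one assertion tolerated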

/**
 * This function is called after generating the summary.
 **/
TestRunner.onComplete = null;

/**
 * Adds a failed test case to a list so we can rerun only the failed tests
 **/
TestRunner._failedTests = {};
TestRunner._failureFile = "";

TestRunner.addFailedTest = function(testName) {
    if (TestRunner._failedTests[testName] == undefined) {
        TestRunner._failedTests[testName] = "";
    }
};

TestRunner.setFailureFile = function(fileName) {
    TestRunner._failureFile = fileName;
};

TestRunner.generateFailureList = function () {
    if (TestRunner._failureFile) {
        var failures = new SpecialPowersLogger(TestRunner._failureFile);
        failures.log(JSON.stringify(TestRunner._failedTests));
        failures.close();
    }
};

/**
 * If logEnabled is true, this is the logger that will be used.
 **/
TestRunner.logger = LogController;

TestRunner.log = function(msg) {
    if (TestRunner.logEnabled) {
        TestRunner.logger.log(msg);
    } else {
        dump(msg + "\n");
    }
};

TestRunner.error = function(msg) {
    if (TestRunner.logEnabled) {
        TestRunner.logger.error(msg);
    } else {
        dump(msg + "\n");
    }

    if (TestRunner.runUntilFailure) {
        TestRunner._haltTests = true;
    }

    if (TestRunner.debugOnFailure) {
        // You've hit this line because you requested to break into the
        // debugger upon a testcase failure on your test run.
        debugger;
    }
};

/**
 * Toggle element visibility
 **/
TestRunner._toggle = function(el) {
    if (el.className == "noshow") {
        el.className = "";
        el.style.cssText = "";
    } else {
        el.className = "noshow";
        el.style.cssText = "width:0px; height:0px; border:0px;";
    }
};

/**
 * Creates the iframe that contains a test
 **/
TestRunner._makeIframe = function (url, retry) {
    var iframe = $('testframe');
    if (url != "about:blank" &&
        (("hasFocus" in document && !document.hasFocus()) ||
         ("activeElement" in document && document.activeElement != iframe))) {

        contentAsyncEvent("Focus");
        window.focus();
        SpecialPowers.focus();
        iframe.focus();
        if (retry < 3) {
            // Use a closure rather than a string argument to setTimeout so the
            // URL is not re-parsed as code (and cannot break on quoting).
            window.setTimeout(function () {
                TestRunner._makeIframe(url, retry + 1);
            }, 1000);
            return;
        }

        TestRunner.log("Error: Unable to restore focus, expect failures and timeouts.");
    }
    window.scrollTo(0, $('indicator').offsetTop);
    iframe.src = url;
    iframe.name = url;
    iframe.width = "500";
    return iframe;
};

/**
 * Returns the current test URL.
 * We use this to tell whether the test has navigated to another test without
 * being finished first.
 */
TestRunner.getLoadedTestURL = function () {
    var prefix = "";
    // handle mochitest-chrome URIs
    if ($('testframe').contentWindow.location.protocol == "chrome:") {
        prefix = "chrome://mochitests";
    }
    return prefix + $('testframe').contentWindow.location.pathname;
};

/**
 * TestRunner entry point.
 *
 * The arguments are the URLs of the tests to be run.
 *
 **/
TestRunner.runTests = function (/*url...*/) {
    TestRunner.log("SimpleTest START");
    TestRunner.originalTestURL = $("current-test").innerHTML;

    SpecialPowers.registerProcessCrashObservers();

    TestRunner._urls = flattenArguments(arguments);
    $('testframe').src = "";
    TestRunner._checkForHangs();
    TestRunner.runNextTest();
};
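
// Hypothetical invocation (illustration only): the harness hands the runner a
// flat or nested list of test URLs, which flattenArguments() above normalizes
// into a single array. The paths shown here are made up.
//
//     TestRunner.runTests("/tests/example/test_first.html",
//                         ["/tests/example/test_second.html",
//                          "/tests/example/test_third.html"]);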

/**
 * Used for running a set of tests in a loop for debugging purposes
 * Takes an array of URLs
 **/
TestRunner.resetTests = function(listURLs) {
    TestRunner._currentTest = 0;
    // Reset our "Current-test" line - functionality depends on it
    $("current-test").innerHTML = TestRunner.originalTestURL;
    if (TestRunner.logEnabled)
        TestRunner.log("SimpleTest START Loop " + TestRunner._currentLoop);

    TestRunner._urls = listURLs;
    $('testframe').src = "";
    TestRunner._checkForHangs();
    TestRunner.runNextTest();
};

/**
 * Run the next test. If no test remains, calls onComplete().
 **/
TestRunner._haltTests = false;
TestRunner.runNextTest = function() {
    if (TestRunner._currentTest < TestRunner._urls.length &&
        !TestRunner._haltTests)
    {
        var url = TestRunner._urls[TestRunner._currentTest];
        TestRunner.currentTestURL = url;

        $("current-test-path").innerHTML = url;

        TestRunner._currentTestStartTime = new Date().valueOf();
        TestRunner._timeoutFactor = 1;
        TestRunner._expectedMinAsserts = 0;
        TestRunner._expectedMaxAsserts = 0;

        TestRunner.log("TEST-START | " + url); // used by automation.py

        TestRunner._makeIframe(url, 0);
    } else {
        $("current-test").innerHTML = "<b>Finished</b>";
        TestRunner._makeIframe("about:blank", 0);

        if (parseInt($("pass-count").innerHTML) == 0 &&
            parseInt($("fail-count").innerHTML) == 0 &&
            parseInt($("todo-count").innerHTML) == 0)
        {
            // No |$('testframe').contentWindow|, so manually update: ...
            // ... the log,
            TestRunner.error("TEST-UNEXPECTED-FAIL | (SimpleTest/TestRunner.js) | No checks actually run.");
            // ... the count,
            $("fail-count").innerHTML = 1;
            // ... the indicator.
            var indicator = $("indicator");
            indicator.innerHTML = "Status: Fail (No checks actually run)";
            indicator.style.backgroundColor = "red";
        }

        SpecialPowers.unregisterProcessCrashObservers();

        TestRunner.log("TEST-START | Shutdown"); // used by automation.py
        TestRunner.log("Passed: " + $("pass-count").innerHTML);
        TestRunner.log("Failed: " + $("fail-count").innerHTML);
        TestRunner.log("Todo: " + $("todo-count").innerHTML);
        TestRunner.log("Slowest: " + TestRunner.slowestTestTime + 'ms - ' + TestRunner.slowestTestURL);
        // If we are looping, don't send this because it closes the log file
        if (TestRunner.repeat == 0) {
            TestRunner.log("SimpleTest FINISHED");
        }

        if (TestRunner.repeat == 0 && TestRunner.onComplete) {
            TestRunner.onComplete();
        }

        if (TestRunner._currentLoop <= TestRunner.repeat && !TestRunner._haltTests) {
            TestRunner._currentLoop++;
            TestRunner.resetTests(TestRunner._urls);
            TestRunner._loopIsRestarting = true;
        } else {
            // Loops are finished
            if (TestRunner.logEnabled) {
                TestRunner.log("TEST-INFO | Ran " + TestRunner._currentLoop + " Loops");
                TestRunner.log("SimpleTest FINISHED");
            }

            if (TestRunner.onComplete)
                TestRunner.onComplete();
        }
        TestRunner.generateFailureList();
    }
};

TestRunner.expectChildProcessCrash = function() {
    TestRunner._expectingProcessCrash = true;
};
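
// Illustrative pattern (not from the original file): a test that deliberately
// crashes a child process would announce it first, so the crash-dump cleanup
// in testFinished() below treats the resulting dump files as expected rather
// than reporting them as failures.
//
//     TestRunner.expectChildProcessCrash();
//     // ...then trigger the intentional child-process crash...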

/**
 * This stub is called by SimpleTest when a test is finished.
 **/
TestRunner.testFinished = function(tests) {
    // Prevent a test from calling finish() multiple times before we
    // have a chance to unload it.
    if (TestRunner._currentTest == TestRunner._lastTestFinished &&
        !TestRunner._loopIsRestarting) {
        TestRunner.error("TEST-UNEXPECTED-FAIL | " +
                         TestRunner.currentTestURL +
                         " | called finish() multiple times");
        TestRunner.updateUI([{ result: false }]);
        return;
    }
    TestRunner._lastTestFinished = TestRunner._currentTest;
    TestRunner._loopIsRestarting = false;

    MemoryStats.dump(TestRunner.log, TestRunner._currentTest,
                     TestRunner.currentTestURL,
                     TestRunner.dumpOutputDirectory,
                     TestRunner.dumpAboutMemoryAfterTest,
                     TestRunner.dumpDMDAfterTest);

    function cleanUpCrashDumpFiles() {
        if (!SpecialPowers.removeExpectedCrashDumpFiles(TestRunner._expectingProcessCrash)) {
            TestRunner.error("TEST-UNEXPECTED-FAIL | " +
                             TestRunner.currentTestURL +
                             " | This test did not leave any crash dumps behind, but we were expecting some!");
            tests.push({ result: false });
        }
        var unexpectedCrashDumpFiles =
            SpecialPowers.findUnexpectedCrashDumpFiles();
        TestRunner._expectingProcessCrash = false;
        if (unexpectedCrashDumpFiles.length) {
            TestRunner.error("TEST-UNEXPECTED-FAIL | " +
                             TestRunner.currentTestURL +
                             " | This test left crash dumps behind, but we " +
                             "weren't expecting it to!");
            tests.push({ result: false });
            unexpectedCrashDumpFiles.sort().forEach(function(aFilename) {
                TestRunner.log("TEST-INFO | Found unexpected crash dump file " +
                               aFilename + ".");
            });
        }
    }

    function runNextTest() {
        if (TestRunner.currentTestURL != TestRunner.getLoadedTestURL()) {
            TestRunner.error("TEST-UNEXPECTED-FAIL | " +
                             TestRunner.currentTestURL +
                             " | " + TestRunner.getLoadedTestURL() +
                             " finished in a non-clean fashion, probably" +
                             " because it didn't call SimpleTest.finish()");
            tests.push({ result: false });
        }

        var runtime = new Date().valueOf() - TestRunner._currentTestStartTime;
        TestRunner.log("TEST-END | " +
                       TestRunner.currentTestURL +
                       " | finished in " + runtime + "ms");
        if (TestRunner.slowestTestTime < runtime && TestRunner._timeoutFactor == 1) {
            TestRunner.slowestTestTime = runtime;
            TestRunner.slowestTestURL = TestRunner.currentTestURL;
        }

        TestRunner.updateUI(tests);

        var interstitialURL;
        if ($('testframe').contentWindow.location.protocol == "chrome:") {
            interstitialURL = "tests/SimpleTest/iframe-between-tests.html";
        } else {
            interstitialURL = "/tests/SimpleTest/iframe-between-tests.html";
        }
        TestRunner._makeIframe(interstitialURL, 0);
    }

    SpecialPowers.executeAfterFlushingMessageQueue(function() {
        cleanUpCrashDumpFiles();
        SpecialPowers.flushAllAppsLaunchable();
        SpecialPowers.flushPermissions(function () { SpecialPowers.flushPrefEnv(runNextTest); });
    });
};

TestRunner.testUnloaded = function() {
    // If we're in a debug build, check assertion counts. This code is
    // similar to the code in Tester_nextTest in browser-test.js used
    // for browser-chrome mochitests.
    if (SpecialPowers.isDebugBuild) {
        var newAssertionCount = SpecialPowers.assertionCount();
        var numAsserts = newAssertionCount - TestRunner._lastAssertionCount;
        TestRunner._lastAssertionCount = newAssertionCount;

        var url = TestRunner._urls[TestRunner._currentTest];
        var max = TestRunner._expectedMaxAsserts;
        var min = TestRunner._expectedMinAsserts;
        if (numAsserts > max) {
            TestRunner.error("TEST-UNEXPECTED-FAIL | " + url + " | Assertion count " + numAsserts + " is greater than expected range " + min + "-" + max + " assertions.");
            TestRunner.updateUI([{ result: false }]);
        } else if (numAsserts < min) {
            TestRunner.error("TEST-UNEXPECTED-PASS | " + url + " | Assertion count " + numAsserts + " is less than expected range " + min + "-" + max + " assertions.");
            TestRunner.updateUI([{ result: false }]);
        } else if (numAsserts > 0) {
            TestRunner.log("TEST-KNOWN-FAIL | " + url + " | Assertion count " + numAsserts + " within expected range " + min + "-" + max + " assertions.");
        }
    }
    TestRunner._currentTest++;
    if (TestRunner.runSlower) {
        setTimeout(TestRunner.runNextTest, 1000);
    } else {
        TestRunner.runNextTest();
    }
};

/**
 * Get the results.
 */
TestRunner.countResults = function(tests) {
    var nOK = 0;
    var nNotOK = 0;
    var nTodo = 0;
    for (var i = 0; i < tests.length; ++i) {
        var test = tests[i];
        if (test.todo && !test.result) {
            nTodo++;
        } else if (test.result && !test.todo) {
            nOK++;
        } else {
            nNotOK++;
        }
    }
    return {"OK": nOK, "notOK": nNotOK, "todo": nTodo};
};
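
// Worked example (comments only): a plain failure and an unexpectedly passing
// todo test both land in the "notOK" bucket.
//
//     TestRunner.countResults([{result: true},
//                              {result: false},
//                              {result: false, todo: true},
//                              {result: true,  todo: true}]);
//     // -> {"OK": 1, "notOK": 2, "todo": 1}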

/**
 * Print out table of any error messages found during looped run
 */
TestRunner.displayLoopErrors = function(tableName, tests) {
    if (TestRunner.countResults(tests).notOK > 0) {
        var table = $(tableName);
        var curtest;
        if (table.rows.length == 0) {
            // if table headers are not yet generated, make them
            var row = table.insertRow(table.rows.length);
            var cell = row.insertCell(0);
            var textNode = document.createTextNode("Test File Name:");
            cell.appendChild(textNode);
            cell = row.insertCell(1);
            textNode = document.createTextNode("Test:");
            cell.appendChild(textNode);
            cell = row.insertCell(2);
            textNode = document.createTextNode("Error message:");
            cell.appendChild(textNode);
        }

        // find the broken tests
        for (var testnum in tests) {
            curtest = tests[testnum];
            if (!((curtest.todo && !curtest.result) || (curtest.result && !curtest.todo))) {
                // This is a failed test or an unexpectedly passing todo test;
                // display the related message.
                row = table.insertRow(table.rows.length);
                cell = row.insertCell(0);
                textNode = document.createTextNode(TestRunner.currentTestURL);
                cell.appendChild(textNode);
                cell = row.insertCell(1);
                textNode = document.createTextNode(curtest.name);
                cell.appendChild(textNode);
                cell = row.insertCell(2);
                textNode = document.createTextNode((curtest.diag ? curtest.diag : ""));
                cell.appendChild(textNode);
            }
        }
    }
};

TestRunner.updateUI = function(tests) {
    var results = TestRunner.countResults(tests);
    var passCount = parseInt($("pass-count").innerHTML) + results.OK;
    var failCount = parseInt($("fail-count").innerHTML) + results.notOK;
    var todoCount = parseInt($("todo-count").innerHTML) + results.todo;
    $("pass-count").innerHTML = passCount;
    $("fail-count").innerHTML = failCount;
    $("todo-count").innerHTML = todoCount;

    // Set the top Green/Red bar
    var indicator = $("indicator");
    if (failCount > 0) {
        indicator.innerHTML = "Status: Fail";
        indicator.style.backgroundColor = "red";
    } else if (passCount > 0) {
        indicator.innerHTML = "Status: Pass";
        indicator.style.backgroundColor = "#0d0";
    } else {
        indicator.innerHTML = "Status: ToDo";
        indicator.style.backgroundColor = "orange";
    }

    // Set the table values
    var trID = "tr-" + $('current-test-path').innerHTML;
    var row = $(trID);

    // Only update the row if it actually exists (autoUI)
    if (row != null) {
        var tds = row.getElementsByTagName("td");
        tds[0].style.backgroundColor = "#0d0";
        tds[0].innerHTML = parseInt(tds[0].innerHTML) + parseInt(results.OK);
        tds[1].style.backgroundColor = results.notOK > 0 ? "red" : "#0d0";
        tds[1].innerHTML = parseInt(tds[1].innerHTML) + parseInt(results.notOK);
        tds[2].style.backgroundColor = results.todo > 0 ? "orange" : "#0d0";
        tds[2].innerHTML = parseInt(tds[2].innerHTML) + parseInt(results.todo);
    }

    // if we ran in a loop, display any found errors
    if (TestRunner.repeat > 0) {
        TestRunner.displayLoopErrors('fail-table', tests);
    }
};
