#!/usr/bin/env python
#
# Any copyright is dedicated to the Public Domain.
# http://creativecommons.org/publicdomain/zero/1.0/
#

from __future__ import with_statement
import sys, os, unittest, tempfile, shutil
import mozinfo

from StringIO import StringIO
from xml.etree.ElementTree import ElementTree

from mozbuild.base import MozbuildObject
build_obj = MozbuildObject.from_environment()

from runxpcshelltests import XPCShellTests

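# Load mozinfo.json, if one can be found, so that manifest conditions
# (skip-if, fail-if, ...) can be evaluated against the current build
# configuration via the mozinfo.info passed to runTests() below.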
mozinfo.find_and_update_from_json()

objdir = build_obj.topobjdir.encode("utf-8")
xpcshellBin = os.path.join(objdir, "dist", "bin", "xpcshell")
if sys.platform == "win32":
    xpcshellBin += ".exe"

SIMPLE_PASSING_TEST = "function run_test() { do_check_true(true); }"
SIMPLE_FAILING_TEST = "function run_test() { do_check_true(false); }"

ADD_TEST_SIMPLE = '''
function run_test() { run_next_test(); }

add_test(function test_simple() {
  do_check_true(true);
  run_next_test();
});
'''

ADD_TEST_FAILING = '''
function run_test() { run_next_test(); }

add_test(function test_failing() {
  do_check_true(false);
  run_next_test();
});
'''

CHILD_TEST_PASSING = '''
function run_test () { run_next_test(); }

add_test(function test_child_simple () {
  run_test_in_child("test_pass.js");
  run_next_test();
});
'''

CHILD_TEST_FAILING = '''
function run_test () { run_next_test(); }

add_test(function test_child_simple () {
  run_test_in_child("test_fail.js");
  run_next_test();
});
'''

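# Simulates a child process that hangs: the stray leading "+" on the second
# line of the command reduces that half of the string to NaN, so the child
# logs CHILD-TEST-STARTED but never reaches _execute_test() and never
# completes (see testChildHang below).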
CHILD_TEST_HANG = '''
function run_test () { run_next_test(); }

add_test(function test_child_simple () {
  do_test_pending("hang test");
  do_load_child_test_harness();
  sendCommand("_log('child_test_start', {_message: 'CHILD-TEST-STARTED'}); " +
              + "const _TEST_FILE=['test_pass.js']; _execute_test(); ",
              do_test_finished);
  run_next_test();
});
'''

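# The add_task() tests below are written as legacy generator functions
# (they use yield); the harness is expected to drive them to completion,
# waiting on any promises they yield.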
ADD_TASK_SINGLE = '''
Components.utils.import("resource://gre/modules/Promise.jsm");

function run_test() { run_next_test(); }

add_task(function test_task() {
  yield Promise.resolve(true);
  yield Promise.resolve(false);
});
'''

ADD_TASK_MULTIPLE = '''
Components.utils.import("resource://gre/modules/Promise.jsm");

function run_test() { run_next_test(); }

add_task(function test_task() {
  yield Promise.resolve(true);
});

add_task(function test_2() {
  yield Promise.resolve(true);
});
'''

ADD_TASK_REJECTED = '''
Components.utils.import("resource://gre/modules/Promise.jsm");

function run_test() { run_next_test(); }

add_task(function test_failing() {
  yield Promise.reject(new Error("I fail."));
});
'''

ADD_TASK_FAILURE_INSIDE = '''
Components.utils.import("resource://gre/modules/Promise.jsm");

function run_test() { run_next_test(); }

add_task(function test() {
  let result = yield Promise.resolve(false);

  do_check_true(result);
});
'''

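# Calling run_next_test() from inside an add_task() body is an error that
# the harness should report as a failure (see testAddTaskRunNextTest below).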
ADD_TASK_RUN_NEXT_TEST = '''
function run_test() { run_next_test(); }

add_task(function () {
  Assert.ok(true);

  run_next_test();
});
'''

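# A test for do_throw when it is passed a plain string rather than an object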
ADD_TEST_THROW_STRING = '''
function run_test() {do_throw("Passing a string to do_throw")};
'''

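# A test for do_throw when it is passed a generic object that has error-like
# properties (message, fileName, stack) but is not an instance of Error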
ADD_TEST_THROW_OBJECT = '''
let error = {
  message: "Error object",
  fileName: "failure.js",
  stack: "ERROR STACK",
  toString: function() {return this.message;}
};
function run_test() {do_throw(error)};
'''

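# The same kind of non-Error object, reported via do_report_unexpected_exception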
ADD_TEST_REPORT_OBJECT = '''
let error = {
  message: "Error object",
  fileName: "failure.js",
  stack: "ERROR STACK",
  toString: function() {return this.message;}
};
function run_test() {do_report_unexpected_exception(error)};
'''

# A test for genuine JS-generated Error objects
ADD_TEST_REPORT_REF_ERROR = '''
function run_test() {
  let obj = {blah: 0};
  try {
    obj.noSuchFunction();
  }
  catch (error) {
    do_report_unexpected_exception(error);
  }
};
'''

# A test for failure to load a test due to a syntax error
LOAD_ERROR_SYNTAX_ERROR = '''
function run_test(
'''

# A test for failure to load a test due to an error other than a syntax error
LOAD_ERROR_OTHER_ERROR = '''
function run_test() {
  yield "foo";
  return "foo"; // can't use return in a generator!
};
'''

# A test for asynchronous cleanup functions
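# The cleanup functions registered below are expected to run in reverse
# registration order, building up the "1234" checkpoint string; the last one
# to run deliberately throws so that the harness prints the log, which
# testAsyncCleanup then inspects.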
ASYNC_CLEANUP = '''
function run_test() {
  Components.utils.import("resource://gre/modules/Promise.jsm", this);

  // The list of checkpoints in the order we encounter them.
  let checkpoints = [];

  // Cleanup tasks, in reverse order
  do_register_cleanup(function cleanup_checkout() {
    do_check_eq(checkpoints.join(""), "1234");
    do_print("At this stage, the test has succeeded");
    do_throw("Throwing an error to force displaying the log");
  });

  do_register_cleanup(function sync_cleanup_2() {
    checkpoints.push(4);
  });

  do_register_cleanup(function async_cleanup_2() {
    let deferred = Promise.defer();
    do_execute_soon(deferred.resolve);
    return deferred.promise.then(function() {
      checkpoints.push(3);
    });
  });

  do_register_cleanup(function sync_cleanup() {
    checkpoints.push(2);
  });

  do_register_cleanup(function async_cleanup() {
    let deferred = Promise.defer();
    do_execute_soon(deferred.resolve);
    return deferred.promise.then(function() {
      checkpoints.push(1);
    });
  });

}
'''


class XPCShellTestsTests(unittest.TestCase):
    """
    Yes, these are unit tests for a unit test harness.
    """
    def setUp(self):
        self.log = StringIO()
        self.tempdir = tempfile.mkdtemp()
        self.x = XPCShellTests(log=self.log)

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def writeFile(self, name, contents):
        """
        Write |contents| to a file named |name| in the temp directory,
        and return the full path to the file.
        """
        fullpath = os.path.join(self.tempdir, name)
        with open(fullpath, "w") as f:
            f.write(contents)
        return fullpath

    def writeManifest(self, tests):
        """
        Write an xpcshell.ini in the temp directory and set
        self.manifest to its pathname. |tests| is a list containing
        either strings (for test names), or tuples with a test name
        as the first element and manifest conditions as the following
        elements.
        """
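        # For example (with hypothetical file names),
        #   writeManifest(["test_a.js", ("test_b.js", "skip-if = true")])
        # produces an xpcshell.ini containing:
        #
        #   [DEFAULT]
        #   head =
        #   tail =
        #
        #   [test_a.js]
        #   [test_b.js]
        #   skip-if = true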
        testlines = []
        for t in tests:
            testlines.append("[%s]" % (t if isinstance(t, basestring)
                                       else t[0]))
            if isinstance(t, tuple):
                testlines.extend(t[1:])
        self.manifest = self.writeFile("xpcshell.ini", """
[DEFAULT]
head =
tail =

""" + "\n".join(testlines))

    def assertTestResult(self, expected, shuffle=False, xunitFilename=None, verbose=False):
        """
        Assert that self.x.runTests with manifest=self.manifest
        returns |expected|.
        """
        self.assertEquals(expected,
                          self.x.runTests(xpcshellBin,
                                          manifest=self.manifest,
                                          mozInfo=mozinfo.info,
                                          shuffle=shuffle,
                                          testsRootDir=self.tempdir,
                                          verbose=verbose,
                                          xunitFilename=xunitFilename,
                                          sequential=True),
                          msg="""Tests should have %s, log:
========
%s
========
""" % ("passed" if expected else "failed", self.log.getvalue()))

    def _assertLog(self, s, expected):
        l = self.log.getvalue()
        self.assertEqual(expected, s in l,
                         msg="""Value %s %s in log:
========
%s
========""" % (s, "expected" if expected else "not expected", l))

    def assertInLog(self, s):
        """
        Assert that the string |s| is contained in self.log.
        """
        self._assertLog(s, True)

    def assertNotInLog(self, s):
        """
        Assert that the string |s| is not contained in self.log.
        """
        self._assertLog(s, False)

    def testPass(self):
        """
        Check that a simple test without any manifest conditions passes.
        """
        self.writeFile("test_basic.js", SIMPLE_PASSING_TEST)
        self.writeManifest(["test_basic.js"])

        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-PASS")
        self.assertNotInLog("TEST-UNEXPECTED-FAIL")

    def testFail(self):
        """
        Check that a simple failing test without any manifest conditions fails.
        """
        self.writeFile("test_basic.js", SIMPLE_FAILING_TEST)
        self.writeManifest(["test_basic.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    @unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
                     'selftests with child processes fail on b2g desktop builds')
    def testChildPass(self):
        """
        Check that a simple test running in a child process passes.
        """
        self.writeFile("test_pass.js", SIMPLE_PASSING_TEST)
        self.writeFile("test_child_pass.js", CHILD_TEST_PASSING)
        self.writeManifest(["test_child_pass.js"])

        self.assertTestResult(True, verbose=True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-PASS")
        self.assertInLog("CHILD-TEST-STARTED")
        self.assertInLog("CHILD-TEST-COMPLETED")
        self.assertNotInLog("TEST-UNEXPECTED-FAIL")

    @unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
                     'selftests with child processes fail on b2g desktop builds')
    def testChildFail(self):
        """
        Check that a simple failing test running in a child process fails.
        """
        self.writeFile("test_fail.js", SIMPLE_FAILING_TEST)
        self.writeFile("test_child_fail.js", CHILD_TEST_FAILING)
        self.writeManifest(["test_child_fail.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("CHILD-TEST-STARTED")
        self.assertInLog("CHILD-TEST-COMPLETED")
        self.assertNotInLog("TEST-PASS")

    @unittest.skipIf(build_obj.defines.get('MOZ_B2G'),
                     'selftests with child processes fail on b2g desktop builds')
    def testChildHang(self):
        """
        Check that incomplete output from a child process results in a
        test failure.
        """
        self.writeFile("test_pass.js", SIMPLE_PASSING_TEST)
        self.writeFile("test_child_hang.js", CHILD_TEST_HANG)
        self.writeManifest(["test_child_hang.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("CHILD-TEST-STARTED")
        self.assertNotInLog("CHILD-TEST-COMPLETED")
        self.assertNotInLog("TEST-PASS")

    def testSyntaxError(self):
        """
        Check that running a test file containing a syntax error produces
        a test failure and expected output.
        """
        self.writeFile("test_syntax_error.js", '"')
        self.writeManifest(["test_syntax_error.js"])

        self.assertTestResult(False, verbose=True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    def testPassFail(self):
        """
        Check that running more than one test works.
        """
        self.writeFile("test_pass.js", SIMPLE_PASSING_TEST)
        self.writeFile("test_fail.js", SIMPLE_FAILING_TEST)
        self.writeManifest(["test_pass.js", "test_fail.js"])

        self.assertTestResult(False)
        self.assertEquals(2, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-PASS")
        self.assertInLog("TEST-UNEXPECTED-FAIL")

    def testSkip(self):
        """
        Check that a simple failing test skipped in the manifest does
        not cause failure.
        """
        self.writeFile("test_basic.js", SIMPLE_FAILING_TEST)
        self.writeManifest([("test_basic.js", "skip-if = true")])
        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(0, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertNotInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    def testKnownFail(self):
        """
        Check that a simple failing test marked as known-fail in the manifest
        does not cause failure.
        """
        self.writeFile("test_basic.js", SIMPLE_FAILING_TEST)
        self.writeManifest([("test_basic.js", "fail-if = true")])
        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(0, self.x.failCount)
        self.assertEquals(1, self.x.todoCount)
        self.assertInLog("TEST-KNOWN-FAIL")
        # This should be suppressed because the harness doesn't include
        # the full log from the xpcshell run when things pass.
        self.assertNotInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    def testUnexpectedPass(self):
        """
        Check that a test that passes, despite being marked as known-fail
        in the manifest, is reported as an unexpected pass.
        """
        self.writeFile("test_basic.js", SIMPLE_PASSING_TEST)
        self.writeManifest([("test_basic.js", "fail-if = true")])
        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        # From the outer (Python) harness
        self.assertInLog("TEST-UNEXPECTED-PASS")
        self.assertNotInLog("TEST-KNOWN-FAIL")
        # From the inner (JS) harness
        self.assertInLog("TEST-PASS")

    def testReturnNonzero(self):
        """
        Check that a test where xpcshell returns nonzero fails.
        """
        self.writeFile("test_error.js", "throw 'foo'")
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)
        self.assertEquals(0, self.x.todoCount)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertNotInLog("TEST-PASS")

    def testAddTestSimple(self):
        """
        Ensure simple add_test() works.
        """
        self.writeFile("test_add_test_simple.js", ADD_TEST_SIMPLE)
        self.writeManifest(["test_add_test_simple.js"])

        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)

    def testAddTestFailing(self):
        """
        Ensure add_test() with a failing test is reported.
        """
        self.writeFile("test_add_test_failing.js", ADD_TEST_FAILING)
        self.writeManifest(["test_add_test_failing.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)

    def testAddTaskTestSingle(self):
        """
        Ensure add_task() with a single passing task works.
        """
        self.writeFile("test_add_task_simple.js", ADD_TASK_SINGLE)
        self.writeManifest(["test_add_task_simple.js"])

        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)

    def testAddTaskTestMultiple(self):
        """
        Ensure multiple calls to add_task() work as expected.
        """
        self.writeFile("test_add_task_multiple.js",
                       ADD_TASK_MULTIPLE)
        self.writeManifest(["test_add_task_multiple.js"])

        self.assertTestResult(True)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(1, self.x.passCount)
        self.assertEquals(0, self.x.failCount)

    def testAddTaskTestRejected(self):
        """
        Ensure a task that yields a rejected promise is reported as a failure.
        """
        self.writeFile("test_add_task_rejected.js",
                       ADD_TASK_REJECTED)
        self.writeManifest(["test_add_task_rejected.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)

    def testAddTaskTestFailureInside(self):
        """
        Ensure a failing check inside a task is reported as a failure.
        """
        self.writeFile("test_add_task_failure_inside.js",
                       ADD_TASK_FAILURE_INSIDE)
        self.writeManifest(["test_add_task_failure_inside.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)

    def testAddTaskRunNextTest(self):
        """
        Check that calling run_next_test() from inside add_task() results
        in a failure.
        """
        self.writeFile("test_add_task_run_next_test.js",
                       ADD_TASK_RUN_NEXT_TEST)
        self.writeManifest(["test_add_task_run_next_test.js"])

        self.assertTestResult(False)
        self.assertEquals(1, self.x.testCount)
        self.assertEquals(0, self.x.passCount)
        self.assertEquals(1, self.x.failCount)

    def testMissingHeadFile(self):
        """
        Ensure that a missing head file results in a fatal error.
        """
        self.writeFile("test_basic.js", SIMPLE_PASSING_TEST)
        self.writeManifest([("test_basic.js", "head = missing.js")])

        raised = False

        try:
            # The actual return value is never checked because we raise.
            self.assertTestResult(True)
        except Exception, ex:
            raised = True
            self.assertEquals(ex.message[0:9], "head file")

        self.assertTrue(raised)

    def testMissingTailFile(self):
        """
        Ensure that a missing tail file results in a fatal error.
        """
        self.writeFile("test_basic.js", SIMPLE_PASSING_TEST)
        self.writeManifest([("test_basic.js", "tail = missing.js")])

        raised = False

        try:
            self.assertTestResult(True)
        except Exception, ex:
            raised = True
            self.assertEquals(ex.message[0:9], "tail file")

        self.assertTrue(raised)

    def testRandomExecution(self):
        """
        Check that running tests in random (shuffled) order doesn't break anything.
        """
        manifest = []
        for i in range(0, 10):
            filename = "test_pass_%d.js" % i
            self.writeFile(filename, SIMPLE_PASSING_TEST)
            manifest.append(filename)

        self.writeManifest(manifest)
        self.assertTestResult(True, shuffle=True)
        self.assertEquals(10, self.x.testCount)
        self.assertEquals(10, self.x.passCount)

    def testXunitOutput(self):
        """
        Check that xUnit XML output files are written.
        """
        self.writeFile("test_00.js", SIMPLE_PASSING_TEST)
        self.writeFile("test_01.js", SIMPLE_FAILING_TEST)
        self.writeFile("test_02.js", SIMPLE_PASSING_TEST)

        manifest = [
            "test_00.js",
            "test_01.js",
            ("test_02.js", "skip-if = true")
        ]

        self.writeManifest(manifest)

        filename = os.path.join(self.tempdir, "xunit.xml")

        self.assertTestResult(False, xunitFilename=filename)

        self.assertTrue(os.path.exists(filename))
        self.assertTrue(os.path.getsize(filename) > 0)

        tree = ElementTree()
        tree.parse(filename)
        suite = tree.getroot()

        self.assertTrue(suite is not None)
        self.assertEqual(suite.get("tests"), "3")
        self.assertEqual(suite.get("failures"), "1")
        self.assertEqual(suite.get("skip"), "1")

        testcases = suite.findall("testcase")
        self.assertEqual(len(testcases), 3)

        for testcase in testcases:
            attributes = testcase.keys()
            self.assertTrue("classname" in attributes)
            self.assertTrue("name" in attributes)
            self.assertTrue("time" in attributes)

        self.assertTrue(testcases[1].find("failure") is not None)
        self.assertTrue(testcases[2].find("skipped") is not None)

    def testDoThrowString(self):
        """
        Check that do_throw produces a reasonable message when the
        input is a string instead of an object.
        """
        self.writeFile("test_error.js", ADD_TEST_THROW_STRING)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("Passing a string to do_throw")
        self.assertNotInLog("TEST-PASS")

    def testDoThrowForeignObject(self):
        """
        Check that do_throw produces a reasonable message when the
        input is a generic object with 'fileName', 'message' and 'stack'
        attributes but 'object instanceof Error' returns false.
        """
        self.writeFile("test_error.js", ADD_TEST_THROW_OBJECT)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("failure.js")
        self.assertInLog("Error object")
        self.assertInLog("ERROR STACK")
        self.assertNotInLog("TEST-PASS")

    def testDoReportForeignObject(self):
        """
        Check that do_report_unexpected_exception produces a reasonable
        message when the input is a generic object with 'fileName',
        'message' and 'stack' attributes but 'object instanceof Error'
        returns false.
        """
        self.writeFile("test_error.js", ADD_TEST_REPORT_OBJECT)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("failure.js")
        self.assertInLog("Error object")
        self.assertInLog("ERROR STACK")
        self.assertNotInLog("TEST-PASS")

    def testDoReportRefError(self):
        """
        Check that do_report_unexpected_exception produces a reasonable
        message when the input is a JS-generated Error.
        """
        self.writeFile("test_error.js", ADD_TEST_REPORT_REF_ERROR)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("test_error.js")
        self.assertInLog("obj.noSuchFunction is not a function")
        self.assertInLog("run_test@")
        self.assertNotInLog("TEST-PASS")

    def testDoReportSyntaxError(self):
        """
        Check that attempting to load a test file containing a syntax error
        generates details of the error in the log.
        """
        self.writeFile("test_error.js", LOAD_ERROR_SYNTAX_ERROR)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("test_error.js")
        self.assertInLog("test_error.js contains SyntaxError")
        self.assertInLog("Diagnostic: SyntaxError: missing formal parameter at")
        self.assertInLog("test_error.js:3")
        self.assertNotInLog("TEST-PASS")

    def testDoReportNonSyntaxError(self):
        """
        Check that attempting to load a test file containing an error other
        than a syntax error generates details of the error in the log.
        """
        self.writeFile("test_error.js", LOAD_ERROR_OTHER_ERROR)
        self.writeManifest(["test_error.js"])

        self.assertTestResult(False)
        self.assertInLog("TEST-UNEXPECTED-FAIL")
        self.assertInLog("Diagnostic: TypeError: generator function run_test returns a value at")
        self.assertInLog("test_error.js:4")
        self.assertNotInLog("TEST-PASS")

    def testAsyncCleanup(self):
        """
        Check that do_register_cleanup properly handles cleanup functions
        that return a promise.
        """
        self.writeFile("test_asyncCleanup.js", ASYNC_CLEANUP)
        self.writeManifest(["test_asyncCleanup.js"])
        self.assertTestResult(False)
        self.assertInLog("\"1234\" == \"1234\"")
        self.assertInLog("At this stage, the test has succeeded")
        self.assertInLog("Throwing an error to force displaying the log")


if __name__ == "__main__":
    unittest.main()