| |
1 /* Any copyright is dedicated to the Public Domain. |
| |
2 * http://creativecommons.org/publicdomain/zero/1.0/ |
| |
3 */ |
| |
4 |
| |
5 // This file tests SQLITE_FCNTL_CHUNK_SIZE behaves as expected |
| |
6 |
| |
// Executes a single SQL statement on storage connection `d`.
// The statement is finalized even if execution throws, so a failing
// statement cannot leak an unfinalized mozIStorageStatement.
function run_sql(d, sql) {
  var stmt = d.createStatement(sql);
  try {
    stmt.execute();
  } finally {
    stmt.finalize();
  }
}
| |
12 |
| |
// Returns an nsIFile for `name` located in the profile directory ("ProfD").
function new_file(name)
{
  var profileDir = dirSvc.get("ProfD", Ci.nsIFile);
  profileDir.append(name);
  return profileDir;
}
| |
19 |
| |
// Returns the current on-disk size (in bytes) of the profile-directory
// file `name`. Added the missing statement terminator instead of relying
// on automatic semicolon insertion.
function get_size(name) {
  return new_file(name).fileSize;
}
| |
23 |
| |
// Verifies that with a growth increment (SQLITE_FCNTL_CHUNK_SIZE) set,
// the database file only grows — and after VACUUM, shrinks — in
// chunk-size steps.
function run_test()
{
  const filename = "chunked.sqlite";
  const CHUNK_SIZE = 512 * 1024;
  var d = getDatabase(new_file(filename));
  try {
    d.setGrowthIncrement(CHUNK_SIZE, "");
  } catch (e) {
    // Standard catch + rethrow replaces the non-standard SpiderMonkey
    // conditional catch `catch (e if e.result == ...)`, which does not
    // parse in standards-conforming engines.
    if (e.result != Cr.NS_ERROR_FILE_TOO_BIG) {
      throw e;
    }
    print("Too little free space to set CHUNK_SIZE!");
    return;
  }
  run_sql(d, "CREATE TABLE bloat(data varchar)");

  var orig_size = get_size(filename);
  /* Dump in at least 32K worth of data.
   * While writing, ensure that the file grows in the chunk size set above.
   */
  const str1024 = new Array(1024).join("T");
  for (var i = 0; i < 32; i++) {
    run_sql(d, "INSERT INTO bloat VALUES('" + str1024 + "')");
    var size = get_size(filename);
    // Must not grow in small increments.
    do_check_true(size == orig_size || size >= CHUNK_SIZE);
  }
  /* In addition to growing in chunk-size increments, the db
   * should shrink in chunk-size increments too.
   */
  run_sql(d, "DELETE FROM bloat");
  run_sql(d, "VACUUM");
  do_check_true(get_size(filename) >= CHUNK_SIZE);
}
| |
55 |