// Test: the old (pre-2013) disk cache backend must doom/clean up entries
// whose data or metadata exceeds browser.cache.disk.max_entry_size, and the
// cache size accounting must end up (near) zero afterwards.

// Builds a 1 MiB string by doubling a single "x" twenty times (2^20 chars).
function gen_1MiB()
{
  var chunk = "x";
  for (var round = 0; round < 20; round++)
    chunk += chunk;
  return chunk;
}

// Writes `data` to output stream `str` and throws if fewer than `len`
// characters were accepted.
function write_and_check(str, data, len)
{
  var written = str.write(data, len);
  if (written == len)
    return;
  do_throw("str.write has not written all data!\n" +
           " Expected: " + len + "\n" +
           " Actual: " + written + "\n");
}

// Callback for the "http://bigdata/" entry: stores 65 MiB of data, which is
// over the configured max entry size, so the final write must fail.
function write_big_datafile(status, entry)
{
  do_check_eq(status, Cr.NS_OK);
  var os = entry.openOutputStream(0);
  var payload = gen_1MiB();

  // Fill the entry with 65 MiB.
  var blocks = 0;
  while (blocks++ < 65)
    write_and_check(os, payload, payload.length);

  // One more write must be rejected, and the entry gets doomed.
  try {
    write_and_check(os, payload, payload.length);
    do_throw("write should fail");
  } catch (e) {}

  os.close();
  entry.close();

  // DoomEntry() runs while the entry is being written, but the data is only
  // really deleted (and the cache size updated) on the background thread once
  // the entry is deactivated — so sync with the cache IO thread before
  // continuing the test.
  syncWithCacheIOThread(run_test_2);
}

// Callback for the "http://bigmetadata/" entry: attaches > 64 MiB of
// metadata, exceeding the cache capacity configured by run_test_2a.
function write_big_metafile(status, entry)
{
  do_check_eq(status, Cr.NS_OK);
  var os = entry.openOutputStream(0);
  var payload = gen_1MiB();

  // Attach more than 64 MiB of metadata.
  for (var idx = 0; idx < 65; idx++)
    entry.setMetaDataElement("metadata_" + idx, payload);

  entry.metaDataReady();

  os.close();
  entry.close();

  // Cache fullness is not checked while metadata is written, and the metadata
  // itself is only flushed when the entry closes — so write some data after
  // closing this entry to trigger the cache cleanup.
  asyncOpenCacheEntry("http://smalldata/",
                      "disk", Ci.nsICacheStorage.OPEN_TRUNCATE, null,
                      write_and_doom_small_datafile);
}

// Callback for the "http://smalldata/" entry: writes a few bytes, then dooms
// the entry asynchronously and hands control to run_test_3.
function write_and_doom_small_datafile(status, entry)
{
  do_check_eq(status, Cr.NS_OK);
  var os = entry.openOutputStream(0);
  var tinyPayload = "0123456789";

  write_and_check(os, tinyPayload, tinyPayload.length);

  os.close();
  entry.asyncDoom(null);
  entry.close();
  syncWithCacheIOThread(run_test_3);
}

// Verifies the disk device's reported consumption is (almost) zero, then
// invokes `cont`.
function check_cache_size(cont) {
  get_device_entry_count("disk", null, function(count, consumption) {
    // The last stored entry was doomed with AsyncDoom rather than Doom, so it
    // is still active while the visit is processed and one block remains
    // allocated — consumption can therefore be slightly above zero.
    // ...one of the small old-cache quirks that will eventually go away... :)
    do_check_true(consumption <= 1024);
    cont();
  });
}

// Test entry point: configures the cache limits and kicks off the first
// oversized-data write.
function run_test() {
  if (newCacheBackEndUsed()) {
    // browser.cache.disk.* (limits mainly) tests
    do_check_true(true, "This test doesn't run with the new cache backend, the test or the cache needs to be fixed");
    return;
  }

  var prefBranch = Cc["@mozilla.org/preferences-service;1"]
                     .getService(Ci.nsIPrefBranch);

  // Allow single entries bigger than 64 MiB...
  prefBranch.setIntPref("browser.cache.disk.max_entry_size", 65 * 1024);
  // ...inside a disk cache at least eight times that size...
  prefBranch.setIntPref("browser.cache.disk.capacity", 8 * 65 * 1024);
  // ...with smart sizing disabled so the capacity pref is authoritative.
  prefBranch.setBoolPref("browser.cache.disk.smart_size.enabled", false);

  do_get_profile();

  // Start from an empty cache.
  evict_cache_entries();

  // Store an entry whose data exceeds 64 MiB.
  asyncOpenCacheEntry("http://bigdata/",
                      "disk", Ci.nsICacheStorage.OPEN_TRUNCATE, null,
                      write_big_datafile);

  do_test_pending();
}

// After the oversized-data entry was cleaned up: check the size, then move on
// to the metadata phase.
function run_test_2()
{
  check_cache_size(run_test_2a);
}

// Second phase: shrink the capacity below the max entry size and store an
// entry with oversized metadata.
function run_test_2a()
{
  var prefBranch = Cc["@mozilla.org/preferences-service;1"]
                     .getService(Ci.nsIPrefBranch);

  // Set the cache capacity lower than the max entry size (see the comment in
  // write_big_metafile).
  prefBranch.setIntPref("browser.cache.disk.capacity", 64 * 1024);

  // Store an entry whose metadata exceeds 64 MiB.
  asyncOpenCacheEntry("http://bigmetadata/",
                      "disk", Ci.nsICacheStorage.OPEN_TRUNCATE, null,
                      write_big_metafile);
}

// Final phase: verify the cache size once more and finish the test.
function run_test_3()
{
  check_cache_size(do_test_finished);
}