gecko-dev/netwerk/test/unit/test_bug1218029.js
Kershaw Chang 3f851f4ddf Bug 1905454 - Make sure the consumed data is erased, r=necko-reviewers,valentin
Regardless of the consumedCount, the previous code always erased one byte of data, which is not correct. We should ensure that the amount of data removed matches the consumedCount.

Differential Revision: https://phabricator.services.mozilla.com/D218551
2024-08-06 07:24:27 +00:00
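As a rough illustration of the contract the patch restores (a JavaScript sketch only, not the actual C++ in the incremental stream loader; the helper name is made up): after each onIncrementalData call, exactly consumedCount bytes should be dropped from the front of the retained buffer, so 0 means "keep everything" and the full length means "keep nothing".

  // Hypothetical helper, for illustration only: drop exactly the bytes the
  // observer reported as consumed, instead of always dropping one byte.
  function retainUnconsumed(buffer, consumedCount) {
    // buffer: Uint8Array handed to onIncrementalData
    // consumedCount: the value the observer wrote into consumed.value
    return buffer.subarray(consumedCount);
  }

The test below drives the loader with the chunk sizes in `chunks`, reports the values in `consume` back from onIncrementalData, and checks that the data seen on the next callback shrinks by exactly that amount.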

/* Any copyright is dedicated to the Public Domain.
   http://creativecommons.org/publicdomain/zero/1.0/ */

"use strict";

var tests = [
  { data: "", chunks: [], status: Cr.NS_OK, consume: [], dataChunks: [""] },
  {
    data: "TWO-PARTS",
    chunks: [4, 5],
    status: Cr.NS_OK,
    consume: [4, 5],
    dataChunks: ["TWO-", "PARTS", ""],
  },
  {
    data: "TWO-PARTS",
    chunks: [4, 5],
    status: Cr.NS_OK,
    consume: [0, 0],
    dataChunks: ["TWO-", "TWO-PARTS", "TWO-PARTS"],
  },
  {
    data: "3-PARTS",
    chunks: [1, 1, 5],
    status: Cr.NS_OK,
    consume: [0, 2, 5],
    dataChunks: ["3", "3-", "PARTS", ""],
  },
  {
    data: "ALL-AT-ONCE",
    chunks: [11],
    status: Cr.NS_OK,
    consume: [0],
    dataChunks: ["ALL-AT-ONCE", "ALL-AT-ONCE"],
  },
  {
    data: "ALL-AT-ONCE",
    chunks: [11],
    status: Cr.NS_OK,
    consume: [11],
    dataChunks: ["ALL-AT-ONCE", ""],
  },
  {
    data: "ERROR",
    chunks: [1],
    status: Cr.NS_ERROR_OUT_OF_MEMORY,
    consume: [0],
    dataChunks: ["E", "E"],
  },
  {
    data: "123456789",
    chunks: [2, 3, 4],
    status: Cr.NS_OK,
    consume: [1, 2, 6],
    dataChunks: ["12", "2345", "456789", ""],
  },
];

/**
 * @typedef TestData
 * @property {string} data - data for the test.
 * @property {Array} chunks - lengths of the chunks that are incrementally sent
 *   to the loader.
 * @property {number} status - final status sent on onStopRequest.
 * @property {Array} consume - lengths of consumed data that is reported at
 *   the onIncrementalData callback.
 * @property {Array} dataChunks - data chunks that are reported at the
 *   onIncrementalData and onStreamComplete callbacks.
 */

function execute_test(test) {
  let stream = Cc["@mozilla.org/io/string-input-stream;1"].createInstance(
    Ci.nsIStringInputStream
  );
  stream.data = test.data;

  let contentTypeCalled = 0;
  // Minimal mock nsIChannel; contentTypeCalled tracks how often the
  // contentType getter is queried.
  let channel = {
    contentLength: -1,
    QueryInterface: ChromeUtils.generateQI(["nsIChannel"]),
    get contentType() {
      contentTypeCalled++;
      return "application/test";
    },
  };

  let chunkIndex = 0;
  let observer = {
    onStartRequest(request) {
      const chan = request.QueryInterface(Ci.nsIChannel);
      const before = contentTypeCalled;
      const type = chan.contentType;
      const after = contentTypeCalled;
      equal(type, "application/test");
      equal(after, before + 1);
    },
    onStreamComplete(loader, context, status, length, data) {
      equal(chunkIndex, test.dataChunks.length - 1);
      var expectedChunk = test.dataChunks[chunkIndex];
      equal(length, expectedChunk.length);
      equal(String.fromCharCode.apply(null, data), expectedChunk);
      equal(status, test.status);
    },
    onIncrementalData(loader, context, length, data, consumed) {
      Assert.less(chunkIndex, test.dataChunks.length - 1);
      var expectedChunk = test.dataChunks[chunkIndex];
      equal(length, expectedChunk.length);
      equal(String.fromCharCode.apply(null, data), expectedChunk);
      // Report back how much of this chunk the observer consumed.
      consumed.value = test.consume[chunkIndex];
      chunkIndex++;
    },
    QueryInterface: ChromeUtils.generateQI([
      "nsIIncrementalStreamLoaderObserver",
    ]),
  };

  let listener = Cc[
    "@mozilla.org/network/incremental-stream-loader;1"
  ].createInstance(Ci.nsIIncrementalStreamLoader);
  listener.init(observer);
  listener.onStartRequest(channel);

  // Feed the stream to the loader in the chunk sizes the test prescribes.
  var offset = 0;
  test.chunks.forEach(function (chunkLength) {
    listener.onDataAvailable(channel, stream, offset, chunkLength);
    offset += chunkLength;
  });
  listener.onStopRequest(channel, test.status);
}

function run_test() {
  tests.forEach(execute_test);
}