test: ensure assertions are reached on more tests

PR-URL: https://github.com/nodejs/node/pull/60500
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Chemi Atlow <chemi@atlow.co.il>
Reviewed-By: Zijian Liu <lxxyxzj@gmail.com>
Antoine du Hamel, 2025-11-06 09:41:55 +01:00 (committed by GitHub)
parent 525c4fb316
commit 761d4f45af
14 changed files with 109 additions and 153 deletions
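
The recurring change throughout this diff is the same: bare callbacks and hand-rolled assert.ifError() handlers are replaced with the common.mustCall() family of helpers from Node's test/common module, so a test now fails when an assertion callback is silently never reached. As a rough sketch (not the actual test/common implementation, which also records call sites for a richer exit-time report), the helpers behave approximately like this:

// Approximate behavior of the test/common helpers used below;
// the real versions produce richer diagnostics on process exit.
const assert = require('node:assert');

function mustCall(fn = () => {}, exact = 1) {
  let actual = 0;
  // Verify the expected number of calls once the process is about to exit.
  process.on('exit', () => assert.strictEqual(actual, exact));
  return (...args) => {
    actual++;
    return fn(...args);
  };
}

function mustNotCall() {
  return () => assert.fail('callback should not have been called');
}

function mustSucceed(fn) {
  // Like mustCall(), but additionally asserts the error argument is null.
  return mustCall((err, ...args) => {
    assert.ifError(err);
    return fn?.(...args);
  });
}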

View File

@@ -191,6 +191,10 @@ export default [
'wasm-allocation',
'wpt',
].join(',')}}/**/*.{js,mjs,cjs}`,
`test/parallel/test-{${
// 0x61 is the char code for 'a'; this generates a string enumerating Latin letters from 'z' down: 'z*,y*,…'
Array.from({ length: 2 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42)).join(',')
}}.{js,mjs,cjs}`,
],
rules: {
'node-core/must-call-assert': 'error',
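
For reference, the new template literal above evaluates at config-load time to a single glob. A quick standalone check (not part of the ESLint config itself):

// Evaluating the same expression in isolation:
const letters = Array.from({ length: 2 }, (_, i) => String.fromCharCode(0x61 + 25 - i, 42));
console.log(`test/parallel/test-{${letters.join(',')}}.{js,mjs,cjs}`);
// Prints: test/parallel/test-{z*,y*}.{js,mjs,cjs}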

View File

@@ -2,7 +2,7 @@
const common = require('../common');
const { createBrotliDecompress } = require('node:zlib');
const strictEqual = require('node:assert').strictEqual;
const assert = require('node:assert');
const { getDefaultHighWaterMark } = require('stream');
// This tiny hex string decompresses to a 16 GB file.
@@ -19,5 +19,5 @@ decoder.end(buf);
// to process the data and the buffer is not empty.
setTimeout(common.mustCall(() => {
// There is only one chunk in the buffer
strictEqual(decoder._readableState.buffer.length, getDefaultHighWaterMark() / (16 * 1024));
assert.strictEqual(decoder._readableState.buffer.length, getDefaultHighWaterMark() / (16 * 1024));
}), common.platformTimeout(500));
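
The expected value divides the stream's default high-water mark by 16 * 1024, which is the documented default chunkSize for zlib streams, so the assertion reads as "the readable buffer holds exactly one high-water mark's worth of default-sized zlib chunks". A standalone look at the two defaults (the exact high-water mark varies by Node.js release, so the printed ratio may differ):

// Both numbers are documented defaults; values depend on the Node.js version.
const { getDefaultHighWaterMark } = require('node:stream');
const ZLIB_DEFAULT_CHUNK = 16 * 1024; // default chunkSize option for zlib streams
console.log(getDefaultHighWaterMark(false) / ZLIB_DEFAULT_CHUNK);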

View File

@@ -1,5 +1,5 @@
'use strict';
require('../common');
const common = require('../common');
// This test ensures that zlib throws a RangeError if the final buffer needs to
// be larger than kMaxLength and concatenation fails.
@@ -18,9 +18,9 @@ buffer.kMaxLength = oldkMaxLength;
const encoded = Buffer.from('G38A+CXCIrFAIAM=', 'base64');
// Async
zlib.brotliDecompress(encoded, function(err) {
zlib.brotliDecompress(encoded, common.mustCall((err) => {
assert.ok(err instanceof RangeError);
});
}));
// Sync
assert.throws(function() {

View File

@@ -47,9 +47,7 @@ const pmmResultBuffers = [];
fs.createReadStream(pmmFileGz)
.pipe(zlib.createGunzip())
.on('error', (err) => {
assert.ifError(err);
})
.on('error', common.mustNotCall())
.on('data', (data) => pmmResultBuffers.push(data))
.on('finish', common.mustCall(() => {
// Result should match original random garbage
@@ -61,9 +59,7 @@ fs.createReadStream(pmmFileGz)
const resultBuffers = [];
const unzip = zlib.createGunzip()
.on('error', (err) => {
assert.ifError(err);
})
.on('error', common.mustNotCall())
.on('data', (data) => resultBuffers.push(data))
.on('finish', common.mustCall(() => {
assert.strictEqual(

View File

@@ -1,5 +1,5 @@
'use strict';
require('../common');
const common = require('../common');
// This test ensures that zlib throws a RangeError if the final buffer needs to
// be larger than kMaxLength and concatenation fails.
@@ -18,9 +18,9 @@ buffer.kMaxLength = oldkMaxLength;
const encoded = Buffer.from('H4sIAAAAAAAAA0tMHFgAAIw2K/GAAAAA', 'base64');
// Async
zlib.gunzip(encoded, function(err) {
zlib.gunzip(encoded, common.mustCall((err) => {
assert.ok(err instanceof RangeError);
});
}));
// Sync
assert.throws(function() {

View File

@@ -17,9 +17,7 @@ assert.throws(function() {
}, RangeError);
// Async
zlib.brotliDecompress(encoded, { maxOutputLength: 256 }, function(err) {
assert.strictEqual(err, null);
});
zlib.brotliDecompress(encoded, { maxOutputLength: 256 }, common.mustSucceed());
// Sync
zlib.brotliDecompressSync(encoded, { maxOutputLength: 256 });

View File

@@ -21,9 +21,7 @@ for (const fn of [
const output = [];
inflate
.on('error', (err) => {
assert.ifError(err);
})
.on('error', common.mustNotCall())
.on('data', (chunk) => output.push(chunk))
.on('end', common.mustCall(
() => assert.strictEqual(Buffer.concat(output).toString(), 'abc')));

View File

@@ -1,7 +1,7 @@
'use strict';
// Tests zlib streams with truncated compressed input
require('../common');
const common = require('../common');
const assert = require('assert');
const zlib = require('zlib');
@@ -23,8 +23,7 @@ const errMessage = /unexpected end of file/;
{ comp: 'deflate', decomp: 'inflate', decompSync: 'inflateSync' },
{ comp: 'deflateRaw', decomp: 'inflateRaw', decompSync: 'inflateRawSync' },
].forEach(function(methods) {
zlib[methods.comp](inputString, function(err, compressed) {
assert.ifError(err);
zlib[methods.comp](inputString, common.mustSucceed((compressed) => {
const truncated = compressed.slice(0, compressed.length / 2);
const toUTF8 = (buffer) => buffer.toString('utf-8');
@@ -33,10 +32,9 @@ const errMessage = /unexpected end of file/;
assert.strictEqual(toUTF8(decompressed), inputString);
// async sanity
zlib[methods.decomp](compressed, function(err, result) {
assert.ifError(err);
zlib[methods.decomp](compressed, common.mustSucceed((result) => {
assert.strictEqual(toUTF8(result), inputString);
});
}));
// Sync truncated input test
assert.throws(function() {
@@ -44,9 +42,9 @@ const errMessage = /unexpected end of file/;
}, errMessage);
// Async truncated input test
zlib[methods.decomp](truncated, function(err, result) {
zlib[methods.decomp](truncated, common.mustCall((err) => {
assert.match(err.message, errMessage);
});
}));
const syncFlushOpt = { finishFlush: zlib.constants.Z_SYNC_FLUSH };
@@ -55,10 +53,9 @@ const errMessage = /unexpected end of file/;
assert.strictEqual(result, inputString.slice(0, result.length));
// Async truncated input test, finishFlush = Z_SYNC_FLUSH
zlib[methods.decomp](truncated, syncFlushOpt, function(err, decompressed) {
assert.ifError(err);
zlib[methods.decomp](truncated, syncFlushOpt, common.mustSucceed((decompressed) => {
const result = toUTF8(decompressed);
assert.strictEqual(result, inputString.slice(0, result.length));
});
});
}));
}));
});

View File

@@ -1,20 +1,9 @@
'use strict';
require('../common');
const assert = require('assert').strict;
const assert = require('assert');
const test = require('node:test');
const { DecompressionStream } = require('stream/web');
async function expectTypeError(promise) {
let threw = false;
try {
await promise;
} catch (err) {
threw = true;
assert(err instanceof TypeError, `Expected TypeError, got ${err}`);
}
assert(threw, 'Expected promise to reject');
}
test('DecompressionStream deflate emits error on trailing data', async () => {
const valid = new Uint8Array([120, 156, 75, 4, 0, 0, 98, 0, 98]); // deflate('a')
const empty = new Uint8Array(1);
@@ -22,10 +11,11 @@ test('DecompressionStream deflate emits error on trailing data', async () => {
const double = new Uint8Array([...valid, ...valid]);
for (const chunk of [[invalid], [valid, empty], [valid, valid], [valid, double]]) {
await expectTypeError(
await assert.rejects(
Array.fromAsync(
new Blob([chunk]).stream().pipeThrough(new DecompressionStream('deflate'))
)
),
{ name: 'TypeError' },
);
}
});
@@ -37,10 +27,11 @@ test('DecompressionStream gzip emits error on trailing data', async () => {
const invalid = new Uint8Array([...valid, ...empty]);
const double = new Uint8Array([...valid, ...valid]);
for (const chunk of [[invalid], [valid, empty], [valid, valid], [double]]) {
await expectTypeError(
await assert.rejects(
Array.fromAsync(
new Blob([chunk]).stream().pipeThrough(new DecompressionStream('gzip'))
)
),
{ name: 'TypeError' },
);
}
});
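
Both loops above drop the hand-rolled expectTypeError() helper in favor of assert.rejects(), which returns a promise that fulfills only if the awaited operation rejects with an error matching the second argument; a plain object matches by comparing the listed properties. A minimal usage sketch:

// assert.rejects() accepts a promise (or async function) plus an
// expected error; here only the error's name is checked.
const assert = require('node:assert');

(async () => {
  await assert.rejects(
    Promise.reject(new TypeError('boom')),
    { name: 'TypeError' },
  );
})();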

View File

@@ -1,38 +1,30 @@
'use strict';
require('../common');
const common = require('../common');
const assert = require('node:assert');
const zlib = require('node:zlib');
const { test } = require('node:test');
test('zlib should unzip one byte chunks', async () => {
const { promise, resolve } = Promise.withResolvers();
const data = Buffer.concat([
zlib.gzipSync('abc'),
zlib.gzipSync('def'),
]);
const data = Buffer.concat([
zlib.gzipSync('abc'),
zlib.gzipSync('def'),
]);
const resultBuffers = [];
const resultBuffers = [];
const unzip = zlib.createUnzip()
.on('error', (err) => {
assert.ifError(err);
})
.on('data', (data) => resultBuffers.push(data))
.on('finish', () => {
const unzipped = Buffer.concat(resultBuffers).toString();
assert.strictEqual(unzipped, 'abcdef',
`'${unzipped}' should match 'abcdef' after zipping ` +
'and unzipping');
resolve();
});
const unzip = zlib.createUnzip()
.on('error', common.mustNotCall())
.on('data', (data) => resultBuffers.push(data))
.on('finish', common.mustCall(() => {
const unzipped = Buffer.concat(resultBuffers).toString();
assert.strictEqual(unzipped, 'abcdef',
`'${unzipped}' should match 'abcdef' after zipping ` +
'and unzipping');
}));
for (let i = 0; i < data.length; i++) {
// Write each single byte individually.
unzip.write(Buffer.from([data[i]]));
}
for (let i = 0; i < data.length; i++) {
// Write each single byte individually.
unzip.write(Buffer.from([data[i]]));
}
unzip.end();
await promise;
});
unzip.end();
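
This rewrite drops both the node:test wrapper and the Promise.withResolvers() bookkeeping: because common.mustCall() and common.mustNotCall() register their checks for process exit, nothing needs to keep an async test body alive until 'finish' fires. A minimal sketch of the same idea (assumes Node's test/common helper, which only resolves when run from inside the Node.js repository's test/parallel directory):

'use strict';
const common = require('../common'); // Node core test helper (assumption: run from test/parallel)
const { PassThrough } = require('node:stream');

const stream = new PassThrough();
stream.resume();
// Fails at process exit if 'finish' never fires; no await needed.
stream.on('finish', common.mustCall());
stream.end('data');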

View File

@@ -21,25 +21,17 @@
'use strict';
require('../common');
const common = require('../common');
const zlib = require('node:zlib');
const assert = require('node:assert');
const { test } = require('node:test');
test('zlib should not allow writing after close', async (t) => {
const { promise, resolve } = Promise.withResolvers();
const closeCallback = t.mock.fn();
zlib.gzip('hello', function() {
const unzip = zlib.createGunzip();
unzip.close(closeCallback);
unzip.write('asd', function(err) {
assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED');
assert.strictEqual(err.name, 'Error');
assert.strictEqual(err.message, 'Cannot call write after a stream was destroyed');
resolve();
});
});
await promise;
assert.strictEqual(closeCallback.mock.callCount(), 1);
});
zlib.gzip('hello', common.mustCall(() => {
const unzip = zlib.createGunzip();
unzip.close(common.mustCall());
unzip.write('asd', common.mustCall((err) => {
assert.strictEqual(err.code, 'ERR_STREAM_DESTROYED');
assert.strictEqual(err.name, 'Error');
assert.strictEqual(err.message, 'Cannot call write after a stream was destroyed');
}));
}));
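
Here common.mustCall() with no arguments replaces both the t.mock.fn() counter and its callCount assertion: the default expectation is exactly one invocation before process exit. An explicit count can be passed when a callback should run more than once; a hypothetical example:

// Hypothetical example: require the listener to fire exactly twice.
const common = require('../common'); // Node core test helper (assumption)
const { EventEmitter } = require('node:events');

const ee = new EventEmitter();
ee.on('tick', common.mustCall(() => {}, 2));
ee.emit('tick');
ee.emit('tick');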

View File

@@ -21,39 +21,33 @@
'use strict';
require('../common');
const common = require('../common');
const assert = require('node:assert');
const zlib = require('node:zlib');
const { test } = require('node:test');
test('zlib should accept writing after flush', async () => {
for (const [ createCompress, createDecompress ] of [
[ zlib.createGzip, zlib.createGunzip ],
[ zlib.createBrotliCompress, zlib.createBrotliDecompress ],
[ zlib.createZstdCompress, zlib.createZstdDecompress ],
]) {
const { promise, resolve, reject } = Promise.withResolvers();
const gzip = createCompress();
const gunz = createDecompress();
for (const [ createCompress, createDecompress ] of [
[ zlib.createGzip, zlib.createGunzip ],
[ zlib.createBrotliCompress, zlib.createBrotliDecompress ],
[ zlib.createZstdCompress, zlib.createZstdDecompress ],
]) {
const gzip = createCompress();
const gunz = createDecompress();
gzip.pipe(gunz);
gzip.pipe(gunz);
let output = '';
const input = 'A line of data\n';
gunz.setEncoding('utf8');
gunz.on('error', reject);
gunz.on('data', (c) => output += c);
gunz.on('end', () => {
assert.strictEqual(output, input);
resolve();
});
let output = '';
const input = 'A line of data\n';
gunz.setEncoding('utf8');
gunz.on('error', common.mustNotCall());
gunz.on('data', (c) => output += c);
gunz.on('end', common.mustCall(() => {
assert.strictEqual(output, input);
}));
// Make sure that flush/write doesn't trigger an assert failure
gzip.flush();
gzip.write(input);
gzip.end();
gunz.read(0);
await promise;
}
});
// Make sure that flush/write doesn't trigger an assert failure
gzip.flush();
gzip.write(input);
gzip.end();
gunz.read(0);
}

View File

@@ -21,35 +21,29 @@
'use strict';
require('../common');
const common = require('../common');
const assert = require('node:assert');
const zlib = require('node:zlib');
const { test } = require('node:test');
test('zlib should properly handle zero byte input', async () => {
const compressors = [
[zlib.Gzip, 20],
[zlib.BrotliCompress, 1],
[zlib.ZstdCompress, 9],
];
const compressors = [
[zlib.Gzip, 20],
[zlib.BrotliCompress, 1],
[zlib.ZstdCompress, 9],
];
for (const [Compressor, expected] of compressors) {
const { promise, resolve, reject } = Promise.withResolvers();
const gz = new Compressor();
const emptyBuffer = Buffer.alloc(0);
let received = 0;
gz.on('data', function(c) {
received += c.length;
});
gz.on('error', reject);
gz.on('end', function() {
assert.strictEqual(received, expected,
`${received}, ${expected}, ${Compressor.name}`);
resolve();
});
gz.write(emptyBuffer);
gz.end();
await promise;
}
});
for (const [Compressor, expected] of compressors) {
const gz = new Compressor();
const emptyBuffer = Buffer.alloc(0);
let received = 0;
gz.on('data', function(c) {
received += c.length;
});
gz.on('error', common.mustNotCall());
gz.on('end', common.mustCall(() => {
assert.strictEqual(received, expected,
`${received}, ${expected}, ${Compressor.name}`);
}));
gz.write(emptyBuffer);
gz.end();
}

View File

@@ -1,5 +1,5 @@
'use strict';
require('../common');
const common = require('../common');
// This test ensures that zlib throws a RangeError if the final buffer needs to
// be larger than kMaxLength and concatenation fails.
@@ -19,9 +19,9 @@ buffer.kMaxLength = oldkMaxLength;
const encoded = Buffer.from('KLUv/SCARQAAEGFhAQA7BVg=', 'base64');
// Async
zlib.zstdDecompress(encoded, function(err) {
zlib.zstdDecompress(encoded, common.mustCall((err) => {
assert.ok(err instanceof RangeError);
});
}));
// Sync
assert.throws(function() {