
Commit b4b4c11

gireeshpunathil authored and MylesBorins committed May 16, 2019
test: relax chunk count expectations
In parallel/test-fs-read-stream-concurrent-reads.js the number of data
chunks used is tested when a few concurrent reads are performed. The
number of chunks can fluctuate based on the number of concurrent reads
as well as the amount of data read in one shot. Accommodate these
variations in the test.

Fixes: #22339
PR-URL: #25415
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
1 parent 12fe2d3 commit b4b4c11

File tree

1 file changed: +4 −4 lines changed

test/parallel/test-fs-read-stream-concurrent-reads.js

+4 −4
@@ -13,7 +13,7 @@ const fs = require('fs');
 const filename = fixtures.path('loop.js'); // Some small non-homogeneous file.
 const content = fs.readFileSync(filename);
 
-const N = 1000;
+const N = 2000;
 let started = 0;
 let done = 0;
 
@@ -26,10 +26,10 @@ function startRead() {
     .on('data', (chunk) => {
       chunks.push(chunk);
       arrayBuffers.add(chunk.buffer);
-      if (started < N)
-        startRead();
     })
     .on('end', common.mustCall(() => {
+      if (started < N)
+        startRead();
       assert.deepStrictEqual(Buffer.concat(chunks), content);
       if (++done === N) {
         const retainedMemory =
@@ -43,5 +43,5 @@ function startRead() {
 
 // Don’t start the reads all at once – that way we would have to allocate
 // a large amount of memory upfront.
-for (let i = 0; i < 4; ++i)
+for (let i = 0; i < 6; ++i)
   startRead();
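
For context, below is a minimal, self-contained sketch of the pattern the updated test relies on after this change: each stream chains the next read from its 'end' handler rather than from 'data', and only a small number of reads are started up front. This is an approximation for illustration, not the real test file: it reads its own source instead of the loop.js fixture, omits the retained-memory assertion visible in the diff, and helper names such as startRead simply mirror the ones shown above.

'use strict';
// Illustrative sketch only: approximates the read-chaining pattern used by
// test/parallel/test-fs-read-stream-concurrent-reads.js after this commit.
const assert = require('assert');
const fs = require('fs');

const filename = __filename;               // any small file works for the sketch
const content = fs.readFileSync(filename); // expected contents of every read

const N = 2000;     // total number of reads to perform
let started = 0;
let done = 0;

function startRead() {
  ++started;
  const chunks = [];
  fs.createReadStream(filename)
    .on('data', (chunk) => {
      chunks.push(chunk);
    })
    .on('end', () => {
      // Chain the next read from 'end', not 'data': the number of 'data'
      // events a stream emits can vary, so kicking off new reads there would
      // make the effective concurrency (and total chunk count) unpredictable.
      if (started < N)
        startRead();
      assert.deepStrictEqual(Buffer.concat(chunks), content);
      if (++done === N)
        console.log(`all ${N} reads completed`);
    });
}

// Don’t start the reads all at once – that way we would have to allocate
// a large amount of memory upfront.
for (let i = 0; i < 6; ++i)
  startRead();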
