Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
child_process: close pipe ends that are re-piped
When t0 and t1 are spawned and t0's output stream (fd 1 or 2) is piped into
t1's input, a new pipe is created that uses a copy of t0's fd.
This leaves the original copy in the Node parent unattended. The net result is
that when t0 produces data, it gets bifurcated into both copies.

Detect that the passed handle is of 'wrap' type and close it after the
native spawn invocation, by which time the piping will have completed.

Fixes: #9413
Fixes: #18016

PR-URL: #21209
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Anna Henningsen <anna@addaleax.net>
  • Loading branch information
gireeshpunathil committed Feb 6, 2019
commit b1f82e4342f8a630b1ef83cd33781a725428f569
6 changes: 6 additions & 0 deletions lib/internal/child_process.js
Original file line number Diff line number Diff line change
Expand Up @@ -384,6 +384,12 @@ ChildProcess.prototype.spawn = function(options) {
continue;
}

// stream is already cloned and piped, so close
if (stream.type === 'wrap') {
stream.handle.close();
continue;
}

if (stream.handle) {
// when i === 0 - we're dealing with stdin
// (which is the only one writable pipe)
Expand Down
51 changes: 51 additions & 0 deletions test/parallel/test-child-process-pipe-dataflow.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
'use strict';
const common = require('../common');
const assert = require('assert');
const path = require('path');
const fs = require('fs');
const spawn = require('child_process').spawn;
const tmpdir = require('../common/tmpdir');

const KB = 1024;
const MB = KB * KB;


// Verify that chaining processes preserves the data flow:
// `cat <file> | grep 'x' | wc -c` must report exactly 1MB,
// proving that no bytes are lost between the pipes.

{
  tmpdir.refresh();
  const file = path.resolve(tmpdir.path, 'data.txt');
  const buf = Buffer.alloc(MB).fill('x');

  // Many OS utilities attach special meaning to newlines
  // (line buffering, for example), so scatter newline bytes
  // through the buffer to force the data flow through the
  // streams to be split up at several points.
  for (let i = 0; i < KB; i++)
    buf[i * KB] = 10;
  fs.writeFileSync(file, buf.toString());

  const cat = spawn('cat', [file]);
  const grep = spawn('grep', ['x'], { stdio: [cat.stdout, 'pipe', 'pipe'] });
  const wc = spawn('wc', ['-c'], { stdio: [grep.stdout, 'pipe', 'pipe'] });

  // The byte count reported by wc must match the file size.
  wc.stdout.on('data', common.mustCall((data) => {
    assert.strictEqual(data.toString().trim(), MB.toString());
  }));

  // Every process in the chain must exit cleanly.
  for (const child of [cat, grep, wc]) {
    child.on('exit', common.mustCall((code) => {
      assert.strictEqual(code, 0);
    }));
  }
}