Skip to content

Commit 84acbd3

Browse files
committed
fix(unpack): fix hang on large file on open() fail
The fs-minipass WriteStream always returns false from a write() call if it did not successfully open the file descriptor. This makes the Parse/Unpack stream get backed up if the file was large enough that it had not already moved on to the next entry in the archive. Fixes: #264
1 parent 97c46fc commit 84acbd3

File tree

3 files changed

+56
-6
lines changed

3 files changed

+56
-6
lines changed

lib/unpack.js

+4-1
Original file line numberDiff line numberDiff line change
@@ -338,6 +338,10 @@ class Unpack extends Parser {
338338
stream.on('error', er => {
339339
if (stream.fd)
340340
fs.close(stream.fd, () => {})
341+
// flush all the data out so that we aren't left hanging
342+
// if the error wasn't actually fatal. otherwise the parse
343+
// is blocked, and we never proceed.
344+
stream.write = () => true
341345
this[ONERROR](er, entry)
342346
fullyDone()
343347
})
@@ -510,7 +514,6 @@ class Unpack extends Parser {
510514
done()
511515
} else if (er || this[ISREUSABLE](entry, st))
512516
this[MAKEFS](null, entry, done)
513-
514517
else if (st.isDirectory()) {
515518
if (entry.type === 'Directory') {
516519
if (!this.noChmod && (!entry.mode || (st.mode & 0o7777) === entry.mode))

test/make-tar.js

+13-5
Original file line numberDiff line numberDiff line change
@@ -4,14 +4,22 @@ if (module === require.main)
44
return require('tap').pass('this is fine')
55

66
const Header = require('../lib/header.js')
7-
module.exports = chunks =>
8-
Buffer.concat(chunks.map(chunk => {
9-
if (Buffer.isBuffer(chunk))
7+
module.exports = chunks => {
8+
let dataLen = 0
9+
return Buffer.concat(chunks.map(chunk => {
10+
if (Buffer.isBuffer(chunk)) {
11+
dataLen += chunk.length
1012
return chunk
11-
const buf = Buffer.alloc(512)
13+
}
14+
const size = Math.max(typeof chunk === 'string'
15+
? 512 * Math.ceil(chunk.length / 512)
16+
: 512)
17+
dataLen += size
18+
const buf = Buffer.alloc(size)
1219
if (typeof chunk === 'string')
1320
buf.write(chunk)
1421
else
1522
new Header(chunk).encode(buf, 0)
1623
return buf
17-
}), chunks.length * 512)
24+
}), dataLen)
25+
}

test/unpack.js

+39
Original file line numberDiff line numberDiff line change
@@ -2880,3 +2880,42 @@ t.test('close fd when error setting mtime', t => {
28802880
})
28812881
unpack.end(data)
28822882
})
2883+
2884+
t.test('do not hang on large files that fail to open()', t => {
  // One directory, then a file entry whose 31745-byte body spans several
  // tar blocks, followed by the two end-of-archive null blocks.
  const archive = makeTar([
    { type: 'Directory', path: 'x' },
    { type: 'File', size: 31745, path: 'x/y' },
    'x'.repeat(31745),
    '',
    '',
  ])
  // Force every open() to fail so the file's WriteStream never gets an fd.
  t.teardown(mutateFS.fail('open', new Error('nope')))
  const cwd = path.resolve(unpackdir, 'no-hang-for-large-file-failures')
  mkdirp.sync(cwd)
  const warnings = []
  const unpack = new Unpack({
    cwd,
    onwarn: (code, msg) => warnings.push([code, msg]),
  })
  unpack.on('end', () => {
    // The open() failure must surface as a single entry-level warning,
    // and 'end' must actually fire — i.e. the parse did not hang.
    t.strictSame(warnings, [['TAR_ENTRY_ERROR', 'nope']])
    t.end()
  })
  // Feed the archive in three slices across separate macrotasks so the
  // failed entry's body arrives after the open() error has happened.
  unpack.write(archive.slice(0, 2048))
  setTimeout(() => {
    unpack.write(archive.slice(2048, 4096))
    setTimeout(() => {
      unpack.write(archive.slice(4096))
      setTimeout(() => unpack.end())
    })
  })
})

0 commit comments

Comments (0)