fix(unpack): always resume parsing after an entry error
Also, close the fd, no matter what happens.

Fix: #265
Fix: #276
isaacs committed Aug 3, 2021
1 parent 488ab8c commit 97c46fc
Showing 2 changed files with 215 additions and 12 deletions.
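
In short: when a file entry fails, the unpacker must both close the open file descriptor and mark the entry as fully done, so the parser resumes with the next entry instead of hanging. A minimal, self-contained sketch of that pattern (illustrative names only — writeEntry, onError, done are not tar's internal API; the real change is in the diff below):

const fs = require('fs')

// Hypothetical stand-in for a per-entry file writer. On any failure it
// reports the error, closes the fd if one was opened, and ALWAYS calls
// done() so the caller (the parser) can keep going.
const writeEntry = (target, data, onError, done) => {
  fs.open(target, 'w', (er, fd) => {
    if (er) {
      onError(er)
      done() // still signal completion, so parsing resumes
      return
    }
    fs.write(fd, data, er => {
      fs.close(fd, () => {}) // close the fd, no matter what happened
      if (er)
        onError(er)
      done()
    })
  })
}
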
48 changes: 36 additions & 12 deletions lib/unpack.js
@@ -335,17 +335,31 @@ class Unpack extends Parser {
       mode: mode,
       autoClose: false,
     })
-    stream.on('error', er => this[ONERROR](er, entry))
+    stream.on('error', er => {
+      if (stream.fd)
+        fs.close(stream.fd, () => {})
+      this[ONERROR](er, entry)
+      fullyDone()
+    })
 
     let actions = 1
     const done = er => {
-      if (er)
-        return this[ONERROR](er, entry)
+      if (er) {
+        /* istanbul ignore else - we should always have a fd by now */
+        if (stream.fd)
+          fs.close(stream.fd, () => {})
+        this[ONERROR](er, entry)
+        fullyDone()
+        return
+      }
 
       if (--actions === 0) {
         fs.close(stream.fd, er => {
+          if (er)
+            this[ONERROR](er, entry)
+          else
+            this[UNPEND]()
           fullyDone()
-          er ? this[ONERROR](er, entry) : this[UNPEND]()
         })
       }
     }
@@ -380,7 +394,10 @@ class Unpack extends Parser {
 
     const tx = this.transform ? this.transform(entry) || entry : entry
     if (tx !== entry) {
-      tx.on('error', er => this[ONERROR](er, entry))
+      tx.on('error', er => {
+        this[ONERROR](er, entry)
+        fullyDone()
+      })
       entry.pipe(tx)
     }
     tx.pipe(stream)
@@ -390,8 +407,9 @@ class Unpack extends Parser {
     const mode = entry.mode & 0o7777 || this.dmode
     this[MKDIR](entry.absolute, mode, er => {
       if (er) {
+        this[ONERROR](er, entry)
         fullyDone()
-        return this[ONERROR](er, entry)
+        return
       }
 
       let actions = 1
@@ -482,8 +500,9 @@ class Unpack extends Parser {
 
     this[MKDIR](path.dirname(entry.absolute), this.dmode, er => {
       if (er) {
+        this[ONERROR](er, entry)
         done()
-        return this[ONERROR](er, entry)
+        return
       }
       fs.lstat(entry.absolute, (er, st) => {
         if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
@@ -509,8 +528,11 @@ class Unpack extends Parser {
   }
 
   [MAKEFS] (er, entry, done) {
-    if (er)
-      return this[ONERROR](er, entry)
+    if (er) {
+      this[ONERROR](er, entry)
+      done()
+      return
+    }
 
     switch (entry.type) {
       case 'File':
@@ -534,10 +556,12 @@ class Unpack extends Parser {
     // XXX: get the type ('file' or 'dir') for windows
     fs[link](linkpath, entry.absolute, er => {
       if (er)
-        return this[ONERROR](er, entry)
+        this[ONERROR](er, entry)
+      else {
+        this[UNPEND]()
+        entry.resume()
+      }
       done()
-      this[UNPEND]()
-      entry.resume()
     })
   }
 }
179 changes: 179 additions & 0 deletions test/unpack.js
@@ -2701,3 +2701,182 @@ t.test('using strip option when top level file exists', t => {
     check(t, path)
   })
 })
+
+t.test('handle EPERMs when creating symlinks', t => {
+  // https://github.com/npm/node-tar/issues/265
+  const msg = 'You do not have sufficient privilege to perform this operation.'
+  const er = Object.assign(new Error(msg), {
+    code: 'EPERM',
+  })
+  t.teardown(mutateFS.fail('symlink', er))
+  const data = makeTar([
+    {
+      path: 'x',
+      type: 'Directory',
+    },
+    {
+      path: 'x/y',
+      type: 'File',
+      size: 'hello, world'.length,
+    },
+    'hello, world',
+    {
+      path: 'x/link1',
+      type: 'SymbolicLink',
+      linkpath: './y',
+    },
+    {
+      path: 'x/link2',
+      type: 'SymbolicLink',
+      linkpath: './y',
+    },
+    {
+      path: 'x/link3',
+      type: 'SymbolicLink',
+      linkpath: './y',
+    },
+    {
+      path: 'x/z',
+      type: 'File',
+      size: 'hello, world'.length,
+    },
+    'hello, world',
+    '',
+    '',
+  ])
+
+  const dir = path.resolve(unpackdir, 'eperm-symlinks')
+  mkdirp.sync(`${dir}/sync`)
+  mkdirp.sync(`${dir}/async`)
+
+  const check = path => {
+    t.match(WARNINGS, [
+      ['TAR_ENTRY_ERROR', msg],
+      ['TAR_ENTRY_ERROR', msg],
+      ['TAR_ENTRY_ERROR', msg],
+    ], 'got expected warnings')
+    t.equal(WARNINGS.length, 3)
+    WARNINGS.length = 0
+    t.equal(fs.readFileSync(`${path}/x/y`, 'utf8'), 'hello, world')
+    t.equal(fs.readFileSync(`${path}/x/z`, 'utf8'), 'hello, world')
+    t.throws(() => fs.statSync(`${path}/x/link1`), { code: 'ENOENT' })
+    t.throws(() => fs.statSync(`${path}/x/link2`), { code: 'ENOENT' })
+    t.throws(() => fs.statSync(`${path}/x/link3`), { code: 'ENOENT' })
+  }
+
+  const WARNINGS = []
+  const u = new Unpack({
+    cwd: `${dir}/async`,
+    onwarn: (code, msg, er) => WARNINGS.push([code, msg]),
+  })
+  u.on('end', () => {
+    check(`${dir}/async`)
+    const u = new UnpackSync({
+      cwd: `${dir}/sync`,
+      onwarn: (code, msg, er) => WARNINGS.push([code, msg]),
+    })
+    u.end(data)
+    check(`${dir}/sync`)
+    t.end()
+  })
+  u.end(data)
+})
+
+t.test('close fd when error writing', t => {
+  const data = makeTar([
+    {
+      type: 'Directory',
+      path: 'x',
+    },
+    {
+      type: 'File',
+      size: 1,
+      path: 'x/y',
+    },
+    '.',
+    '',
+    '',
+  ])
+  t.teardown(mutateFS.fail('write', new Error('nope')))
+  const CLOSES = []
+  const OPENS = {}
+  const {open} = require('fs')
+  t.teardown(() => fs.open = open)
+  fs.open = (...args) => {
+    const cb = args.pop()
+    args.push((er, fd) => {
+      OPENS[args[0]] = fd
+      cb(er, fd)
+    })
+    return open.call(fs, ...args)
+  }
+  t.teardown(mutateFS.mutateArgs('close', ([fd]) => {
+    CLOSES.push(fd)
+    return [fd]
+  }))
+  const WARNINGS = []
+  const dir = path.resolve(unpackdir, 'close-on-write-error')
+  mkdirp.sync(dir)
+  const unpack = new Unpack({
+    cwd: dir,
+    onwarn: (code, msg) => WARNINGS.push([code, msg]),
+  })
+  unpack.on('end', () => {
+    for (const [path, fd] of Object.entries(OPENS))
+      t.equal(CLOSES.includes(fd), true, 'closed fd for ' + path)
+    t.end()
+  })
+  unpack.end(data)
+})
+
+t.test('close fd when error setting mtime', t => {
+  const data = makeTar([
+    {
+      type: 'Directory',
+      path: 'x',
+    },
+    {
+      type: 'File',
+      size: 1,
+      path: 'x/y',
+      atime: new Date('1979-07-01T19:10:00.000Z'),
+      ctime: new Date('2011-03-27T22:16:31.000Z'),
+      mtime: new Date('2011-03-27T22:16:31.000Z'),
+    },
+    '.',
+    '',
+    '',
+  ])
+  // have to clobber these both, because we fall back
+  t.teardown(mutateFS.fail('futimes', new Error('nope')))
+  t.teardown(mutateFS.fail('utimes', new Error('nooooope')))
+  const CLOSES = []
+  const OPENS = {}
+  const {open} = require('fs')
+  t.teardown(() => fs.open = open)
+  fs.open = (...args) => {
+    const cb = args.pop()
+    args.push((er, fd) => {
+      OPENS[args[0]] = fd
+      cb(er, fd)
+    })
+    return open.call(fs, ...args)
+  }
+  t.teardown(mutateFS.mutateArgs('close', ([fd]) => {
+    CLOSES.push(fd)
+    return [fd]
+  }))
+  const WARNINGS = []
+  const dir = path.resolve(unpackdir, 'close-on-futimes-error')
+  mkdirp.sync(dir)
+  const unpack = new Unpack({
+    cwd: dir,
+    onwarn: (code, msg) => WARNINGS.push([code, msg]),
+  })
+  unpack.on('end', () => {
+    for (const [path, fd] of Object.entries(OPENS))
+      t.equal(CLOSES.includes(fd), true, 'closed fd for ' + path)
+    t.end()
+  })
+  unpack.end(data)
+})
