Skip to content

Commit

Permalink
prevent extraction in excessively deep subfolders
Browse files Browse the repository at this point in the history
By default, the limit is 1024 levels of subfolder nesting, but it can
be lowered, or set to Infinity to remove the limitation.
  • Loading branch information
isaacs committed Mar 16, 2024
1 parent fe7ebfd commit ffe6824
Show file tree
Hide file tree
Showing 5 changed files with 94 additions and 6 deletions.
10 changes: 10 additions & 0 deletions README.md
Expand Up @@ -115,6 +115,8 @@ Handlers receive 3 arguments:
encountered an error which prevented it from being unpacked. This occurs
when:
- an unrecoverable fs error happens during unpacking,
- an entry is trying to extract into an excessively deep
location (by default, limited to 1024 subfolders),
- an entry has `..` in the path and `preservePaths` is not set, or
- an entry is extracting through a symbolic link, when `preservePaths` is
not set.
Expand Down Expand Up @@ -427,6 +429,10 @@ The following options are supported:
`process.umask()` to determine the default umask value, since tar will
extract with whatever mode is provided, and let the process `umask` apply
normally.
- `maxDepth` The maximum depth of subfolders to extract into. This
  defaults to 1024. Entries nested deeper than the limit will raise a
  warning and be skipped. Set to `Infinity` to remove the
  limitation.

The following options are mostly internal, but can be modified in some
advanced use cases, such as re-using caches between runs.
Expand Down Expand Up @@ -749,6 +755,10 @@ Most unpack errors will cause a `warn` event to be emitted. If the
`process.umask()` to determine the default umask value, since tar will
extract with whatever mode is provided, and let the process `umask` apply
normally.
- `maxDepth` The maximum depth of subfolders to extract into. This
  defaults to 1024. Entries nested deeper than the limit will raise a
  warning and be skipped. Set to `Infinity` to remove the
  limitation.

### class tar.Unpack.Sync

Expand Down
27 changes: 22 additions & 5 deletions lib/unpack.js
Expand Up @@ -48,6 +48,7 @@ const crypto = require('crypto')
const getFlag = require('./get-write-flag.js')
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
const DEFAULT_MAX_DEPTH = 1024

// Unlinks on Windows are not atomic.
//
Expand Down Expand Up @@ -181,6 +182,12 @@ class Unpack extends Parser {
this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid() : null

// prevent excessively deep nesting of subfolders
// set to `Infinity` to remove this restriction
this.maxDepth = typeof opt.maxDepth === 'number'
? opt.maxDepth
: DEFAULT_MAX_DEPTH

// mostly just for testing, but useful in some cases.
// Forcibly trigger a chown on every entry, no matter what
this.forceChown = opt.forceChown === true
Expand Down Expand Up @@ -238,13 +245,13 @@ class Unpack extends Parser {
}

[CHECKPATH] (entry) {
const p = normPath(entry.path)
const parts = p.split('/')

if (this.strip) {
const parts = normPath(entry.path).split('/')
if (parts.length < this.strip) {
return false
}
entry.path = parts.slice(this.strip).join('/')

if (entry.type === 'Link') {
const linkparts = normPath(entry.linkpath).split('/')
if (linkparts.length >= this.strip) {
Expand All @@ -253,11 +260,21 @@ class Unpack extends Parser {
return false
}
}
parts.splice(0, this.strip)
entry.path = parts.join('/')
}

if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
entry,
path: p,
depth: parts.length,
maxDepth: this.maxDepth,
})
return false
}

if (!this.preservePaths) {
const p = normPath(entry.path)
const parts = p.split('/')
if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
entry,
Expand Down
Binary file added test/fixtures/excessively-deep.tar
Binary file not shown.
2 changes: 1 addition & 1 deletion test/parse.js
Expand Up @@ -646,7 +646,7 @@ t.test('truncated gzip input', t => {
p.write(tgz.slice(split))
p.end()
t.equal(aborted, true, 'aborted writing')
t.same(warnings, ['zlib: incorrect data check'])
t.match(warnings, [/^zlib: /])
t.end()
})

Expand Down
61 changes: 61 additions & 0 deletions test/unpack.js
Expand Up @@ -22,6 +22,7 @@ const mkdirp = require('mkdirp')
const mutateFS = require('mutate-fs')
const eos = require('end-of-stream')
const normPath = require('../lib/normalize-windows-path.js')
const ReadEntry = require('../lib/read-entry.js')

// On Windows in particular, the "really deep folder path" file
// often tends to cause problems, which don't indicate a failure
Expand Down Expand Up @@ -3235,3 +3236,63 @@ t.test('recognize C:.. as a dot path part', t => {

t.end()
})

t.test('excessively deep subfolder nesting', async t => {
  // the fixture contains a single file nested more than 1024 folders deep
  const tarfile = path.resolve(fixtures, 'excessively-deep.tar')
  const data = fs.readFileSync(tarfile)

  // collect each warning's code, message, and the data fields under test
  const warnings = []
  const onwarn = (code, message, { entry, path, depth, maxDepth }) => {
    warnings.push([code, message, { entry, path, depth, maxDepth }])
  }

  // assert that exactly the expected TAR_ENTRY_ERROR warning was raised
  // for the configured maxDepth, then reset the collector for the next run
  const check = (t, maxDepth = 1024) => {
    const expected = [
      [
        'TAR_ENTRY_ERROR',
        'path excessively deep',
        {
          entry: ReadEntry,
          path: /^\.(\/a){1024,}\/foo.txt$/,
          depth: 222372,
          maxDepth,
        },
      ],
    ]
    t.match(warnings, expected)
    warnings.length = 0
    t.end()
  }

  t.test('async', t => {
    const cwd = t.testdir()
    const unpack = new Unpack({ cwd, onwarn })
    unpack.on('end', () => check(t))
    unpack.end(data)
  })

  t.test('sync', t => {
    const cwd = t.testdir()
    new UnpackSync({ cwd, onwarn }).end(data)
    check(t)
  })

  t.test('async set md', t => {
    const cwd = t.testdir()
    const unpack = new Unpack({ cwd, onwarn, maxDepth: 64 })
    unpack.on('end', () => check(t, 64))
    unpack.end(data)
  })

  t.test('sync set md', t => {
    const cwd = t.testdir()
    new UnpackSync({ cwd, onwarn, maxDepth: 64 }).end(data)
    check(t, 64)
  })
})

0 comments on commit ffe6824

Please sign in to comment.