Skip to content

Commit

Permalink
standard: --fix for standard@11
Browse files Browse the repository at this point in the history
  • Loading branch information
zkat committed Apr 9, 2018
1 parent 3817b7a commit 109ff93
Show file tree
Hide file tree
Showing 10 changed files with 112 additions and 112 deletions.
14 changes: 7 additions & 7 deletions get.js
Expand Up @@ -20,8 +20,8 @@ function getData (byDigest, cache, key, opts) {
opts = opts || {}
const memoized = (
byDigest
? memo.get.byDigest(cache, key, opts)
: memo.get(cache, key, opts)
? memo.get.byDigest(cache, key, opts)
: memo.get(cache, key, opts)
)
if (memoized && opts.memoize !== false) {
return BB.resolve(byDigest ? memoized : {
Expand Down Expand Up @@ -180,11 +180,11 @@ function copy (byDigest, cache, key, dest, opts) {
} else {
return getData(byDigest, cache, key, opts).then(res => {
return fs.writeFileAsync(dest, byDigest ? res : res.data)
.then(() => byDigest ? key : {
metadata: res.metadata,
size: res.size,
integrity: res.integrity
})
.then(() => byDigest ? key : {
metadata: res.metadata,
size: res.size,
integrity: res.integrity
})
})
}
}
42 changes: 21 additions & 21 deletions lib/content/read.js
Expand Up @@ -68,17 +68,17 @@ module.exports.hasContent = hasContent
// Checks whether the cache holds content matching `integrity`.
// Resolves to `{ sri, size }` when matching content exists, or `false`
// when it does not. Uses Bluebird's predicate-filtered `.catch`.
// FIX: the scraped diff duplicated the old and new (re-indented) copies of
// the promise chain; exactly one copy is kept here.
function hasContent (cache, integrity) {
  // Nothing to look up without an integrity string.
  if (!integrity) { return BB.resolve(false) }
  return pickContentSri(cache, integrity)
    .catch({code: 'ENOENT'}, () => false)
    .catch({code: 'EPERM'}, err => {
      // On Windows an EPERM can simply mean the file is inaccessible,
      // so treat it as "not present" there; elsewhere it is a real error.
      if (process.platform !== 'win32') {
        throw err
      } else {
        return false
      }
    }).then(content => {
      if (!content.sri) return false
      return ({ sri: content.sri, size: content.stat.size })
    })
}

module.exports._pickContentSri = pickContentSri
Expand All @@ -95,16 +95,16 @@ function pickContentSri (cache, integrity) {
return BB.any(sri[sri.pickAlgorithm()].map(meta => {
return pickContentSri(cache, meta)
}))
.catch(err => {
if ([].some.call(err, e => e.code === 'ENOENT')) {
throw Object.assign(
new Error('No matching content found for ' + sri.toString()),
{code: 'ENOENT'}
)
} else {
throw err[0]
}
})
.catch(err => {
if ([].some.call(err, e => e.code === 'ENOENT')) {
throw Object.assign(
new Error('No matching content found for ' + sri.toString()),
{code: 'ENOENT'}
)
} else {
throw err[0]
}
})
}
}

Expand Down
10 changes: 5 additions & 5 deletions lib/entry-index.js
Expand Up @@ -197,9 +197,9 @@ function hashEntry (str) {

// Hashes `str` with the given digest algorithm (e.g. 'sha1', 'sha256')
// and returns the hex-encoded digest string.
// FIX: the scraped diff duplicated the method chain (old + re-indented new
// lines), which would call `.createHash` on the resulting string and throw
// at runtime; exactly one copy of the chain is kept.
function hash (str, digest) {
  return crypto
    .createHash(digest)
    .update(str)
    .digest('hex')
}

function formatEntry (cache, entry) {
Expand All @@ -217,8 +217,8 @@ function formatEntry (cache, entry) {

// Reads a directory listing, resolving to [] when the directory is missing
// (ENOENT) or the path is actually a file (ENOTDIR). Uses Bluebird's
// predicate-filtered `.catch`; other errors propagate.
// FIX: the scraped diff duplicated the `.catch` lines (old + re-indented
// new copies); exactly one copy of each is kept.
function readdirOrEmpty (dir) {
  return readdirAsync(dir)
    .catch({code: 'ENOENT'}, () => [])
    .catch({code: 'ENOTDIR'}, () => [])
}

function nop () {
Expand Down
36 changes: 18 additions & 18 deletions test/benchmarks/content.read.js
Expand Up @@ -74,17 +74,17 @@ module.exports = (suite, CACHE) => {
fn (deferred) {
if (read.copy) {
read.copy(CACHE, INTEGRITY, path.join(CACHE, 'data'))
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
} else {
read(CACHE, INTEGRITY)
.then(data => fs.writeFileAsync(path.join(CACHE, 'data'), data))
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
.then(data => fs.writeFileAsync(path.join(CACHE, 'data'), data))
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
}
}
})
Expand All @@ -100,17 +100,17 @@ module.exports = (suite, CACHE) => {
fn (deferred) {
if (read.copy) {
read.copy(CACHE, BIGINTEGRITY, path.join(CACHE, 'bigdata'))
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
} else {
read(CACHE, BIGINTEGRITY)
.then(data => fs.writeFileAsync(path.join(CACHE, 'bigdata'), data))
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
.then(data => fs.writeFileAsync(path.join(CACHE, 'bigdata'), data))
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
}
}
})
Expand Down
16 changes: 8 additions & 8 deletions test/benchmarks/get.js
Expand Up @@ -108,10 +108,10 @@ module.exports = (suite, CACHE) => {
},
fn (deferred) {
get.copy.byDigest(CACHE, INTEGRITY, path.join(CACHE, 'data'))
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
}
})

Expand All @@ -125,10 +125,10 @@ module.exports = (suite, CACHE) => {
},
fn (deferred) {
get.copy.byDigest(CACHE, BIGINTEGRITY, path.join(CACHE, 'data'))
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
.then(
() => deferred.resolve(),
err => deferred.reject(err)
)
}
})
}
16 changes: 8 additions & 8 deletions test/benchmarks/index.js
Expand Up @@ -13,8 +13,8 @@ const WARN_RANGE = 5
const suite = new Benchmark.Suite({
onStart () {
let previousPath = process.env.COMPARETO
? path.resolve(process.env.COMPARETO)
: PREVIOUS
? path.resolve(process.env.COMPARETO)
: PREVIOUS
try {
this.previous = require(previousPath)
} catch (e) {}
Expand All @@ -29,14 +29,14 @@ const suite = new Benchmark.Suite({
const prev = this.previous && this.previous[bench.name]
const pctDelta = prev && (((bench.stats.mean - prev.stats.mean) / prev.stats.mean) * 100)
let colorDiff = !prev
? ''
: `${pctDelta > 0 ? '+' : ''}${pctDelta.toFixed(2)}% `
? ''
: `${pctDelta > 0 ? '+' : ''}${pctDelta.toFixed(2)}% `
colorDiff = ` (${
pctDelta >= (WARN_RANGE + bench.stats.rme)
? chalk.red(colorDiff)
: pctDelta <= -(WARN_RANGE + bench.stats.rme)
? chalk.green(colorDiff)
: colorDiff
? chalk.red(colorDiff)
: pctDelta <= -(WARN_RANGE + bench.stats.rme)
? chalk.green(colorDiff)
: colorDiff
}±${bench.stats.rme.toFixed(2)}%)`
console.log(` ${bench.name}`)
console.log('------------------------------------------------')
Expand Down
20 changes: 10 additions & 10 deletions test/content.read.js
Expand Up @@ -142,18 +142,18 @@ test('hasContent: returns { sri, size } when a cache file exists', function (t)
fixture.create(CACHE)
return BB.join(
read.hasContent(CACHE, 'sha1-deadbeef')
.then(content => {
t.ok(content.sri, 'returned sri for this content')
t.equal(content.size, 0, 'returned the right size for this content')
}),
.then(content => {
t.ok(content.sri, 'returned sri for this content')
t.equal(content.size, 0, 'returned the right size for this content')
}),
read.hasContent(CACHE, 'sha1-not-there')
.then(content => {
t.equal(content, false, 'returned false for missing content')
}),
.then(content => {
t.equal(content, false, 'returned false for missing content')
}),
read.hasContent(CACHE, 'sha1-not-here sha1-also-not-here')
.then(content => {
t.equal(content, false, 'multi-content hash failures work ok')
})
.then(content => {
t.equal(content, false, 'multi-content hash failures work ok')
})
)
})

Expand Down
2 changes: 1 addition & 1 deletion test/content.write.js
Expand Up @@ -143,7 +143,7 @@ test('does not overwrite content if already on disk', t => {

test('errors if input stream errors', t => {
const stream = fromString('foobarbaz')
.on('end', () => stream.emit('error', new Error('bleh')))
.on('end', () => stream.emit('error', new Error('bleh')))
let integrity
const putter = write.stream(CACHE).on('integrity', int => {
integrity = int
Expand Down
38 changes: 19 additions & 19 deletions test/get.js
Expand Up @@ -114,25 +114,25 @@ test('get.copy', t => {
}))
fixture.create(CACHE)
return index.insert(CACHE, KEY, INTEGRITY, opts())
.then(() => get.copy(CACHE, KEY, DEST))
.then(res => {
t.deepEqual(res, {
metadata: METADATA,
integrity: INTEGRITY,
size: SIZE
}, 'copy operation returns basic metadata')
return fs.readFileAsync(DEST)
})
.then(data => {
t.deepEqual(data, CONTENT, 'data copied by key matches')
return rimraf(DEST)
})
.then(() => get.copy.byDigest(CACHE, INTEGRITY, DEST))
.then(() => fs.readFileAsync(DEST))
.then(data => {
t.deepEqual(data, CONTENT, 'data copied by digest matches')
return rimraf(DEST)
})
.then(() => get.copy(CACHE, KEY, DEST))
.then(res => {
t.deepEqual(res, {
metadata: METADATA,
integrity: INTEGRITY,
size: SIZE
}, 'copy operation returns basic metadata')
return fs.readFileAsync(DEST)
})
.then(data => {
t.deepEqual(data, CONTENT, 'data copied by key matches')
return rimraf(DEST)
})
.then(() => get.copy.byDigest(CACHE, INTEGRITY, DEST))
.then(() => fs.readFileAsync(DEST))
.then(data => {
t.deepEqual(data, CONTENT, 'data copied by digest matches')
return rimraf(DEST)
})
})

test('ENOENT if not found', t => {
Expand Down
30 changes: 15 additions & 15 deletions test/ls.js
Expand Up @@ -149,21 +149,21 @@ test('correctly ignores deleted entries', t => {
CACHE, contents.whatwhere.integrity)
fixture.create(CACHE)
return index.delete(CACHE, 'whatnot')
.then(() => ls(CACHE))
.then(listing => t.deepEqual(listing, {
whatever: contents.whatever,
whatwhere: contents.whatwhere
}, 'index contents correct'))
.then(() => {
const listing = []
const stream = ls.stream(CACHE)
stream.on('data', entry => {
listing[entry.key] = entry
})
return finished(stream)
.then(() => t.deepEqual(listing, {
.then(() => ls(CACHE))
.then(listing => t.deepEqual(listing, {
whatever: contents.whatever,
whatwhere: contents.whatwhere
}, 'ls is streamable'))
})
}, 'index contents correct'))
.then(() => {
const listing = []
const stream = ls.stream(CACHE)
stream.on('data', entry => {
listing[entry.key] = entry
})
return finished(stream)
.then(() => t.deepEqual(listing, {
whatever: contents.whatever,
whatwhere: contents.whatwhere
}, 'ls is streamable'))
})
})

0 comments on commit 109ff93

Please sign in to comment.