feat: remove sync methods (#140)
BREAKING CHANGE: this package is now async-only; all synchronous methods have been removed
nlf committed Oct 12, 2022
1 parent 295b5b1 commit f57bb4d
Showing 12 changed files with 56 additions and 750 deletions.
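For consumers, the migration is mechanical: every removed `.sync` method has a promise-returning counterpart under the same name. A minimal sketch (method names taken from the exports removed below; actual call sites will vary):

```javascript
const cacache = require('cacache')

async function readEntry (cache, key) {
  // Before this commit: const { data } = cacache.get.sync(cache, key)
  // After: the same method, awaited
  const { data, metadata, integrity } = await cacache.get(cache, key)
  return { data, metadata, integrity }
}
```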
2 changes: 1 addition & 1 deletion README.md
@@ -601,7 +601,7 @@ See: [options](#tmp-options)

```javascript
cacache.tmp.withTmp(cache, dir => {
-return fs.writeFileAsync(path.join(dir, 'blablabla'), Buffer#<1234>, ...)
+return fs.writeFile(path.join(dir, 'blablabla'), 'blabla contents', { encoding: 'utf8' })
}).then(() => {
// `dir` no longer exists
})
```
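The corrected README snippet still chains `.then()`; an equivalent `async`/`await` form (illustrative only, using `fs/promises`) would be:

```javascript
const path = require('path')
const fs = require('fs/promises')
const cacache = require('cacache')

async function writeTempFile (cache) {
  await cacache.tmp.withTmp(cache, async (dir) => {
    await fs.writeFile(path.join(dir, 'blablabla'), 'blabla contents', { encoding: 'utf8' })
  })
  // `dir` no longer exists once withTmp resolves
}
```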
75 changes: 0 additions & 75 deletions lib/content/read.js
@@ -46,24 +46,6 @@ const readPipeline = (cpath, size, sri, stream) => {
return stream
}

module.exports.sync = readSync

function readSync (cache, integrity, opts = {}) {
const { size } = opts
return withContentSriSync(cache, integrity, (cpath, sri) => {
const data = fs.readFileSync(cpath, { encoding: null })
if (typeof size === 'number' && size !== data.length) {
throw sizeError(size, data.length)
}

if (ssri.checkData(data, sri)) {
return data
}

throw integrityError(sri, cpath)
})
}

module.exports.stream = readStream
module.exports.readStream = readStream

@@ -88,20 +70,13 @@ function readStream (cache, integrity, opts = {}) {
}

module.exports.copy = copy
module.exports.copy.sync = copySync

function copy (cache, integrity, dest) {
return withContentSri(cache, integrity, (cpath, sri) => {
return fs.copyFile(cpath, dest)
})
}

function copySync (cache, integrity, dest) {
return withContentSriSync(cache, integrity, (cpath, sri) => {
return fs.copyFileSync(cpath, dest)
})
}

module.exports.hasContent = hasContent

async function hasContent (cache, integrity) {
@@ -130,34 +105,6 @@ async function hasContent (cache, integrity) {
}
}

module.exports.hasContent.sync = hasContentSync

function hasContentSync (cache, integrity) {
if (!integrity) {
return false
}

return withContentSriSync(cache, integrity, (cpath, sri) => {
try {
const stat = fs.statSync(cpath)
return { size: stat.size, sri, stat }
} catch (err) {
if (err.code === 'ENOENT') {
return false
}

if (err.code === 'EPERM') {
/* istanbul ignore else */
if (process.platform !== 'win32') {
throw err
} else {
return false
}
}
}
})
}

async function withContentSri (cache, integrity, fn) {
const sri = ssri.parse(integrity)
// If `integrity` has multiple entries, pick the first digest
@@ -201,28 +148,6 @@ }
}
}

function withContentSriSync (cache, integrity, fn) {
const sri = ssri.parse(integrity)
// If `integrity` has multiple entries, pick the first digest
// with available local data.
const algo = sri.pickAlgorithm()
const digests = sri[algo]
if (digests.length <= 1) {
const cpath = contentPath(cache, digests[0])
return fn(cpath, digests[0])
} else {
let lastErr = null
for (const meta of digests) {
try {
return withContentSriSync(cache, meta, fn)
} catch (err) {
lastErr = err
}
}
throw lastErr
}
}

function sizeError (expected, found) {
/* eslint-disable-next-line max-len */
const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
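With `readSync`, `copySync`, and `hasContentSync` deleted, callers use the async functions that remain in this file. A hypothetical sketch against the surviving exports (the internal module path is shown for illustration only):

```javascript
const read = require('cacache/lib/content/read')

async function copyIfPresent (cache, integrity, dest) {
  // hasContent resolves to false, or to { size, sri, stat } on a hit
  const info = await read.hasContent(cache, integrity)
  if (info) {
    // copy writes the verified content out to dest via fs.copyFile
    await read.copy(cache, integrity, dest)
  }
  return info
}
```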
67 changes: 2 additions & 65 deletions lib/entry-index.js
@@ -15,7 +15,6 @@ const indexV = require('../package.json')['cache-version'].index
const moveFile = require('@npmcli/move-file')
const _rimraf = require('rimraf')
const rimraf = util.promisify(_rimraf)
rimraf.sync = _rimraf.sync

module.exports.NotFoundError = class NotFoundError extends Error {
constructor (cache, key) {
@@ -151,31 +150,6 @@ async function insert (cache, key, integrity, opts = {}) {
return formatEntry(cache, entry)
}

module.exports.insert.sync = insertSync

function insertSync (cache, key, integrity, opts = {}) {
const { metadata, size } = opts
const bucket = bucketPath(cache, key)
const entry = {
key,
integrity: integrity && ssri.stringify(integrity),
time: Date.now(),
size,
metadata,
}
fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
const stringified = JSON.stringify(entry)
fs.appendFileSync(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
try {
fixOwner.chownr.sync(cache, bucket)
} catch (err) {
if (err.code !== 'ENOENT') {
throw err
}
}
return formatEntry(cache, entry)
}

module.exports.find = find

async function find (cache, key) {
@@ -198,27 +172,6 @@ async function find (cache, key) {
}
}

module.exports.find.sync = findSync

function findSync (cache, key) {
const bucket = bucketPath(cache, key)
try {
return bucketEntriesSync(bucket).reduce((latest, next) => {
if (next && next.key === key) {
return formatEntry(cache, next)
} else {
return latest
}
}, null)
} catch (err) {
if (err.code === 'ENOENT') {
return null
} else {
throw err
}
}
}

module.exports.delete = del

function del (cache, key, opts = {}) {
@@ -230,17 +183,6 @@ function del (cache, key, opts = {}) {
return rimraf(bucket)
}

module.exports.delete.sync = delSync

function delSync (cache, key, opts = {}) {
if (!opts.removeFully) {
return insertSync(cache, key, null, opts)
}

const bucket = bucketPath(cache, key)
return rimraf.sync(bucket)
}

module.exports.lsStream = lsStream

function lsStream (cache) {
@@ -308,13 +250,6 @@ async function bucketEntries (bucket, filter) {
return _bucketEntries(data, filter)
}

module.exports.bucketEntries.sync = bucketEntriesSync

function bucketEntriesSync (bucket, filter) {
const data = fs.readFileSync(bucket, 'utf8')
return _bucketEntries(data, filter)
}

function _bucketEntries (data, filter) {
const entries = []
data.split('\n').forEach((entry) => {
@@ -335,6 +270,8 @@ function _bucketEntries (data, filter) {
// Entry is corrupted!
return
}
+// coverage disabled here, no need to test with an entry that parses to something falsey
+// istanbul ignore else
if (obj) {
entries.push(obj)
}
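The index now has only async entry points; `insertSync`, `findSync`, `delSync`, and `bucketEntriesSync` are gone. Through the public API (wired up in lib/index.js below), the equivalent flow might look like:

```javascript
const cacache = require('cacache')

async function trackEntry (cache, key, data) {
  // put writes the content and inserts an index entry,
  // resolving to the content's integrity string
  const integrity = await cacache.put(cache, key, data)

  // get.info reads back the newest index entry for the key (or null)
  const entry = await cacache.get.info(cache, key)
  return { integrity, entry }
}
```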
55 changes: 0 additions & 55 deletions lib/get.js
@@ -53,61 +53,6 @@ async function getDataByDigest (cache, key, opts = {}) {
}
module.exports.byDigest = getDataByDigest

function getDataSync (cache, key, opts = {}) {
const { integrity, memoize, size } = opts
const memoized = memo.get(cache, key, opts)

if (memoized && memoize !== false) {
return {
metadata: memoized.entry.metadata,
data: memoized.data,
integrity: memoized.entry.integrity,
size: memoized.entry.size,
}
}
const entry = index.find.sync(cache, key, opts)
if (!entry) {
throw new index.NotFoundError(cache, key)
}
const data = read.sync(cache, entry.integrity, {
integrity: integrity,
size: size,
})
const res = {
metadata: entry.metadata,
data: data,
size: entry.size,
integrity: entry.integrity,
}
if (memoize) {
memo.put(cache, entry, res.data, opts)
}

return res
}

module.exports.sync = getDataSync

function getDataByDigestSync (cache, digest, opts = {}) {
const { integrity, memoize, size } = opts
const memoized = memo.get.byDigest(cache, digest, opts)

if (memoized && memoize !== false) {
return memoized
}

const res = read.sync(cache, digest, {
integrity: integrity,
size: size,
})
if (memoize) {
memo.put.byDigest(cache, digest, res, opts)
}

return res
}
module.exports.sync.byDigest = getDataByDigestSync

const getMemoizedStream = (memoized) => {
const stream = new Minipass()
stream.on('newListener', function (ev, cb) {
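Both removed getters have async counterparts earlier in this file, with the same memoization semantics. For instance (a sketch; options as documented for the async API):

```javascript
const cacache = require('cacache')

async function fetchByDigest (cache, integrity) {
  // byDigest skips the index and reads content directly by its hash,
  // resolving to a Buffer; memoize caches the result in memory
  return cacache.get.byDigest(cache, integrity, { memoize: true })
}
```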
3 changes: 0 additions & 3 deletions lib/index.js
@@ -17,15 +17,12 @@ module.exports.ls.stream = index.lsStream

module.exports.get = get
module.exports.get.byDigest = get.byDigest
module.exports.get.sync = get.sync
module.exports.get.sync.byDigest = get.sync.byDigest
module.exports.get.stream = get.stream
module.exports.get.stream.byDigest = get.stream.byDigest
module.exports.get.copy = get.copy
module.exports.get.copy.byDigest = get.copy.byDigest
module.exports.get.info = get.info
module.exports.get.hasContent = get.hasContent
module.exports.get.hasContent.sync = get.hasContent.sync

module.exports.put = put
module.exports.put.stream = put.stream
54 changes: 0 additions & 54 deletions lib/util/fix-owner.js
@@ -67,40 +67,6 @@ async function fixOwner (cache, filepath) {
)
}

module.exports.chownr.sync = fixOwnerSync

function fixOwnerSync (cache, filepath) {
if (!process.getuid) {
// This platform doesn't need ownership fixing
return
}
const { uid, gid } = inferOwner.sync(cache)
getSelf()
if (self.uid !== 0) {
// almost certainly can't chown anyway
return
}

if (self.uid === uid && self.gid === gid) {
// No need to override if it's already what we used.
return
}
try {
chownr.sync(
filepath,
typeof uid === 'number' ? uid : self.uid,
typeof gid === 'number' ? gid : self.gid
)
} catch (err) {
// only catch ENOENT, any other error is a problem.
if (err.code === 'ENOENT') {
return null
}

throw err
}
}

module.exports.mkdirfix = mkdirfix

async function mkdirfix (cache, p, cb) {
@@ -123,23 +89,3 @@ async function mkdirfix (cache, p, cb) {
throw err
}
}

module.exports.mkdirfix.sync = mkdirfixSync

function mkdirfixSync (cache, p) {
try {
inferOwner.sync(cache)
const made = mkdirp.sync(p)
if (made) {
fixOwnerSync(cache, made)
return made
}
} catch (err) {
if (err.code === 'EEXIST') {
fixOwnerSync(cache, p)
return null
} else {
throw err
}
}
}
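Only the async `chownr` and `mkdirfix` remain; inside the package they are awaited wherever the cache creates files or directories. An illustrative use of the surviving API (internal module path shown for illustration only):

```javascript
const fixOwner = require('cacache/lib/util/fix-owner')

async function ensureCacheDir (cache, dir) {
  // mkdirfix creates the directory and, when running as root,
  // chowns it to the cache's inferred owner
  return fixOwner.mkdirfix(cache, dir)
}
```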
9 changes: 3 additions & 6 deletions lib/verify.js
@@ -239,14 +239,11 @@ function cleanTmp (cache, opts) {
return rimraf(path.join(cache, 'tmp'))
}

-function writeVerifile (cache, opts) {
+async function writeVerifile (cache, opts) {
const verifile = path.join(cache, '_lastverified')
opts.log.silly('verify', 'writing verifile to ' + verifile)
-try {
-return fs.writeFile(verifile, `${Date.now()}`)
-} finally {
-fixOwner.chownr.sync(cache, verifile)
-}
+await fs.writeFile(verifile, `${Date.now()}`)
+return fixOwner.chownr(cache, verifile)
}

module.exports.lastRun = lastRun
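`writeVerifile` is now fully async: the verifile is written, then ownership is fixed, with no synchronous chownr in a `finally` block. It runs as part of verification; callers reach it through the public API, e.g. (assuming defaults):

```javascript
const cacache = require('cacache')

async function verifyCache (cache) {
  // verify compacts and revalidates the cache, then writes _lastverified
  const stats = await cacache.verify(cache)

  // lastRun reads _lastverified back as a Date
  const lastRun = await cacache.verify.lastRun(cache)
  return { stats, lastRun }
}
```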
