// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
'use strict'
const isIpfs = require('is-ipfs')
const CID = require('cids')
const { DAGNode } = require('ipld-dag-pb')
const { normalizeCidPath } = require('../../utils')
const { Errors } = require('interface-datastore')
const ERR_NOT_FOUND = Errors.notFoundError().code
const { withTimeoutOption } = require('../../utils')
// Separator strings for the `refs` command's output formats:
// `default` — no separator (each ref printed on its own), and
// `edges` — " -> " placed between the parent and child of each link.
const Format = {
default: '',
edges: ' -> '
}
// Factory for the `refs` core API command: returns a timeout-aware async
// generator that yields the references (links) reachable from `ipfsPath`.
module.exports = function ({ ipld, resolve, preload }) {
return withTimeoutOption(async function * refs (ipfsPath, options) { // eslint-disable-line require-await
// Default the options bag so later property reads are safe.
options = options || {}
// maxDepth of 0 means "traverse nothing": yield no refs at all.
if (options.maxDepth === 0) {
return
}
// `edges` and a custom `format` are mutually exclusive options.
// NOTE(review): this fragment is truncated here — the body of this
// conditional and the rest of the generator are not visible; confirm
// against the complete file before relying on this documentation.
if (options.edges && options.format && options.format !== Format.default) {
'use strict'
const PeerId = require('peer-id')
const { Key, Errors } = require('interface-datastore')
const errcode = require('err-code')
const promisify = require('promisify-es6')
const debug = require('debug')
const log = debug('ipfs:ipns:publisher')
log.error = debug('ipfs:ipns:publisher:error')
const ipns = require('ipns')
const ERR_NOT_FOUND = Errors.notFoundError().code
const defaultRecordLifetime = 60 * 60 * 1000
// IpnsPublisher is capable of publishing and resolving names to the IPFS routing system.
class IpnsPublisher {
// @param {Object} routing - routing system used to put/get IPNS records
//   on the network.
// @param {Object} datastore - local datastore used for persistence.
constructor (routing, datastore) {
this._routing = routing
this._datastore = datastore
}
// Publish an IPNS record for `value` with an end-of-life (EOL): the record
// is signed by `privKey` and considered valid for `lifetime`.
// Throws ERR_INVALID_PRIVATE_KEY when no usable private key is supplied.
// NOTE(review): truncated fragment — the remainder of this method is not
// visible in this excerpt.
async publishWithEOL (privKey, value, lifetime) {
if (!privKey || !privKey.bytes) {
throw errcode(new Error('invalid private key'), 'ERR_INVALID_PRIVATE_KEY')
}
const peerId = await promisify(PeerId.createFromPrivKey)(privKey.bytes)
'use strict'
const CID = require('cids')
const base32 = require('base32.js')
const callbackify = require('callbackify')
const { cidToString } = require('../../../utils/cid')
const log = require('debug')('ipfs:gc')
const { default: Queue } = require('p-queue')
// TODO: Use exported key from root when upgraded to ipfs-mfs@>=13
// https://github.com/ipfs/js-ipfs-mfs/pull/58
const { MFS_ROOT_KEY } = require('ipfs-mfs/src/core/utils/constants')
const { Errors } = require('interface-datastore')
const ERR_NOT_FOUND = Errors.notFoundError().code
// Limit on the number of parallel block remove operations
const BLOCK_RM_CONCURRENCY = 256
// Perform mark and sweep garbage collection
// Mark-and-sweep garbage collection (callback-style API via callbackify).
// Takes the repo-wide GC write lock, then gathers the set of all block keys
// and the set of "marked" (in-use) blocks in parallel.
// NOTE(review): truncated fragment — the Promise.all operands, the sweep
// phase and the lock release are not visible in this excerpt.
module.exports = function gc (self) {
return callbackify(async () => {
const start = Date.now()
log('Creating set of marked blocks')
const release = await self._gcLock.writeLock()
try {
const [
blockKeys, markedSet
] = await Promise.all([
'use strict'
const CID = require('cids')
const { cidToString } = require('../../../utils/cid')
const log = require('debug')('ipfs:repo:gc')
const { MFS_ROOT_KEY } = require('ipfs-mfs')
const Repo = require('ipfs-repo')
const { Errors } = require('interface-datastore')
const ERR_NOT_FOUND = Errors.notFoundError().code
const { parallelMerge, transform, map } = require('streaming-iterables')
// Limit on the number of parallel block remove operations
const BLOCK_RM_CONCURRENCY = 256
// Perform mark and sweep garbage collection
// Mark-and-sweep garbage collection (async-generator API).
// Holds the GC write lock while first marking every block that is pinned
// or reachable (createMarkedSet), before sweeping unmarked blocks.
// NOTE(review): truncated fragment — the sweep phase and the `finally`
// that should release the lock are not visible in this excerpt.
module.exports = ({ gcLock, pin, pinManager, refs, repo }) => {
return async function * gc () {
const start = Date.now()
log('Creating set of marked blocks')
const release = await gcLock.writeLock()
try {
// Mark all blocks that are being used
const markedSet = await createMarkedSet({ pin, pinManager, refs, repo })
'use strict'
const { DAGNode, DAGLink } = require('ipld-dag-pb')
const CID = require('cids')
const { default: Queue } = require('p-queue')
const { Key } = require('interface-datastore')
const errCode = require('err-code')
const multicodec = require('multicodec')
const dagCborLinks = require('dag-cbor-links')
const debug = require('debug')
const { cidToString } = require('../../../utils/cid')
const createPinSet = require('./pin-set')
const { Errors } = require('interface-datastore')
const ERR_NOT_FOUND = Errors.notFoundError().code
// arbitrary limit to the number of concurrent dag operations
const WALK_DAG_CONCURRENCY_LIMIT = 300
const IS_PINNED_WITH_TYPE_CONCURRENCY_LIMIT = 300
const PIN_DS_KEY = new Key('/local/pins')
// Build the error thrown when a caller passes an unrecognised pin type.
// The returned Error carries the machine-readable code
// 'ERR_INVALID_PIN_TYPE' via the err-code wrapper.
function invalidPinTypeErr (type) {
  const message = `Invalid type '${type}', must be one of {direct, indirect, recursive, all}`
  return errCode(new Error(message), 'ERR_INVALID_PIN_TYPE')
}
// Enumeration of the pin type names accepted by the pin API; `all` matches
// any of the other three.
// NOTE(review): truncated fragment — the closing brace of this object
// literal is not visible in this excerpt.
const PinTypes = {
direct: 'direct',
recursive: 'recursive',
indirect: 'indirect',
all: 'all'
// Regression test: with the routing layer stubbed to throw a not-found
// error, a publish with `resolve: false` must still succeed, and a
// subsequent uncached resolve must reject with ERR_NO_RECORD_FOUND.
it('should publish and then fail to resolve if does not find the record', async function () {
const stub = sinon.stub(node._ipns.resolver._routing, 'get').throws(Errors.notFoundError())
await node.name.publish(ipfsRef, { resolve: false })
await expect(node.name.resolve(nodeId, { nocache: true }))
.to.eventually.be.rejected()
.with.property('code', 'ERR_NO_RECORD_FOUND')
// Restore the real routing.get so later tests are unaffected.
stub.restore()
})
'use strict'
const ipns = require('ipns')
const crypto = require('libp2p-crypto')
const PeerId = require('peer-id')
const errcode = require('err-code')
const CID = require('cids')
const debug = require('debug')
const log = debug('ipfs:ipns:resolver')
log.error = debug('ipfs:ipns:resolver:error')
const { Errors } = require('interface-datastore')
const ERR_NOT_FOUND = Errors.notFoundError().code
const defaultMaximumRecursiveDepth = 32
class IpnsResolver {
// @param {Object} routing - routing system used to fetch IPNS records.
constructor (routing) {
this._routing = routing
}
// Resolve an IPNS `name` to its current value.
// Throws ERR_INVALID_NAME unless `name` is a string.
// NOTE(review): truncated fragment — the method body continues beyond the
// visible source.
async resolve (name, options) {
options = options || {}
if (typeof name !== 'string') {
throw errcode(new Error('invalid name'), 'ERR_INVALID_NAME')
}
// NOTE(review): duplicate of the options-defaulting assignment above —
// one of the two is redundant; confirm against the complete file.
options = options || {}