Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit f4de206

Browse files
vmxdaviddias
authored and committed Feb 27, 2018
fix: use "ipld" instead of "ipld-resolver"
The "ipld-resolver" has been renamed to just "ipld".
1 parent 3d6c9b1 commit f4de206

20 files changed

+114
-114
lines changed
 

‎package.json

+1-1
Original file line number | Diff line number | Diff line change
@@ -60,8 +60,8 @@
6060
"cids": "~0.5.2",
6161
"deep-extend": "~0.5.0",
6262
"ipfs-unixfs": "~0.1.14",
63+
"ipld": "^0.15.0",
6364
"ipld-dag-pb": "~0.13.1",
64-
"ipld-resolver": "~0.14.1",
6565
"left-pad": "^1.2.0",
6666
"lodash": "^4.17.5",
6767
"multihashes": "~0.4.13",

‎src/builder/builder.js

+4-4
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ const defaultOptions = {
1919
}
2020
}
2121

22-
module.exports = function (createChunker, ipldResolver, createReducer, _options) {
22+
module.exports = function (createChunker, ipld, createReducer, _options) {
2323
const options = extend({}, defaultOptions, _options)
2424

2525
return function (source) {
@@ -69,7 +69,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
6969
cid = cid.toV1()
7070
}
7171

72-
ipldResolver.put(node, { cid }, (err) => cb(err, node))
72+
ipld.put(node, { cid }, (err) => cb(err, node))
7373
}
7474
], (err, node) => {
7575
if (err) {
@@ -92,7 +92,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
9292
return callback(new Error('invalid content'))
9393
}
9494

95-
const reducer = createReducer(reduce(file, ipldResolver, options), options)
95+
const reducer = createReducer(reduce(file, ipld, options), options)
9696

9797
let previous
9898
let count = 0
@@ -121,7 +121,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
121121
cid = cid.toV1()
122122
}
123123

124-
ipldResolver.put(leaf.DAGNode, { cid }, (err) => callback(err, leaf))
124+
ipld.put(leaf.DAGNode, { cid }, (err) => callback(err, leaf))
125125
}),
126126
pull.map((leaf) => {
127127
return {

‎src/builder/create-build-stream.js

+1-1
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
const pullPushable = require('pull-pushable')
44
const pullWrite = require('pull-write')
55

6-
module.exports = function createBuildStream (createStrategy, ipldResolver, options) {
6+
module.exports = function createBuildStream (createStrategy, _ipld, options) {
77
const source = pullPushable()
88

99
const sink = pullWrite(

‎src/builder/index.js

+4-4
Original file line numberDiff line numberDiff line change
@@ -16,17 +16,17 @@ const defaultOptions = {
1616
reduceSingleLeafToSelf: false
1717
}
1818

19-
module.exports = function (Chunker, ipldResolver, _options) {
19+
module.exports = function (Chunker, ipld, _options) {
2020
assert(Chunker, 'Missing chunker creator function')
21-
assert(ipldResolver, 'Missing IPLD Resolver')
21+
assert(ipld, 'Missing IPLD')
2222

2323
const options = Object.assign({}, defaultOptions, _options)
2424

2525
const strategyName = options.strategy
2626
const reducer = reducers[strategyName]
2727
assert(reducer, 'Unknown importer build strategy name: ' + strategyName)
2828

29-
const createStrategy = Builder(Chunker, ipldResolver, reducer, options)
29+
const createStrategy = Builder(Chunker, ipld, reducer, options)
3030

31-
return createBuildStream(createStrategy, ipldResolver, options)
31+
return createBuildStream(createStrategy, ipld, options)
3232
}

‎src/builder/reduce.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ const CID = require('cids')
88
const DAGLink = dagPB.DAGLink
99
const DAGNode = dagPB.DAGNode
1010

11-
module.exports = function (file, ipldResolver, options) {
11+
module.exports = function (file, ipld, options) {
1212
return function (leaves, callback) {
1313
if (leaves.length === 1 && (leaves[0].single || options.reduceSingleLeafToSelf)) {
1414
const leave = leaves[0]
@@ -42,7 +42,7 @@ module.exports = function (file, ipldResolver, options) {
4242
cid = cid.toV1()
4343
}
4444

45-
ipldResolver.put(node, { cid }, (err) => cb(err, node))
45+
ipld.put(node, { cid }, (err) => cb(err, node))
4646
}
4747
], (err, node) => {
4848
if (err) {

‎src/importer/dir-flat.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,7 @@ class DirFlat extends Dir {
4848
)
4949
}
5050

51-
flush (path, ipldResolver, source, callback) {
51+
flush (path, ipld, source, callback) {
5252
const links = Object.keys(this._children)
5353
.map((key) => {
5454
const child = this._children[key]
@@ -70,7 +70,7 @@ class DirFlat extends Dir {
7070
cid = cid.toV1()
7171
}
7272

73-
ipldResolver.put(node, { cid }, (err) => callback(err, node))
73+
ipld.put(node, { cid }, (err) => callback(err, node))
7474
},
7575
(node, callback) => {
7676
this.multihash = node.multihash

‎src/importer/dir-sharded.js

+5-5
Original file line numberDiff line numberDiff line change
@@ -70,8 +70,8 @@ class DirSharded extends Dir {
7070
this._bucket.eachLeafSeries(iterator, callback)
7171
}
7272

73-
flush (path, ipldResolver, source, callback) {
74-
flush(this._options, this._bucket, path, ipldResolver, source, (err, node) => {
73+
flush (path, ipld, source, callback) {
74+
flush(this._options, this._bucket, path, ipld, source, (err, node) => {
7575
if (err) {
7676
callback(err)
7777
} else {
@@ -89,7 +89,7 @@ function createDirSharded (props, _options) {
8989
return new DirSharded(props, _options)
9090
}
9191

92-
function flush (options, bucket, path, ipldResolver, source, callback) {
92+
function flush (options, bucket, path, ipld, source, callback) {
9393
const children = bucket._children // TODO: intromission
9494
let index = 0
9595
const links = []
@@ -119,7 +119,7 @@ function flush (options, bucket, path, ipldResolver, source, callback) {
119119
function collectChild (child, index, callback) {
120120
const labelPrefix = leftPad(index.toString(16).toUpperCase(), 2, '0')
121121
if (Bucket.isBucket(child)) {
122-
flush(options, child, path, ipldResolver, null, (err, node) => {
122+
flush(options, child, path, ipld, null, (err, node) => {
123123
if (err) {
124124
callback(err)
125125
return // early
@@ -154,7 +154,7 @@ function flush (options, bucket, path, ipldResolver, source, callback) {
154154
cid = cid.toV1()
155155
}
156156

157-
ipldResolver.put(node, { cid }, (err) => callback(err, node))
157+
ipld.put(node, { cid }, (err) => callback(err, node))
158158
},
159159
(node, callback) => {
160160
const pushable = {

‎src/importer/flush-tree.js

+5-5
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ const waterfall = require('async/waterfall')
99
const DAGLink = dagPB.DAGLink
1010
const DAGNode = dagPB.DAGNode
1111

12-
module.exports = (files, ipldResolver, source, callback) => {
12+
module.exports = (files, ipld, source, callback) => {
1313
// 1) convert files to a tree
1414
const fileTree = createTree(files)
1515

@@ -26,7 +26,7 @@ module.exports = (files, ipldResolver, source, callback) => {
2626
const sizeIndex = createSizeIndex(files)
2727

2828
// 3) bottom up flushing
29-
traverse(fileTree, sizeIndex, null, ipldResolver, source, callback)
29+
traverse(fileTree, sizeIndex, null, ipld, source, callback)
3030
}
3131

3232
/*
@@ -106,13 +106,13 @@ function createSizeIndex (files) {
106106
* If the value is not an object
107107
* add as a link to the dirNode
108108
*/
109-
function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
109+
function traverse (tree, sizeIndex, path, ipld, source, done) {
110110
mapValues(tree, (node, key, cb) => {
111111
if (isLeaf(node)) {
112112
return cb(null, node)
113113
}
114114

115-
traverse(node, sizeIndex, path ? `${path}/${key}` : key, ipldResolver, source, cb)
115+
traverse(node, sizeIndex, path ? `${path}/${key}` : key, ipld, source, cb)
116116
}, (err, tree) => {
117117
if (err) {
118118
return done(err)
@@ -135,7 +135,7 @@ function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
135135
(node, cb) => {
136136
sizeIndex[mh.toB58String(node.multihash)] = node.size
137137

138-
ipldResolver.put(node, {
138+
ipld.put(node, {
139139
cid: new CID(node.multihash)
140140
}, (err) => cb(err, node))
141141
}

‎src/importer/index.js

+3-3
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ const defaultOptions = {
1717
chunker: 'fixed'
1818
}
1919

20-
module.exports = function (ipldResolver, _options) {
20+
module.exports = function (ipld, _options) {
2121
const options = Object.assign({}, defaultOptions, _options)
2222
const Chunker = chunkers[options.chunker]
2323
assert(Chunker, 'Unknkown chunker named ' + options.chunker)
@@ -39,9 +39,9 @@ module.exports = function (ipldResolver, _options) {
3939
source: pushable()
4040
}
4141

42-
const dagStream = DAGBuilder(Chunker, ipldResolver, options)
42+
const dagStream = DAGBuilder(Chunker, ipld, options)
4343

44-
const treeBuilder = createTreeBuilder(ipldResolver, options)
44+
const treeBuilder = createTreeBuilder(ipld, options)
4545
const treeBuilderStream = treeBuilder.stream()
4646
const pausable = pause(() => {})
4747

‎src/importer/tree-builder.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ const defaultOptions = {
1818
onlyHash: false
1919
}
2020

21-
function createTreeBuilder (ipldResolver, _options) {
21+
function createTreeBuilder (ipld, _options) {
2222
const options = Object.assign({}, defaultOptions, _options)
2323

2424
const queue = createQueue(consumeQueue, 1)
@@ -202,7 +202,7 @@ function createTreeBuilder (ipldResolver, _options) {
202202
// don't flush directory unless it's been modified
203203

204204
tree.dirty = false
205-
tree.flush(path, ipldResolver, stream.source, (err, node) => {
205+
tree.flush(path, ipld, stream.source, (err, node) => {
206206
if (err) {
207207
callback(err)
208208
} else {

‎test/builder-dir-sharding.js

+12-12
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ chai.use(require('dirty-chai'))
99
const expect = chai.expect
1010
const mh = require('multihashes')
1111
const BlockService = require('ipfs-block-service')
12-
const IPLDResolver = require('ipld-resolver')
12+
const Ipld = require('ipld')
1313
const pull = require('pull-stream')
1414
const pushable = require('pull-pushable')
1515
const whilst = require('async/whilst')
@@ -20,11 +20,11 @@ module.exports = (repo) => {
2020
describe('builder: directory sharding', function () {
2121
this.timeout(30 * 1000)
2222

23-
let ipldResolver
23+
let ipld
2424

2525
before(() => {
2626
const bs = new BlockService(repo)
27-
ipldResolver = new IPLDResolver(bs)
27+
ipld = new Ipld(bs)
2828
})
2929

3030
describe('basic dirbuilder', () => {
@@ -42,7 +42,7 @@ module.exports = (repo) => {
4242
content: pull.values([Buffer.from('i have the best bytes')])
4343
}
4444
]),
45-
importer(ipldResolver, options),
45+
importer(ipld, options),
4646
pull.collect((err, nodes) => {
4747
expect(err).to.not.exist()
4848
expect(nodes.length).to.be.eql(2)
@@ -67,7 +67,7 @@ module.exports = (repo) => {
6767
content: pull.values([Buffer.from('i have the best bytes')])
6868
}
6969
]),
70-
importer(ipldResolver, options),
70+
importer(ipld, options),
7171
pull.collect((err, nodes) => {
7272
expect(err).to.not.exist()
7373
expect(nodes.length).to.be.eql(2)
@@ -83,7 +83,7 @@ module.exports = (repo) => {
8383

8484
it('exporting unsharded hash results in the correct files', (done) => {
8585
pull(
86-
exporter(nonShardedHash, ipldResolver),
86+
exporter(nonShardedHash, ipld),
8787
pull.collect((err, nodes) => {
8888
expect(err).to.not.exist()
8989
expect(nodes.length).to.be.eql(2)
@@ -109,7 +109,7 @@ module.exports = (repo) => {
109109

110110
it('exporting sharded hash results in the correct files', (done) => {
111111
pull(
112-
exporter(shardedHash, ipldResolver),
112+
exporter(shardedHash, ipld),
113113
pull.collect((err, nodes) => {
114114
expect(err).to.not.exist()
115115
expect(nodes.length).to.be.eql(2)
@@ -142,7 +142,7 @@ module.exports = (repo) => {
142142
const push = pushable()
143143
pull(
144144
push,
145-
importer(ipldResolver),
145+
importer(ipld),
146146
pull.collect((err, nodes) => {
147147
expect(err).to.not.exist()
148148
expect(nodes.length).to.be.eql(maxDirs + 1)
@@ -179,7 +179,7 @@ module.exports = (repo) => {
179179
const contentEntries = []
180180
const entries = {}
181181
pull(
182-
exporter(rootHash, ipldResolver),
182+
exporter(rootHash, ipld),
183183
pull.asyncMap((node, callback) => {
184184
if (node.content) {
185185
pull(
@@ -234,7 +234,7 @@ module.exports = (repo) => {
234234
const push = pushable()
235235
pull(
236236
push,
237-
importer(ipldResolver),
237+
importer(ipld),
238238
pull.collect((err, nodes) => {
239239
expect(err).to.not.exist()
240240
const last = nodes[nodes.length - 1]
@@ -281,7 +281,7 @@ module.exports = (repo) => {
281281
it('exports a big dir', (done) => {
282282
const entries = {}
283283
pull(
284-
exporter(rootHash, ipldResolver),
284+
exporter(rootHash, ipld),
285285
pull.asyncMap((node, callback) => {
286286
if (node.content) {
287287
pull(
@@ -340,7 +340,7 @@ module.exports = (repo) => {
340340
it('exports a big dir with subpath', (done) => {
341341
const exportHash = mh.toB58String(rootHash) + '/big/big/2000'
342342
pull(
343-
exporter(exportHash, ipldResolver),
343+
exporter(exportHash, ipld),
344344
pull.collect(collected)
345345
)
346346

‎test/builder-only-hash.js

+5-5
Original file line numberDiff line numberDiff line change
@@ -6,18 +6,18 @@ chai.use(require('dirty-chai'))
66
const expect = chai.expect
77
const BlockService = require('ipfs-block-service')
88
const pull = require('pull-stream')
9-
const IPLDResolver = require('ipld-resolver')
9+
const Ipld = require('ipld')
1010
const CID = require('cids')
1111
const createBuilder = require('../src/builder')
1212
const FixedSizeChunker = require('../src/chunker/fixed-size')
1313

1414
module.exports = (repo) => {
1515
describe('builder: onlyHash', () => {
16-
let ipldResolver
16+
let ipld
1717

1818
before(() => {
1919
const bs = new BlockService(repo)
20-
ipldResolver = new IPLDResolver(bs)
20+
ipld = new Ipld(bs)
2121
})
2222

2323
it('will only chunk and hash if passed an "onlyHash" option', (done) => {
@@ -27,7 +27,7 @@ module.exports = (repo) => {
2727
const node = nodes[0]
2828
expect(node).to.exist()
2929

30-
ipldResolver.get(new CID(node.multihash), (err, res) => {
30+
ipld.get(new CID(node.multihash), (err, res) => {
3131
expect(err).to.exist()
3232
done()
3333
})
@@ -45,7 +45,7 @@ module.exports = (repo) => {
4545

4646
pull(
4747
pull.values([inputFile]),
48-
createBuilder(FixedSizeChunker, ipldResolver, options),
48+
createBuilder(FixedSizeChunker, ipld, options),
4949
pull.collect(onCollected)
5050
)
5151
})

‎test/builder.js

+6-6
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ const expect = chai.expect
77
const BlockService = require('ipfs-block-service')
88
const pull = require('pull-stream')
99
const mh = require('multihashes')
10-
const IPLDResolver = require('ipld-resolver')
10+
const Ipld = require('ipld')
1111
const eachSeries = require('async').eachSeries
1212
const CID = require('cids')
1313
const UnixFS = require('ipfs-unixfs')
@@ -16,13 +16,13 @@ const FixedSizeChunker = require('../src/chunker/fixed-size')
1616

1717
module.exports = (repo) => {
1818
describe('builder', () => {
19-
let ipldResolver
19+
let ipld
2020

2121
const testMultihashes = Object.keys(mh.names).slice(0, 40)
2222

2323
before(() => {
2424
const bs = new BlockService(repo)
25-
ipldResolver = new IPLDResolver(bs)
25+
ipld = new Ipld(bs)
2626
})
2727

2828
it('allows multihash hash algorithm to be specified', (done) => {
@@ -44,7 +44,7 @@ module.exports = (repo) => {
4444
expect(mh.decode(node.multihash).name).to.equal(hashAlg)
4545

4646
// Fetch using hashAlg encoded multihash
47-
ipldResolver.get(new CID(node.multihash), (err, res) => {
47+
ipld.get(new CID(node.multihash), (err, res) => {
4848
if (err) return cb(err)
4949
const content = UnixFS.unmarshal(res.value.data).data
5050
expect(content.equals(inputFile.content)).to.be.true()
@@ -54,7 +54,7 @@ module.exports = (repo) => {
5454

5555
pull(
5656
pull.values([Object.assign({}, inputFile)]),
57-
createBuilder(FixedSizeChunker, ipldResolver, options),
57+
createBuilder(FixedSizeChunker, ipld, options),
5858
pull.collect(onCollected)
5959
)
6060
}, done)
@@ -87,7 +87,7 @@ module.exports = (repo) => {
8787

8888
pull(
8989
pull.values([Object.assign({}, inputFile)]),
90-
createBuilder(FixedSizeChunker, ipldResolver, options),
90+
createBuilder(FixedSizeChunker, ipld, options),
9191
pull.collect(onCollected)
9292
)
9393
}, done)

‎test/exporter-subtree.js

+8-8
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ const chai = require('chai')
55
chai.use(require('dirty-chai'))
66
const expect = chai.expect
77
const BlockService = require('ipfs-block-service')
8-
const IPLDResolver = require('ipld-resolver')
8+
const Ipld = require('ipld')
99
const CID = require('cids')
1010
const loadFixture = require('aegir/fixtures')
1111
const pull = require('pull-stream')
@@ -19,18 +19,18 @@ module.exports = (repo) => {
1919
describe('exporter subtree', () => {
2020
// this.timeout(10 * 1000)
2121

22-
let ipldResolver
22+
let ipld
2323

2424
before(() => {
2525
const bs = new BlockService(repo)
26-
ipldResolver = new IPLDResolver(bs)
26+
ipld = new Ipld(bs)
2727
})
2828

2929
it('export a file 2 levels down', (done) => {
3030
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1/200Bytes.txt'
3131

3232
pull(
33-
exporter(hash, ipldResolver),
33+
exporter(hash, ipld),
3434
pull.collect((err, files) => {
3535
expect(err).to.not.exist()
3636
expect(files.length).to.equal(1)
@@ -44,7 +44,7 @@ module.exports = (repo) => {
4444
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/level-1'
4545

4646
pull(
47-
exporter(hash, ipldResolver),
47+
exporter(hash, ipld),
4848
pull.collect((err, files) => {
4949
expect(err).to.not.exist()
5050
expect(files.length).to.equal(3)
@@ -60,7 +60,7 @@ module.exports = (repo) => {
6060
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN/doesnotexist'
6161

6262
pull(
63-
exporter(hash, ipldResolver),
63+
exporter(hash, ipld),
6464
pull.collect((err, files) => {
6565
expect(err).to.not.exist()
6666
expect(files.length).to.equal(0)
@@ -71,12 +71,12 @@ module.exports = (repo) => {
7171

7272
it('exports starting from non-protobuf node', (done) => {
7373
const doc = { a: { file: new CID('QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN') } }
74-
ipldResolver.put(doc, { format: 'dag-cbor' }, (err, cid) => {
74+
ipld.put(doc, { format: 'dag-cbor' }, (err, cid) => {
7575
expect(err).to.not.exist()
7676
const nodeCID = cid.toBaseEncodedString()
7777

7878
pull(
79-
exporter(nodeCID + '/a/file/level-1/200Bytes.txt', ipldResolver),
79+
exporter(nodeCID + '/a/file/level-1/200Bytes.txt', ipld),
8080
pull.collect((err, files) => {
8181
expect(err).to.not.exist()
8282
expect(files.length).to.equal(1)

‎test/exporter.js

+14-14
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ const chai = require('chai')
55
chai.use(require('dirty-chai'))
66
const expect = chai.expect
77
const BlockService = require('ipfs-block-service')
8-
const IPLDResolver = require('ipld-resolver')
8+
const Ipld = require('ipld')
99
const UnixFS = require('ipfs-unixfs')
1010
const bs58 = require('bs58')
1111
const pull = require('pull-stream')
@@ -20,25 +20,25 @@ const bigFile = loadFixture('test/fixtures/1.2MiB.txt')
2020

2121
module.exports = (repo) => {
2222
describe('exporter', () => {
23-
let ipldResolver
23+
let ipld
2424

2525
before(() => {
2626
const bs = new BlockService(repo)
27-
ipldResolver = new IPLDResolver(bs)
27+
ipld = new Ipld(bs)
2828
})
2929

3030
it('ensure hash inputs are sanitized', (done) => {
3131
const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
3232
const mhBuf = Buffer.from(bs58.decode(hash))
3333
const cid = new CID(hash)
3434

35-
ipldResolver.get(cid, (err, result) => {
35+
ipld.get(cid, (err, result) => {
3636
expect(err).to.not.exist()
3737
const node = result.value
3838
const unmarsh = UnixFS.unmarshal(node.data)
3939

4040
pull(
41-
exporter(mhBuf, ipldResolver),
41+
exporter(mhBuf, ipld),
4242
pull.collect(onFiles)
4343
)
4444

@@ -58,10 +58,10 @@ module.exports = (repo) => {
5858
pull(
5959
zip(
6060
pull(
61-
ipldResolver.getStream(new CID(hash)),
61+
ipld.getStream(new CID(hash)),
6262
pull.map((res) => UnixFS.unmarshal(res.value.data))
6363
),
64-
exporter(hash, ipldResolver)
64+
exporter(hash, ipld)
6565
),
6666
pull.collect((err, values) => {
6767
expect(err).to.not.exist()
@@ -77,7 +77,7 @@ module.exports = (repo) => {
7777
this.timeout(30 * 1000)
7878
const hash = 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q'
7979
pull(
80-
exporter(hash, ipldResolver),
80+
exporter(hash, ipld),
8181
pull.collect((err, files) => {
8282
expect(err).to.not.exist()
8383

@@ -91,7 +91,7 @@ module.exports = (repo) => {
9191
const cid = new CID('QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q')
9292

9393
pull(
94-
exporter(cid, ipldResolver),
94+
exporter(cid, ipld),
9595
pull.collect((err, files) => {
9696
expect(err).to.not.exist()
9797

@@ -104,7 +104,7 @@ module.exports = (repo) => {
104104
this.timeout(30 * 1000)
105105
const hash = 'QmRQgufjp9vLE8XK2LGKZSsPCFCF6e4iynCQtNB5X2HBKE'
106106
pull(
107-
exporter(hash, ipldResolver),
107+
exporter(hash, ipld),
108108
pull.collect((err, files) => {
109109
expect(err).to.not.exist()
110110

@@ -119,7 +119,7 @@ module.exports = (repo) => {
119119
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN'
120120

121121
pull(
122-
exporter(hash, ipldResolver),
122+
exporter(hash, ipld),
123123
pull.collect((err, files) => {
124124
expect(err).to.not.exist()
125125
files.forEach(file => expect(file).to.have.property('hash'))
@@ -160,7 +160,7 @@ module.exports = (repo) => {
160160
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKjN'
161161

162162
pull(
163-
exporter(hash, ipldResolver, { maxDepth: 1 }),
163+
exporter(hash, ipld, { maxDepth: 1 }),
164164
pull.collect((err, files) => {
165165
expect(err).to.not.exist()
166166
files.forEach(file => expect(file).to.have.property('hash'))
@@ -196,7 +196,7 @@ module.exports = (repo) => {
196196
const hash = 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn'
197197

198198
pull(
199-
exporter(hash, ipldResolver),
199+
exporter(hash, ipld),
200200
pull.collect((err, files) => {
201201
expect(err).to.not.exist()
202202
expect(files[0].content).to.not.exist()
@@ -213,7 +213,7 @@ module.exports = (repo) => {
213213
const hash = 'QmWChcSFMNcFkfeJtNd8Yru1rE6PhtCRfewi1tMwjkwKj3'
214214

215215
pull(
216-
exporter(hash, ipldResolver),
216+
exporter(hash, ipld),
217217
pull.collect((err, files) => {
218218
expect(err).to.exist()
219219
done()

‎test/hash-parity-with-go-ipfs.js

+4-4
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ const expect = chai.expect
99
const BlockService = require('ipfs-block-service')
1010
const pull = require('pull-stream')
1111
const mh = require('multihashes')
12-
const IPLDResolver = require('ipld-resolver')
12+
const Ipld = require('ipld')
1313
const randomByteStream = require('./helpers/finite-pseudorandom-byte-stream')
1414

1515
const strategies = [
@@ -31,11 +31,11 @@ module.exports = (repo) => {
3131
}
3232

3333
describe('go-ipfs interop using importer:' + strategy, () => {
34-
let ipldResolver
34+
let ipld
3535

3636
before(() => {
3737
const bs = new BlockService(repo)
38-
ipldResolver = new IPLDResolver(bs)
38+
ipld = new Ipld(bs)
3939
})
4040

4141
it('yields the same tree as go-ipfs', function (done) {
@@ -47,7 +47,7 @@ module.exports = (repo) => {
4747
content: randomByteStream(45900000, 7382)
4848
}
4949
]),
50-
importer(ipldResolver, options),
50+
importer(ipld, options),
5151
pull.collect((err, files) => {
5252
expect(err).to.not.exist()
5353
expect(files.length).to.be.equal(1)

‎test/import-export-nested-dir.js

+5-5
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ const chai = require('chai')
55
chai.use(require('dirty-chai'))
66
const expect = chai.expect
77
const BlockService = require('ipfs-block-service')
8-
const IPLDResolver = require('ipld-resolver')
8+
const Ipld = require('ipld')
99
const pull = require('pull-stream')
1010
const mh = require('multihashes')
1111
const map = require('async/map')
@@ -15,11 +15,11 @@ const unixFSEngine = require('./../')
1515
module.exports = (repo) => {
1616
describe('import and export: directory', () => {
1717
const rootHash = 'QmdCrquDwd7RfZ6GCZFEVADwe8uyyw1YmF9mtAB7etDgmK'
18-
let ipldResolver
18+
let ipld
1919

2020
before(() => {
2121
const bs = new BlockService(repo)
22-
ipldResolver = new IPLDResolver(bs)
22+
ipld = new Ipld(bs)
2323
})
2424

2525
it('imports', function (done) {
@@ -32,7 +32,7 @@ module.exports = (repo) => {
3232
{ path: 'a/b/g', content: pull.values([Buffer.from('ice')]) },
3333
{ path: 'a/b/h', content: pull.values([Buffer.from('cream')]) }
3434
]),
35-
unixFSEngine.importer(ipldResolver),
35+
unixFSEngine.importer(ipld),
3636
pull.collect((err, files) => {
3737
expect(err).to.not.exist()
3838
expect(files.map(normalizeNode).sort(byPath)).to.be.eql([
@@ -62,7 +62,7 @@ module.exports = (repo) => {
6262
this.timeout(20 * 1000)
6363

6464
pull(
65-
unixFSEngine.exporter(rootHash, ipldResolver),
65+
unixFSEngine.exporter(rootHash, ipld),
6666
pull.collect((err, files) => {
6767
expect(err).to.not.exist()
6868
map(

‎test/import-export.js

+5-5
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ const chai = require('chai')
66
chai.use(require('dirty-chai'))
77
const expect = chai.expect
88
const BlockService = require('ipfs-block-service')
9-
const IPLDResolver = require('ipld-resolver')
9+
const Ipld = require('ipld')
1010
const pull = require('pull-stream')
1111
const loadFixture = require('aegir/fixtures')
1212
const bigFile = loadFixture('test/fixtures/1.2MiB.txt')
@@ -40,23 +40,23 @@ module.exports = (repo) => {
4040
const importerOptions = { strategy: strategy }
4141

4242
describe('using builder: ' + strategy, () => {
43-
let ipldResolver
43+
let ipld
4444

4545
before(() => {
4646
const bs = new BlockService(repo)
47-
ipldResolver = new IPLDResolver(bs)
47+
ipld = new Ipld(bs)
4848
})
4949

5050
it('import and export', (done) => {
5151
const path = strategy + '-big.dat'
5252

5353
pull(
5454
pull.values([{ path: path, content: pull.values(bigFile) }]),
55-
unixFSEngine.importer(ipldResolver, importerOptions),
55+
unixFSEngine.importer(ipld, importerOptions),
5656
pull.map((file) => {
5757
expect(file.path).to.eql(path)
5858

59-
return exporter(file.multihash, ipldResolver)
59+
return exporter(file.multihash, ipld)
6060
}),
6161
pull.flatten(),
6262
pull.collect((err, files) => {

‎test/importer-flush.js

+6-6
Original file line numberDiff line numberDiff line change
@@ -7,22 +7,22 @@ const chai = require('chai')
77
chai.use(require('dirty-chai'))
88
const expect = chai.expect
99
const BlockService = require('ipfs-block-service')
10-
const IPLDResolver = require('ipld-resolver')
10+
const Ipld = require('ipld')
1111
const pull = require('pull-stream')
1212
const pushable = require('pull-pushable')
1313

1414
module.exports = (repo) => {
1515
describe('importer: flush', () => {
16-
let ipldResolver
16+
let ipld
1717

1818
before(() => {
1919
const bs = new BlockService(repo)
20-
ipldResolver = new IPLDResolver(bs)
20+
ipld = new Ipld(bs)
2121
})
2222

2323
it('can push a single root file and flush yields no dirs', (done) => {
2424
const source = pushable()
25-
const importer = createImporter(ipldResolver)
25+
const importer = createImporter(ipld)
2626
pull(
2727
source,
2828
importer,
@@ -51,7 +51,7 @@ module.exports = (repo) => {
5151

5252
it('can push a nested file and flush yields parent dir', (done) => {
5353
const source = pushable()
54-
const importer = createImporter(ipldResolver)
54+
const importer = createImporter(ipld)
5555
let count = 0
5656
pull(
5757
source,
@@ -93,7 +93,7 @@ module.exports = (repo) => {
9393
let currentDir = tree
9494

9595
const source = pushable()
96-
const importer = createImporter(ipldResolver)
96+
const importer = createImporter(ipld)
9797

9898
pull(
9999
source,

‎test/importer.js

+20-20
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ const BlockService = require('ipfs-block-service')
1212
const pull = require('pull-stream')
1313
const mh = require('multihashes')
1414
const CID = require('cids')
15-
const IPLDResolver = require('ipld-resolver')
15+
const Ipld = require('ipld')
1616
const loadFixture = require('aegir/fixtures')
1717
const each = require('async/each')
1818

@@ -163,7 +163,7 @@ module.exports = (repo) => {
163163
describe('importer: ' + strategy, function () {
164164
this.timeout(30 * 1000)
165165

166-
let ipldResolver
166+
let ipld
167167

168168
const options = {
169169
strategy: strategy,
@@ -175,7 +175,7 @@ module.exports = (repo) => {
175175

176176
before(() => {
177177
const bs = new BlockService(repo)
178-
ipldResolver = new IPLDResolver(bs)
178+
ipld = new Ipld(bs)
179179
})
180180

181181
it('fails on bad input', (done) => {
@@ -184,7 +184,7 @@ module.exports = (repo) => {
184184
path: '200Bytes.txt',
185185
content: 'banana'
186186
}]),
187-
importer(ipldResolver, options),
187+
importer(ipld, options),
188188
pull.onEnd((err) => {
189189
expect(err).to.exist()
190190
done()
@@ -195,7 +195,7 @@ module.exports = (repo) => {
195195
it('doesn\'t yield anything on empty source', (done) => {
196196
pull(
197197
pull.empty(),
198-
importer(ipldResolver, options),
198+
importer(ipld, options),
199199
pull.collect((err, nodes) => {
200200
expect(err).to.not.exist()
201201
expect(nodes.length).to.be.eql(0)
@@ -209,7 +209,7 @@ module.exports = (repo) => {
209209
path: 'emptyfile',
210210
content: pull.empty()
211211
}]),
212-
importer(ipldResolver, options),
212+
importer(ipld, options),
213213
pull.collect((err, nodes) => {
214214
expect(err).to.not.exist()
215215
expect(nodes.length).to.be.eql(1)
@@ -231,7 +231,7 @@ module.exports = (repo) => {
231231
content: pull.values([smallFile])
232232
}
233233
]),
234-
importer(ipldResolver, options),
234+
importer(ipld, options),
235235
pull.onEnd((err) => {
236236
expect(err).to.exist()
237237
expect(err.message).to.be.eql('detected more than one root')
@@ -246,7 +246,7 @@ module.exports = (repo) => {
246246
path: '200Bytes.txt',
247247
content: pull.values([smallFile])
248248
}]),
249-
importer(ipldResolver, options),
249+
importer(ipld, options),
250250
pull.collect((err, files) => {
251251
expect(err).to.not.exist()
252252
expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']])
@@ -261,7 +261,7 @@ module.exports = (repo) => {
261261
path: '200Bytes.txt',
262262
content: smallFile
263263
}]),
264-
importer(ipldResolver, options),
264+
importer(ipld, options),
265265
pull.collect((err, files) => {
266266
expect(err).to.not.exist()
267267
expect(stringifyMh(files)).to.be.eql([expected['200Bytes.txt']])
@@ -276,7 +276,7 @@ module.exports = (repo) => {
276276
path: 'foo/bar/200Bytes.txt',
277277
content: pull.values([smallFile])
278278
}]),
279-
importer(ipldResolver, options),
279+
importer(ipld, options),
280280
pull.collect(collected)
281281
)
282282

@@ -305,7 +305,7 @@ module.exports = (repo) => {
305305
path: '1.2MiB.txt',
306306
content: pull.values([bigFile])
307307
}]),
308-
importer(ipldResolver, options),
308+
importer(ipld, options),
309309
pull.collect((err, files) => {
310310
expect(err).to.not.exist()
311311
expect(stringifyMh(files)).to.be.eql([expected['1.2MiB.txt']])
@@ -321,7 +321,7 @@ module.exports = (repo) => {
321321
path: 'foo-big/1.2MiB.txt',
322322
content: pull.values([bigFile])
323323
}]),
324-
importer(ipldResolver, options),
324+
importer(ipld, options),
325325
pull.collect((err, files) => {
326326
expect(err).to.not.exist()
327327

@@ -340,7 +340,7 @@ module.exports = (repo) => {
340340
pull.values([{
341341
path: 'empty-dir'
342342
}]),
343-
importer(ipldResolver, options),
343+
importer(ipld, options),
344344
pull.collect((err, files) => {
345345
expect(err).to.not.exist()
346346

@@ -360,7 +360,7 @@ module.exports = (repo) => {
360360
path: 'pim/1.2MiB.txt',
361361
content: pull.values([bigFile])
362362
}]),
363-
importer(ipldResolver, options),
363+
importer(ipld, options),
364364
pull.collect((err, files) => {
365365
expect(err).to.not.exist()
366366

@@ -387,7 +387,7 @@ module.exports = (repo) => {
387387
path: 'pam/1.2MiB.txt',
388388
content: pull.values([bigFile])
389389
}]),
390-
importer(ipldResolver, options),
390+
importer(ipld, options),
391391
pull.collect((err, files) => {
392392
expect(err).to.not.exist()
393393

@@ -442,15 +442,15 @@ module.exports = (repo) => {
442442
const file = files[0]
443443
expect(file).to.exist()
444444

445-
ipldResolver.get(new CID(file.multihash), (err, res) => {
445+
ipld.get(new CID(file.multihash), (err, res) => {
446446
expect(err).to.exist()
447447
done()
448448
})
449449
}
450450

451451
pull(
452452
pull.values([inputFile]),
453-
importer(ipldResolver, options),
453+
importer(ipld, options),
454454
pull.collect(onCollected)
455455
)
456456
})
@@ -463,7 +463,7 @@ module.exports = (repo) => {
463463
path: '1.2MiB.txt',
464464
content: pull.values([bigFile])
465465
}]),
466-
importer(ipldResolver, options),
466+
importer(ipld, options),
467467
pull.collect(() => {
468468
expect(options.progress.called).to.equal(true)
469469
expect(options.progress.args[0][0]).to.equal(1024)
@@ -496,7 +496,7 @@ module.exports = (repo) => {
496496

497497
each(files, (file, cb) => {
498498
const cid = new CID(file.multihash).toV1()
499-
ipldResolver.get(cid, cb)
499+
ipld.get(cid, cb)
500500
}, done)
501501
}
502502

@@ -513,7 +513,7 @@ module.exports = (repo) => {
513513
createInputFile('/foo/bar', 262144 + 876),
514514
createInputFile('/foo/bar', 262144 + 21)
515515
]),
516-
importer(ipldResolver, options),
516+
importer(ipld, options),
517517
pull.collect(onCollected)
518518
)
519519
})

0 commit comments

Comments (0)
This repository has been archived.