diff --git a/package.json b/package.json
index 0ff4f2ca..41bdea0b 100644
--- a/package.json
+++ b/package.json
@@ -39,7 +39,7 @@
   },
   "homepage": "https://github.com/ipfs/js-ipfs-unixfs-engine#readme",
   "devDependencies": {
-    "aegir": "^9.4.0",
+    "aegir": "^10.0.0",
     "buffer-loader": "0.0.1",
     "chai": "^3.5.0",
     "fs-pull-blob-store": "^0.4.1",
@@ -58,7 +58,7 @@
     "deep-extend": "^0.4.1",
     "ipfs-unixfs": "^0.1.9",
     "ipld-dag-pb": "^0.9.4",
-    "ipld-resolver": "^0.6.0",
+    "ipld-resolver": "^0.8.0",
     "is-ipfs": "^0.3.0",
     "lodash": "^4.17.4",
     "multihashes": "^0.3.2",
@@ -85,4 +85,4 @@
     "jbenet ",
     "nginnever "
   ]
-}
\ No newline at end of file
+}
diff --git a/src/builder/builder.js b/src/builder/builder.js
index 54bfda67..71e32e32 100644
--- a/src/builder/builder.js
+++ b/src/builder/builder.js
@@ -62,8 +62,7 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
     waterfall([
       (cb) => DAGNode.create(d.marshal(), cb),
       (node, cb) => {
-        ipldResolver.put({
-          node: node,
+        ipldResolver.put(node, {
          cid: new CID(node.multihash)
        }, (err) => cb(err, node))
      }
@@ -104,12 +103,9 @@ module.exports = function (createChunker, ipldResolver, createReducer, _options)
         })
       }),
       pull.asyncMap((leaf, callback) => {
-        ipldResolver.put(
-          {
-            node: leaf.DAGNode,
-            cid: new CID(leaf.DAGNode.multihash)
-          },
-          err => callback(err, leaf)
+        ipldResolver.put(leaf.DAGNode, {
+          cid: new CID(leaf.DAGNode.multihash)
+        }, (err) => callback(err, leaf)
         )
       }),
       pull.map((leaf) => {
diff --git a/src/builder/reduce.js b/src/builder/reduce.js
index 868fa15f..83d43ac7 100644
--- a/src/builder/reduce.js
+++ b/src/builder/reduce.js
@@ -34,8 +34,7 @@ module.exports = function (file, ipldResolver, options) {
     waterfall([
       (cb) => DAGNode.create(f.marshal(), links, cb),
       (node, cb) => {
-        ipldResolver.put({
-          node: node,
+        ipldResolver.put(node, {
          cid: new CID(node.multihash)
        }, (err) => cb(err, node))
      }
diff --git a/src/exporter/dir.js b/src/exporter/dir.js
index 8e9630e7..2ec2fb13 100644
--- a/src/exporter/dir.js
+++ b/src/exporter/dir.js
@@ -30,7 +30,7 @@ function dirExporter (node, name, ipldResolver) {
       path: path.join(name, link.name),
       hash: link.multihash
     })),
-    paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, n) => {
+    paramap((item, cb) => ipldResolver.get(new CID(item.hash), (err, result) => {
       if (err) {
         return cb(err)
       }
@@ -40,10 +40,12 @@ function dirExporter (node, name, ipldResolver) {
         size: item.size
       }
 
+      const node = result.value
+
       cb(null, switchType(
-        n,
-        () => cat([pull.values([dir]), dirExporter(n, item.path, ipldResolver)]),
-        () => fileExporter(n, item.path, ipldResolver)
+        node,
+        () => cat([pull.values([dir]), dirExporter(node, item.path, ipldResolver)]),
+        () => fileExporter(node, item.path, ipldResolver)
       ))
     })),
     pull.flatten()
diff --git a/src/exporter/file.js b/src/exporter/file.js
index 9ec72cbb..59a910a4 100644
--- a/src/exporter/file.js
+++ b/src/exporter/file.js
@@ -20,7 +20,8 @@ module.exports = (node, name, ipldResolver) => {
   function visitor (node) {
     return pull(
       pull.values(node.links),
-      paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb))
+      paramap((link, cb) => ipldResolver.get(new CID(link.multihash), cb)),
+      pull.map((result) => result.value)
     )
   }
 
diff --git a/src/exporter/index.js b/src/exporter/index.js
index 9be52dfe..33167047 100644
--- a/src/exporter/index.js
+++ b/src/exporter/index.js
@@ -34,6 +34,7 @@ module.exports = (hash, ipldResolver, options) => {
       }
       return pull(
         ipldResolver.getStream(new CID(item.hash)),
+        pull.map((result) => result.value),
         pull.map((node) => switchType(
           node,
           () => dirExporter(node, item.path, ipldResolver),
@@ -46,6 +47,7 @@ module.exports = (hash, ipldResolver, options) => {
   // Traverse the DAG
   return pull(
     ipldResolver.getStream(new CID(hash)),
+    pull.map((result) => result.value),
     pull.map((node) => switchType(
       node,
       () => traverse.widthFirst({path: hash, hash}, visitor),
diff --git a/src/importer/flush-tree.js b/src/importer/flush-tree.js
index f465bc4a..936c44af 100644
--- a/src/importer/flush-tree.js
+++ b/src/importer/flush-tree.js
@@ -135,8 +135,7 @@ function traverse (tree, sizeIndex, path, ipldResolver, source, done) {
     (node, cb) => {
       sizeIndex[mh.toB58String(node.multihash)] = node.size
 
-      ipldResolver.put({
-        node: node,
+      ipldResolver.put(node, {
        cid: new CID(node.multihash)
      }, (err) => cb(err, node))
    }
diff --git a/test/browser.js b/test/browser.js
index 23bd5600..3fa874a0 100644
--- a/test/browser.js
+++ b/test/browser.js
@@ -1,4 +1,5 @@
 /* eslint-env mocha */
+/* global self */
 'use strict'
 
 const Store = require('idb-pull-blob-store')
@@ -6,10 +7,10 @@ const IPFSRepo = require('ipfs-repo')
 const repoContext = require.context('buffer!./repo-example', true)
 const pull = require('pull-stream')
 
-const idb = window.indexedDB ||
-  window.mozIndexedDB ||
-  window.webkitIndexedDB ||
-  window.msIndexedDB
+const idb = self.indexedDB ||
+  self.mozIndexedDB ||
+  self.webkitIndexedDB ||
+  self.msIndexedDB
 
 idb.deleteDatabase('ipfs')
 idb.deleteDatabase('ipfs/blocks')
diff --git a/test/test-exporter.js b/test/test-exporter.js
index 731be425..ef0af17f 100644
--- a/test/test-exporter.js
+++ b/test/test-exporter.js
@@ -28,29 +28,26 @@ module.exports = (repo) => {
     it('ensure hash inputs are sanitized', (done) => {
       const hash = 'QmQmZQxSKQppbsWfVzBvg59Cn3DKtsNVQ94bjAxg2h3Lb8'
       const mhBuf = new Buffer(bs58.decode(hash))
+      const cid = new CID(hash)
 
-      pull(
-        ipldResolver.getStream(new CID(hash)),
-        pull.map((node) => UnixFS.unmarshal(node.data)),
-        pull.collect((err, nodes) => {
-          expect(err).to.not.exist
-
-          const unmarsh = nodes[0]
+      ipldResolver.get(cid, (err, result) => {
+        expect(err).to.not.exist
+        const node = result.value
+        const unmarsh = UnixFS.unmarshal(node.data)
 
-          pull(
-            exporter(mhBuf, ipldResolver),
-            pull.collect(onFiles)
-          )
+        pull(
+          exporter(mhBuf, ipldResolver),
+          pull.collect(onFiles)
+        )
 
-          function onFiles (err, files) {
-            expect(err).to.not.exist
-            expect(files).to.have.length(1)
-            expect(files[0]).to.have.property('path', hash)
+        function onFiles (err, files) {
+          expect(err).to.not.exist
+          expect(files).to.have.length(1)
+          expect(files[0]).to.have.property('path', hash)
 
-            fileEql(files[0], unmarsh.data, done)
-          }
-        })
-      )
+          fileEql(files[0], unmarsh.data, done)
+        }
+      })
     })
 
     it('export a file with no links', (done) => {
@@ -59,7 +56,7 @@ module.exports = (repo) => {
       pull(
         zip(
           pull(
-            ipldResolver.getStream(new CID(hash)),
+            ipldResolver._getStream(new CID(hash)),
             pull.map((node) => UnixFS.unmarshal(node.data))
           ),
           exporter(hash, ipldResolver)
@@ -176,7 +173,7 @@ function fileEql (f1, f2, done) {
 
     try {
       if (f2) {
-        expect(Buffer.concat(data)).to.be.eql(f2)
+        expect(Buffer.concat(data)).to.eql(f2)
       } else {
         expect(data).to.exist
       }
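
Note on the API change this patch adapts to, as a minimal sketch inferred from the diff itself (not part of the patch): ipld-resolver ^0.8.0 takes the DAG node and the options object as separate arguments to put(), and get()/getStream() now yield a result wrapper whose .value property carries the node. The storeAndFetch helper below is hypothetical, written only to show both call shapes together.

    // Minimal sketch of the migration, assuming ipld-resolver ^0.8.0,
    // ipld-dag-pb nodes, and the `cids` module as used in this patch.
    // `storeAndFetch` is a hypothetical helper, not part of the patch.
    const CID = require('cids')

    function storeAndFetch (ipldResolver, node, callback) {
      // Before (^0.6.0): ipldResolver.put({ node: node, cid: cid }, cb)
      // After (^0.8.0): the node is the first argument, options second.
      const cid = new CID(node.multihash)
      ipldResolver.put(node, { cid: cid }, (err) => {
        if (err) return callback(err)
        // Before: get() called back with the bare DAG node.
        // After: it calls back with a result object; the node is result.value.
        ipldResolver.get(cid, (err, result) => {
          if (err) return callback(err)
          callback(null, result.value)
        })
      })
    }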