diff --git a/package.json b/package.json index b035857d50..dbef8705d7 100644 --- a/package.json +++ b/package.json @@ -98,8 +98,8 @@ "ipfs-block-service": "~0.15.2", "ipfs-http-client": "^34.0.0", "ipfs-http-response": "~0.3.1", - "ipfs-mfs": "~0.12.0", - "ipfs-multipart": "~0.1.1", + "ipfs-mfs": "ipfs/js-ipfs-mfs#v0.12.x-update-ipfs-multipart", + "ipfs-multipart": "^0.2.0", "ipfs-repo": "~0.26.6", "ipfs-unixfs": "~0.1.16", "ipfs-unixfs-exporter": "~0.37.7", @@ -119,6 +119,8 @@ "is-pull-stream": "~0.0.0", "is-stream": "^2.0.0", "iso-url": "~0.4.6", + "it-pipe": "^1.0.1", + "it-to-stream": "^0.1.1", "just-safe-set": "^2.1.0", "kind-of": "^6.0.2", "libp2p": "~0.26.1", @@ -142,7 +144,7 @@ "merge-options": "^1.0.1", "mime-types": "^2.1.21", "mkdirp": "~0.5.1", - "mortice": "^1.2.2", + "mortice": "^2.0.0", "multiaddr": "^6.1.0", "multiaddr-to-uri": "^5.0.0", "multibase": "~0.6.0", @@ -194,7 +196,7 @@ "execa": "^2.0.4", "form-data": "^2.5.1", "hat": "0.0.3", - "interface-ipfs-core": "^0.111.1", + "interface-ipfs-core": "^0.112.0", "ipfsd-ctl": "~0.46.0", "libp2p-websocket-star": "~0.10.2", "ncp": "^2.0.0", diff --git a/src/cli/commands/add.js b/src/cli/commands/add.js index 18d96bac24..a76ba3a7f8 100644 --- a/src/cli/commands/add.js +++ b/src/cli/commands/add.js @@ -1,65 +1,19 @@ 'use strict' -const pull = require('pull-stream/pull') -const through = require('pull-stream/throughs/through') -const end = require('pull-stream/sinks/on-end') const promisify = require('promisify-es6') const getFolderSize = promisify(require('get-folder-size')) const byteman = require('byteman') const mh = require('multihashes') const multibase = require('multibase') -const toPull = require('stream-to-pull-stream') const { createProgressBar } = require('../utils') const { cidToString } = require('../../utils/cid') -const globSource = require('../../utils/files/glob-source') +const globSource = require('ipfs-utils/src/files/glob-source') async function getTotalBytes (paths) { const sizes = 
await Promise.all(paths.map(p => getFolderSize(p))) return sizes.reduce((total, size) => total + size, 0) } -function addPipeline (source, addStream, options, log) { - let finalHash - - return new Promise((resolve, reject) => { - pull( - source, - addStream, - through((file) => { - const cid = finalHash = cidToString(file.hash, { base: options.cidBase }) - - if (options.silent || options.quieter) { - return - } - - let message = cid - - if (!options.quiet) { - // print the hash twice if we are piping from stdin - message = `added ${cid} ${options.file ? file.path || '' : cid}`.trim() - } - - log(message) - }), - end((err) => { - if (err) { - // Tweak the error message and add more relevant infor for the CLI - if (err.code === 'ERR_DIR_NON_RECURSIVE') { - err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories` - } - return reject(err) - } - - if (options.quieter) { - log(finalHash) - } - - resolve() - }) - ) - }) -} - module.exports = { command: 'add [file...]', @@ -199,17 +153,51 @@ module.exports = { } const source = argv.file - ? globSource(...argv.file, { recursive: argv.recursive }) - : toPull.source(process.stdin) // Pipe directly to ipfs.add + ? globSource(argv.file, { recursive: argv.recursive }) + : process.stdin // Pipe directly to ipfs.add - const adder = ipfs.addPullStream(options) + let finalHash try { - await addPipeline(source, adder, argv, log) - } finally { + for await (const file of ipfs._addAsyncIterator(source, options)) { + if (argv.silent) { + continue + } + + if (argv.quieter) { + finalHash = file.hash + continue + } + + const cid = cidToString(file.hash, { base: argv.cidBase }) + let message = cid + + if (!argv.quiet) { + // print the hash twice if we are piping from stdin + message = `added ${cid} ${argv.file ? 
file.path || '' : cid}`.trim() + } + + log(message) + } + } catch (err) { if (bar) { bar.terminate() } + + // Tweak the error message and add more relevant info for the CLI + if (err.code === 'ERR_DIR_NON_RECURSIVE') { + err.message = `'${err.path}' is a directory, use the '-r' flag to specify directories` + } + + throw err + } + + if (bar) { + bar.terminate() + } + + if (argv.quieter) { + log(cidToString(finalHash, { base: argv.cidBase })) } })()) } diff --git a/src/core/components/files-regular/add-async-iterator.js b/src/core/components/files-regular/add-async-iterator.js new file mode 100644 index 0000000000..dab9fb01cf --- /dev/null +++ b/src/core/components/files-regular/add-async-iterator.js @@ -0,0 +1,148 @@ +'use strict' + +const importer = require('ipfs-unixfs-importer') +const normaliseAddInput = require('ipfs-utils/src/files/normalise-input') +const { parseChunkerString } = require('./utils') +const pipe = require('it-pipe') +const log = require('debug')('ipfs:add') +log.error = require('debug')('ipfs:add:error') + +function noop () {} + +module.exports = function (self) { + // Internal add func that gets used by all add funcs + return async function * addAsyncIterator (source, options) { + options = options || {} + + const chunkerOptions = parseChunkerString(options.chunker) + + const opts = Object.assign({}, { + shardSplitThreshold: self._options.EXPERIMENTAL.sharding + ? 
1000 + : Infinity + }, options, { + chunker: chunkerOptions.chunker, + chunkerOptions: chunkerOptions.chunkerOptions + }) + + // CID v0 is for multihashes encoded with sha2-256 + if (opts.hashAlg && opts.cidVersion !== 1) { + opts.cidVersion = 1 + } + + let total = 0 + + const prog = opts.progress || noop + const progress = (bytes) => { + total += bytes + prog(total) + } + + opts.progress = progress + + const iterator = pipe( + normaliseAddInput(source), + doImport(self, opts), + transformFile(self, opts), + preloadFile(self, opts), + pinFile(self, opts) + ) + + const releaseLock = await self._gcLock.readLock() + + try { + yield * iterator + } finally { + releaseLock() + } + } +} + +function doImport (ipfs, opts) { + return function * (source) { + yield * importer(source, ipfs._ipld, opts) + } +} + +function transformFile (ipfs, opts) { + return async function * (source) { + for await (const file of source) { + let cid = file.cid + const hash = cid.toBaseEncodedString() + let path = file.path ? file.path : hash + + if (opts.wrapWithDirectory && !file.path) { + path = '' + } + + if (opts.onlyHash) { + yield { + path, + hash, + size: file.unixfs.fileSize() + } + + return + } + + const node = await ipfs.object.get(file.cid, Object.assign({}, opts, { preload: false })) + + if (opts.cidVersion === 1) { + cid = cid.toV1() + } + + let size = node.size + + if (Buffer.isBuffer(node)) { + size = node.length + } + + yield { + path, + hash, + size + } + } + } +} + +function preloadFile (ipfs, opts) { + return async function * (source) { + for await (const file of source) { + const isRootFile = !file.path || opts.wrapWithDirectory + ? 
file.path === '' + : !file.path.includes('/') + + const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false + + if (shouldPreload) { + ipfs._preload(file.hash) + } + + yield file + } + } +} + +function pinFile (ipfs, opts) { + return async function * (source) { + for await (const file of source) { + // Pin a file if it is the root dir of a recursive add or the single file + // of a direct add. + const pin = 'pin' in opts ? opts.pin : true + const isRootDir = !file.path.includes('/') + const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg + + if (shouldPin) { + // Note: addAsyncIterator() has already taken a GC lock, so tell + // pin.add() not to take a (second) GC lock + await ipfs.pin.add(file.hash, { + preload: false, + lock: false + }) + } + + yield file + } + } +} diff --git a/src/core/components/files-regular/add-pull-stream.js b/src/core/components/files-regular/add-pull-stream.js index ee815f1487..e3c1519531 100644 --- a/src/core/components/files-regular/add-pull-stream.js +++ b/src/core/components/files-regular/add-pull-stream.js @@ -1,174 +1,11 @@ 'use strict' -const importer = require('ipfs-unixfs-importer') -const kindOf = require('kind-of') -const toAsyncIterator = require('pull-stream-to-async-iterator') const toPullStream = require('async-iterator-to-pull-stream') -const pull = require('pull-stream/pull') -const pullValues = require('pull-stream/sources/values') -const pullMap = require('pull-stream/throughs/map') -const pullAsyncMap = require('pull-stream/throughs/async-map') -const pullFlatten = require('pull-stream/throughs/flatten') -const toPull = require('stream-to-pull-stream') -const waterfall = require('async/waterfall') -const isStream = require('is-stream') -const { isSource } = require('is-pull-stream') -const { parseChunkerString } = require('./utils') -const streamFromFileReader = require('ipfs-utils/src/streams/stream-from-filereader') -const { supportsFileReader } = require('ipfs-utils/src/supports') 
- -function noop () {} - -function prepareFile (file, self, opts, callback) { - opts = opts || {} - - let cid = file.cid - - waterfall([ - (cb) => opts.onlyHash - ? cb(null, file) - : self.object.get(file.cid, Object.assign({}, opts, { preload: false }), cb), - (node, cb) => { - if (opts.cidVersion === 1) { - cid = cid.toV1() - } - - const b58Hash = cid.toBaseEncodedString() - let size = node.size - - if (Buffer.isBuffer(node)) { - size = node.length - } - - cb(null, { - path: file.path === undefined ? b58Hash : (file.path || ''), - hash: b58Hash, - // multihash: b58Hash, - size - }) - } - ], callback) -} - -function normalizeContent (content, opts) { - if (!Array.isArray(content)) { - content = [content] - } - - return content.map((data) => { - if (supportsFileReader && kindOf(data) === 'file') { - data = { path: '', content: toPull.source(streamFromFileReader(data)) } - } - // Buffer input - if (Buffer.isBuffer(data)) { - data = { path: '', content: pullValues([data]) } - } - - // Readable stream input - if (isStream.readable(data)) { - data = { path: '', content: toPull.source(data) } - } - - if (isSource(data)) { - data = { path: '', content: data } - } - - if (data && data.content && typeof data.content !== 'function') { - if (supportsFileReader && kindOf(data.content) === 'file') { - data = { path: data.path, content: toPull.source(streamFromFileReader(data.content)) } - } - - if (Buffer.isBuffer(data.content)) { - data = { path: data.path, content: pullValues([data.content]) } - } - - if (isStream.readable(data.content)) { - data = { path: data.path, content: toPull.source(data.content) } - } - } - - if (opts.wrapWithDirectory && !data.path) { - throw new Error('Must provide a path when wrapping with a directory') - } - - return data - }) -} - -function preloadFile (file, self, opts) { - const isRootFile = !file.path || opts.wrapWithDirectory - ? 
file.path === '' - : !file.path.includes('/') - - const shouldPreload = isRootFile && !opts.onlyHash && opts.preload !== false - - if (shouldPreload) { - self._preload(file.hash) - } - - return file -} - -function pinFile (file, self, opts, cb) { - // Pin a file if it is the root dir of a recursive add or the single file - // of a direct add. - const pin = 'pin' in opts ? opts.pin : true - const isRootDir = !file.path.includes('/') - const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg - if (shouldPin) { - // Note: addPullStream() has already taken a GC lock, so tell - // pin.add() not to take a (second) GC lock - return self.pin.add(file.hash, { preload: false, lock: false }, err => cb(err, file)) - } else { - cb(null, file) - } -} module.exports = function (self) { - // Internal add func that gets used by all add funcs return function addPullStream (options) { - options = options || {} - - let chunkerOptions - try { - chunkerOptions = parseChunkerString(options.chunker) - } catch (err) { - return pullMap(() => { throw err }) - } - const opts = Object.assign({}, { - shardSplitThreshold: self._options.EXPERIMENTAL.sharding - ? 1000 - : Infinity - }, options, { - chunker: chunkerOptions.chunker, - chunkerOptions: chunkerOptions.chunkerOptions + return toPullStream.transform((source) => { + return self._addAsyncIterator(source, options) }) - - // CID v0 is for multihashes encoded with sha2-256 - if (opts.hashAlg && opts.cidVersion !== 1) { - opts.cidVersion = 1 - } - - let total = 0 - - const prog = opts.progress || noop - const progress = (bytes) => { - total += bytes - prog(total) - } - - opts.progress = progress - return self._gcLock.pullReadLock(() => pull( - pullMap(content => normalizeContent(content, opts)), - pullFlatten(), - pullMap(file => ({ - path: file.path ? file.path : undefined, - content: file.content ? 
toAsyncIterator(file.content) : undefined - })), - toPullStream.transform(source => importer(source, self._ipld, opts)), - pullAsyncMap((file, cb) => prepareFile(file, self, opts, cb)), - pullMap(file => preloadFile(file, self, opts)), - pullAsyncMap((file, cb) => pinFile(file, self, opts, cb)) - )) } } diff --git a/src/core/components/files-regular/add-readable-stream.js b/src/core/components/files-regular/add-readable-stream.js index f394a9782a..5451fa9a0e 100644 --- a/src/core/components/files-regular/add-readable-stream.js +++ b/src/core/components/files-regular/add-readable-stream.js @@ -1,55 +1,11 @@ 'use strict' -const pull = require('pull-stream') -const pushable = require('pull-pushable') -const Duplex = require('readable-stream').Duplex - -class AddHelper extends Duplex { - constructor (pullStream, push, options) { - super(Object.assign({ objectMode: true }, options)) - this._pullStream = pullStream - this._pushable = push - this._waitingPullFlush = [] - } - - _read () { - this._pullStream(null, (end, data) => { - while (this._waitingPullFlush.length) { - const cb = this._waitingPullFlush.shift() - cb() - } - if (end) { - if (end instanceof Error) { - this.emit('error', end) - } else { - this.push(null) - } - } else { - this.push(data) - } - }) - } - - _write (chunk, encoding, callback) { - this._waitingPullFlush.push(callback) - this._pushable.push(chunk) - } -} +const toStream = require('it-to-stream') module.exports = function (self) { - return (options) => { - options = options || {} - - const p = pushable() - const s = pull( - p, - self.addPullStream(options) - ) - - const retStream = new AddHelper(s, p) - - retStream.once('finish', () => p.end()) - - return retStream + return function addReadableStream (options) { + return toStream.transform(source => { + return self._addAsyncIterator(source, options) + }) } } diff --git a/src/core/components/files-regular/add.js b/src/core/components/files-regular/add.js index d254159b7f..20d641648d 100644 --- 
a/src/core/components/files-regular/add.js +++ b/src/core/components/files-regular/add.js @@ -1,48 +1,22 @@ 'use strict' -const promisify = require('promisify-es6') -const pull = require('pull-stream') -const sort = require('pull-sort') -const isSource = require('is-pull-stream').isSource -const validateAddInput = require('ipfs-utils/src/files/add-input-validation') +const all = require('async-iterator-all') module.exports = function (self) { - const add = promisify((data, options, callback) => { - if (typeof options === 'function') { + // can't use callbackify because if `data` is a pull stream + // it thinks we are passing a callback. This is why we can't have nice things. + return (data, options, callback) => { + if (!callback && typeof options === 'function') { callback = options options = {} } - options = options || {} + const result = all(self._addAsyncIterator(data, options)) - try { - validateAddInput(data) - } catch (err) { - return callback(err) + if (!callback) { + return result } - pull( - pull.values([data]), - self.addPullStream(options), - sort((a, b) => { - if (a.path < b.path) return 1 - if (a.path > b.path) return -1 - return 0 - }), - pull.collect(callback) - ) - }) - - return function () { - const args = Array.from(arguments) - - // If we .add(), then promisify thinks the pull stream - // is a callback! Add an empty options object in this case so that a - // promise is returned. 
- if (args.length === 1 && isSource(args[0])) { - args.push({}) - } - - return add.apply(null, args) + result.then((result) => callback(null, result), callback) } } diff --git a/src/core/components/files-regular/index.js b/src/core/components/files-regular/index.js index 3261041766..50e1f90ff4 100644 --- a/src/core/components/files-regular/index.js +++ b/src/core/components/files-regular/index.js @@ -8,6 +8,7 @@ module.exports = (self) => { addFromURL: require('./add-from-url')(self), addPullStream: require('./add-pull-stream')(self), addReadableStream: require('./add-readable-stream')(self), + _addAsyncIterator: require('./add-async-iterator')(self), cat: require('./cat')(self), catPullStream: require('./cat-pull-stream')(self), catReadableStream: require('./cat-readable-stream')(self), diff --git a/src/core/components/pin/gc-lock.js b/src/core/components/pin/gc-lock.js index 082b7a687e..59cb18b497 100644 --- a/src/core/components/pin/gc-lock.js +++ b/src/core/components/pin/gc-lock.js @@ -58,13 +58,14 @@ class PullLocker { } // Request the lock - this.mutex[this.type]((releaseLock) => { - // The lock has been granted, so run the locked piece of code - cb(null, i) + this.mutex[this.type]() + .then(release => { + // Save the release function to be called when the stream completes + this.releaseLock = release - // Save the release function to be called when the stream completes - this.releaseLock = releaseLock - }) + // The lock has been granted, so run the locked piece of code + cb(null, i) + }, cb) }) } diff --git a/src/core/components/pin/gc.js b/src/core/components/pin/gc.js index 544b151bf3..9f8714abcc 100644 --- a/src/core/components/pin/gc.js +++ b/src/core/components/pin/gc.js @@ -41,6 +41,7 @@ module.exports = function gc (self) { log('GC failed to delete unmarked blocks', err) return lockCb(err) } + lockCb(null, res) }) }) diff --git a/src/core/runtime/add-from-fs-browser.js b/src/core/runtime/add-from-fs-browser.js index 52e56d1ea2..10f9884a03 100644 --- 
a/src/core/runtime/add-from-fs-browser.js +++ b/src/core/runtime/add-from-fs-browser.js @@ -1,10 +1,9 @@ 'use strict' -const promisify = require('promisify-es6') +const callbackify = require('callbackify') -module.exports = self => { - return promisify((...args) => { - const callback = args.pop() - callback(new Error('not available in the browser')) +module.exports = () => { + return callbackify(() => { + throw new Error('not available in the browser') }) } diff --git a/src/core/runtime/add-from-fs-nodejs.js b/src/core/runtime/add-from-fs-nodejs.js index cff5375fbd..9370020752 100644 --- a/src/core/runtime/add-from-fs-nodejs.js +++ b/src/core/runtime/add-from-fs-nodejs.js @@ -1,19 +1,13 @@ 'use strict' -const promisify = require('promisify-es6') -const pull = require('pull-stream') -const globSource = require('../../utils/files/glob-source') +const callbackify = require('callbackify') +const globSource = require('ipfs-utils/src/files/glob-source') +const all = require('async-iterator-all') module.exports = self => { - return promisify((...args) => { - const callback = args.pop() + return callbackify.variadic((...args) => { const options = typeof args[args.length - 1] === 'string' ? 
{} : args.pop() - const paths = args - pull( - globSource(...paths, options), - self.addPullStream(options), - pull.collect(callback) - ) + return all(self._addAsyncIterator(globSource(...args, options), options)) }) } diff --git a/src/http/api/resources/block.js b/src/http/api/resources/block.js index 52e7ac5586..38bfd666ab 100644 --- a/src/http/api/resources/block.js +++ b/src/http/api/resources/block.js @@ -7,6 +7,7 @@ const multibase = require('multibase') const Boom = require('@hapi/boom') const { cidToString } = require('../../../utils/cid') const debug = require('debug') +const all = require('async-iterator-all') const log = debug('ipfs:http-api:block') log.error = debug('ipfs:http-api:block:error') @@ -55,31 +56,26 @@ exports.put = { }, // pre request handler that parses the args and returns `data` which is assigned to `request.pre.args` - parseArgs: (request, h) => { + parseArgs: async (request, h) => { if (!request.payload) { throw Boom.badRequest("File argument 'data' is required") } - return new Promise((resolve, reject) => { - const parser = multipart.reqParser(request.payload) - let file + let data - parser.on('file', (fileName, fileStream) => { - file = Buffer.alloc(0) + for await (const part of multipart(request)) { + if (part.type !== 'file') { + continue + } - fileStream.on('data', (data) => { - file = Buffer.concat([file, data]) - }) - }) + data = Buffer.concat(await all(part.content)) + } - parser.on('end', () => { - if (!file) { - return reject(Boom.badRequest("File argument 'data' is required")) - } + if (!data) { + throw Boom.badRequest("File argument 'data' is required") + } - resolve({ data: file }) - }) - }) + return { data } }, // main route handler which is called after the above `parseArgs`, but only if the args were valid diff --git a/src/http/api/resources/config.js b/src/http/api/resources/config.js index dd52a60f33..6c6aa3dfca 100644 --- a/src/http/api/resources/config.js +++ b/src/http/api/resources/config.js @@ -7,6 +7,7 @@ const 
log = debug('ipfs:http-api:config') log.error = debug('ipfs:http-api:config:error') const multipart = require('ipfs-multipart') const Boom = require('@hapi/boom') +const all = require('async-iterator-all') exports.getOrSet = { // pre request handler that parses the args and returns `key` & `value` which are assigned to `request.pre.args` @@ -127,20 +128,22 @@ exports.replace = { throw Boom.badRequest("Argument 'file' is required") } - const fileStream = await new Promise((resolve, reject) => { - multipart.reqParser(request.payload) - .on('file', (fileName, fileStream) => resolve(fileStream)) - .on('end', () => reject(Boom.badRequest("Argument 'file' is required"))) - }) + let file - const file = await new Promise((resolve, reject) => { - fileStream - .on('data', data => resolve(data)) - .on('end', () => reject(Boom.badRequest("Argument 'file' is required"))) - }) + for await (const part of multipart(request)) { + if (part.type !== 'file') { + continue + } + + file = Buffer.concat(await all(part.content)) + } + + if (!file) { + throw Boom.badRequest("Argument 'file' is required") + } try { - return { config: JSON.parse(file.toString()) } + return { config: JSON.parse(file.toString('utf8')) } } catch (err) { throw Boom.boomify(err, { message: 'Failed to decode file as config' }) } diff --git a/src/http/api/resources/dag.js b/src/http/api/resources/dag.js index be6af04e8b..f22fd929c3 100644 --- a/src/http/api/resources/dag.js +++ b/src/http/api/resources/dag.js @@ -11,6 +11,7 @@ const debug = require('debug') const { cidToString } = require('../../../utils/cid') +const all = require('async-iterator-all') const log = debug('ipfs:http-api:dag') log.error = debug('ipfs:http-api:dag:error') @@ -149,17 +150,19 @@ exports.put = { throw Boom.badRequest("File argument 'object data' is required") } - const fileStream = await new Promise((resolve, reject) => { - multipart.reqParser(request.payload) - .on('file', (name, stream) => resolve(stream)) - .on('end', () => 
reject(Boom.badRequest("File argument 'object data' is required"))) - }) + let data - const data = await new Promise((resolve, reject) => { - fileStream - .on('data', data => resolve(data)) - .on('end', () => reject(Boom.badRequest("File argument 'object data' is required"))) - }) + for await (const part of multipart(request)) { + if (part.type !== 'file') { + continue + } + + data = Buffer.concat(await all(part.content)) + } + + if (!data) { + throw Boom.badRequest("File argument 'object data' is required") + } let format = request.query.format diff --git a/src/http/api/resources/files-regular.js b/src/http/api/resources/files-regular.js index dbd1f71a53..3749a88999 100644 --- a/src/http/api/resources/files-regular.js +++ b/src/http/api/resources/files-regular.js @@ -6,7 +6,6 @@ const tar = require('tar-stream') const log = debug('ipfs:http-api:files') log.error = debug('ipfs:http-api:files:error') const pull = require('pull-stream') -const toPull = require('stream-to-pull-stream') const pushable = require('pull-pushable') const toStream = require('pull-stream-to-stream') const abortable = require('pull-abortable') @@ -19,6 +18,7 @@ const isIpfs = require('is-ipfs') const promisify = require('promisify-es6') const { cidToString } = require('../../../utils/cid') const { Format } = require('../../../core/components/files-regular/refs') +const pipe = require('it-pipe') function numberFromQuery (query, key) { if (query && query[key] !== undefined) { @@ -169,112 +169,92 @@ exports.add = { .options({ allowUnknown: true }) }, - async handler (request, h) { + handler (request, h) { if (!request.payload) { throw Boom.badRequest('Array, Buffer, or String is required.') } const { ipfs } = request.server.app + let filesParsed = false + let currentFileName + const output = new PassThrough() + const progressHandler = bytes => { + output.write(JSON.stringify({ + Name: currentFileName, + Bytes: bytes + }) + '\n') + } - const fileAdder = await new Promise((resolve, reject) => { - 
// TODO: make pull-multipart - const parser = multipart.reqParser(request.payload) - let filesParsed = false - const adder = pushable() + pipe( + multipart(request), + async function * (source) { + for await (const entry of source) { + currentFileName = entry.name || 'unknown' - parser.on('file', (fileName, fileStream) => { - if (!filesParsed) { - resolve(adder) - filesParsed = true - } + if (entry.type === 'file') { + filesParsed = true - adder.push({ - path: decodeURIComponent(fileName), - content: toPull(fileStream) - }) - }) + yield { + path: entry.name, + content: entry.content + } + } - parser.on('directory', (dirName) => { - adder.push({ - path: decodeURIComponent(dirName), - content: '' - }) - }) + if (entry.type === 'directory') { + filesParsed = true - parser.on('end', () => { + yield { + path: entry.name + } + } + } + }, + function (source) { + return ipfs._addAsyncIterator(source, { + cidVersion: request.query['cid-version'], + rawLeaves: request.query['raw-leaves'], + progress: request.query.progress ? progressHandler : null, + onlyHash: request.query['only-hash'], + hashAlg: request.query.hash, + wrapWithDirectory: request.query['wrap-with-directory'], + pin: request.query.pin, + chunker: request.query.chunker, + strategy: request.query.trickle ? 'trickle' : 'balanced', + preload: request.query.preload + }) + }, + async function (source) { + for await (const file of source) { + output.write(JSON.stringify({ + Name: file.path, + Hash: cidToString(file.hash, { base: request.query['cid-base'] }), + Size: file.size + }) + '\n') + } + } + ) + .then(() => { if (!filesParsed) { - reject(new Error("File argument 'data' is required.")) + throw new Error("File argument 'data' is required.") } - adder.end() }) - }) - - const replyStream = pushable() - const progressHandler = bytes => replyStream.push({ Bytes: bytes }) - - const options = { - cidVersion: request.query['cid-version'], - rawLeaves: request.query['raw-leaves'], - progress: request.query.progress ? 
progressHandler : null, - onlyHash: request.query['only-hash'], - hashAlg: request.query.hash, - wrapWithDirectory: request.query['wrap-with-directory'], - pin: request.query.pin, - chunker: request.query.chunker, - strategy: request.query.trickle ? 'trickle' : 'balanced', - preload: request.query.preload - } - - const aborter = abortable() - const stream = toStream.source(pull( - replyStream, - aborter, - ndjson.serialize() - )) - - // const stream = toStream.source(replyStream.source) - // hapi is not very clever and throws if no - // - _read method - // - _readableState object - // are there :( - if (!stream._read) { - stream._read = () => {} - stream._readableState = {} - stream.unpipe = () => {} - } - - let filesAdded = false - - pull( - fileAdder, - ipfs.addPullStream(options), - pull.map(file => ({ - Name: file.path, // addPullStream already turned this into a hash if it wanted to - Hash: cidToString(file.hash, { base: request.query['cid-base'] }), - Size: file.size - })), - pull.drain( - file => { - replyStream.push(file) - filesAdded = true - }, - err => { - if (err || !filesAdded) { - request.raw.res.addTrailers({ - 'X-Stream-Error': JSON.stringify({ - Message: err ? 
err.message : 'Failed to add files.', - Code: 0 - }) - }) - return aborter.abort() - } - - replyStream.end() + .catch(err => { + if (!filesParsed) { + output.write(' ') } - ) - ) - return h.response(stream) + request.raw.res.addTrailers({ + 'X-Stream-Error': JSON.stringify({ + Message: err.message, + Code: 0 + }) + }) + }) + .then(() => { + output.end() + }) + + return h.response(output) .header('x-chunked-output', '1') .header('content-type', 'application/json') .header('Trailer', 'X-Stream-Error') diff --git a/src/http/api/resources/object.js b/src/http/api/resources/object.js index ffbb6e070e..090213ee4b 100644 --- a/src/http/api/resources/object.js +++ b/src/http/api/resources/object.js @@ -2,6 +2,7 @@ const CID = require('cids') const multipart = require('ipfs-multipart') +const all = require('async-iterator-all') const dagPB = require('ipld-dag-pb') const { DAGNode, DAGLink } = dagPB const Joi = require('@hapi/joi') @@ -126,18 +127,19 @@ exports.put = { } const enc = request.query.inputenc + let data - const fileStream = await new Promise((resolve, reject) => { - multipart.reqParser(request.payload) - .on('file', (name, stream) => resolve(stream)) - .on('end', () => reject(Boom.badRequest("File argument 'data' is required"))) - }) + for await (const part of multipart(request)) { + if (part.type !== 'file') { + continue + } - const data = await new Promise((resolve, reject) => { - fileStream - .on('data', data => resolve(data)) - .on('end', () => reject(Boom.badRequest("File argument 'data' is required"))) - }) + data = Buffer.concat(await all(part.content)) + } + + if (!data) { + throw Boom.badRequest("File argument 'data' is required") + } if (enc === 'protobuf') { try { @@ -291,19 +293,21 @@ exports.parseKeyAndData = async (request, h) => { throw Boom.badRequest('invalid ipfs ref path') } - const fileStream = await new Promise((resolve, reject) => { - multipart.reqParser(request.payload) - .on('file', (fileName, fileStream) => resolve(fileStream)) - 
.on('end', () => reject(Boom.badRequest("File argument 'data' is required"))) - }) + let data + + for await (const part of multipart(request)) { + if (part.type !== 'file') { + continue + } - const fileData = await new Promise((resolve, reject) => { - fileStream - .on('data', data => resolve(data)) - .on('end', () => reject(Boom.badRequest("File argument 'data' is required"))) - }) + data = Buffer.concat(await all(part.content)) + } + + if (!data) { + throw Boom.badRequest("File argument 'data' is required") + } - return { data: fileData, key: cid } + return { data, key: cid } } exports.patchAppendData = { diff --git a/src/utils/files/glob-source.js b/src/utils/files/glob-source.js deleted file mode 100644 index 8c6368f043..0000000000 --- a/src/utils/files/glob-source.js +++ /dev/null @@ -1,109 +0,0 @@ -'use strict' - -const fs = require('fs') -const Path = require('path') -const pull = require('pull-stream') -const glob = require('glob') -const cat = require('pull-cat') -const defer = require('pull-defer') -const pushable = require('pull-pushable') -const map = require('async/map') -const errCode = require('err-code') - -/** -* Create a pull stream source that can be piped to ipfs.addPullStream for the -* provided file paths. -* -* @param {String} ...paths File system path(s) to glob from -* @param {Object} [options] Optional options -* @param {Boolean} [options.recursive] Recursively glob all paths in directories -* @param {Boolean} [options.hidden] Include .dot files in matched paths -* @param {Array} [options.ignore] Glob paths to ignore -* @param {Boolean} [options.followSymlinks] follow symlinks -* @returns {Function} pull stream source -*/ -module.exports = (...args) => { - const options = typeof args[args.length - 1] === 'string' ? {} : args.pop() - const paths = args - const deferred = defer.source() - - const globSourceOptions = { - recursive: options.recursive, - glob: { - dot: Boolean(options.hidden), - ignore: Array.isArray(options.ignore) ? 
options.ignore : [], - follow: options.followSymlinks != null ? options.followSymlinks : true - } - } - - // Check the input paths comply with options.recursive and convert to glob sources - map(paths, pathAndType, (err, results) => { - if (err) return deferred.abort(err) - - try { - const sources = results.map(res => toGlobSource(res, globSourceOptions)) - deferred.resolve(cat(sources)) - } catch (err) { - deferred.abort(err) - } - }) - - return pull( - deferred, - pull.map(({ path, contentPath }) => ({ - path, - content: fs.createReadStream(contentPath) - })) - ) -} - -function toGlobSource ({ path, type }, options) { - options = options || {} - - const baseName = Path.basename(path) - - if (type === 'file') { - return pull.values([{ path: baseName, contentPath: path }]) - } - - if (type === 'dir' && !options.recursive) { - throw errCode( - new Error(`'${path}' is a directory and recursive option not set`), - 'ERR_DIR_NON_RECURSIVE', - { path } - ) - } - - const globOptions = Object.assign({}, options.glob, { - cwd: path, - nodir: true, - realpath: false, - absolute: false - }) - - // TODO: want to use pull-glob but it doesn't have the features... - const pusher = pushable() - - glob('**/*', globOptions) - .on('match', m => pusher.push(m)) - .on('end', () => pusher.end()) - .on('abort', () => pusher.end()) - .on('error', err => pusher.end(err)) - - return pull( - pusher, - pull.map(p => ({ - path: `${baseName}/${toPosix(p)}`, - contentPath: Path.join(path, p) - })) - ) -} - -function pathAndType (path, cb) { - fs.stat(path, (err, stat) => { - if (err) return cb(err) - cb(null, { path, type: stat.isDirectory() ? 
'dir' : 'file' }) - }) -} - -const toPosix = path => path.replace(/\\/g, '/') diff --git a/src/utils/mutex.js b/src/utils/mutex.js index b91f3139a4..2ff7f5e1ae 100644 --- a/src/utils/mutex.js +++ b/src/utils/mutex.js @@ -16,45 +16,56 @@ class Mutex { } readLock (lockedFn, cb) { - return this._lock('readLock', lockedFn, cb) + if (lockedFn && cb) { + this._lock('readLock').then(release => { + lockedFn((err, res) => { + release() + + cb(err, res) + }) + }, cb) + return + } + + return this._lock('readLock') } writeLock (lockedFn, cb) { - return this._lock('writeLock', lockedFn, cb) + if (lockedFn && cb) { + this._lock('writeLock').then(release => { + lockedFn((err, res) => { + release() + + cb(err, res) + }) + }, cb) + return + } + + return this._lock('writeLock') } /** * Request a read or write lock * * @param {String} type The type of lock: readLock / writeLock - * @param {function(releaseLock)} lockedFn A function that runs the locked piece of code and calls releaseLock when it completes - * @param {function(err, res)} [cb] A function that is called when the locked function completes - * @returns {void} + * @returns {Promise} */ - _lock (type, lockedFn, cb = noop) { - assert(typeof lockedFn === 'function', `first argument to Mutex.${type}() must be a function`) - assert(typeof cb === 'function', `second argument to Mutex.${type}() must be a callback function`) + async _lock (type) { + assert(typeof type === 'string', `first argument to Mutex.${type}() must be a string, got ${typeof type}`) const lockId = this.lockId++ this.log(`[${lockId}] ${type} requested`) - // mortice presents a promise based API, so we need to give it a function - // that returns a Promise. - // The function is invoked when mortice gives permission to run the locked - // piece of code - const locked = () => new Promise((resolve, reject) => { - this.log(`[${lockId}] ${type} started`) - lockedFn((err, res) => { - this.log(`[${lockId}] ${type} released`) - err ? 
reject(err) : resolve(res) - }) - }) + // Get a Promise for the lock, wrap it for logging + const release = await this.mutex[type]() - // Get a Promise for the lock - const lock = this.mutex[type](locked) + this.log(`[${lockId}] ${type} started`) - // When the locked piece of code is complete, the Promise resolves - return lock.then(res => cb(null, res), cb) + return () => { + this.log(`[${lockId}] ${type} released`) + release() + } } } diff --git a/test/cli/files.js b/test/cli/files.js index 4e58d1667c..5e266a9d4a 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -31,91 +31,91 @@ describe('files', () => runOnAndOff((thing) => { .toString('utf-8') const recursiveGetDirResults = [ - 'added QmR56UJmAaZLXLdTT1ALrE9vVqV8soUEekm9BMd4FnuYqV recursive-get-dir/version', - 'added QmYE7xo6NxbHEVEHej1yzxijYaNY51BaeKxjXxn6Ssa6Bs recursive-get-dir/init-docs/tour/0.0-intro', - 'added QmciSU8hfpAXKjvK5YLUSwApomGSWN5gFbP4EpDAEzu2Te recursive-get-dir/init-docs/tour', - 'added QmTumTjvcYCAvRRwQ8sDRxh8ezmrcr88YFU7iYNroGGTBZ recursive-get-dir/init-docs/security-notes', - 'added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB recursive-get-dir/init-docs/readme', - 'added QmdncfsVm2h5Kqq9hPmU7oAVX2zTSVP3L869tgTbPYnsha recursive-get-dir/init-docs/quick-start', - 'added QmY5heUM5qgRubMDD1og9fhCPA6QdkMp3QCwd4s7gJsyE7 recursive-get-dir/init-docs/help', - 'added QmQN88TEidd3RY2u3dpib49fERTDfKtDpvxnvczATNsfKT recursive-get-dir/init-docs/docs/index', - 'added QmegvLXxpVKiZ4b57Xs1syfBVRd8CbucVHAp7KpLQdGieC recursive-get-dir/init-docs/docs', - 'added QmYCvbfNbCwFR45HiNP45rwJgvatpiW38D961L5qAhUM5Y recursive-get-dir/init-docs/contact', - 'added QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V recursive-get-dir/init-docs/about', - 'added QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU recursive-get-dir/init-docs', - 'added QmeiTxVN4xAjxUzHzBqCpK3GaT3GeiLQeJRpYDXDfLeEmR recursive-get-dir/datastore/MANIFEST-000014', - 'added QmQpc75sJGUv59dAwHF7vazBGV9o6C7z587Dp9nv7HYAps 
recursive-get-dir/datastore/LOG.old', - 'added QmbFNLNr9at9eK5LrNyUdyE5cdLb5yaT9DkjXw7BK68kcM recursive-get-dir/datastore/LOG', - 'added QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH recursive-get-dir/datastore/LOCK', - 'added QmVJi93Yj5RW3NuqqxonGz3jAXUYHrdQvWrURxg1TiLEuX recursive-get-dir/datastore/CURRENT', - 'added QmcJ6TXPMPm6puSC9vpxuG57PyfGpr8bvTgkKU9SHHU5Uo recursive-get-dir/datastore/000010.ldb', - 'added QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU recursive-get-dir/datastore/000005.ldb', - 'added QmfExFwdFKspsY2q5WnhQjd1QDKnjpTQ4UkiHqqQxV7h67 recursive-get-dir/datastore/000002.ldb', - 'added QmUqyZtPmsRy1U5Mo8kz2BAMmk1hfJ7yW1KAFTMB2odsFv recursive-get-dir/datastore', - 'added QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN recursive-get-dir/config', - 'added QmbJgQa4XNBFvGQcLbWBNtvWZetbCUKiyAQNfePoTzwf9L recursive-get-dir/blocks/CIQPD/CIQPDQJBGYDZNMOAGGYNRNMP2VDKWBWGAEDDEJDACM3SGG3VIANDDXI.data', - 'added QmSCUPYy4CfFt9nA61J9v2DMfJygQAJjaUcRmygDbVME2D recursive-get-dir/blocks/CIQPD', - 'added QmTU72W5EAnNUAtnVW1qoFzdDD8FyiBjpF5MUzjBAFnHS6 recursive-get-dir/blocks/CIQOY/CIQOYW2THIZBRGI7IN33ROGCKOFZLXJJ2MPKYZBTV4H3N7GYHXMAO6A.data', - 'added QmQ1mNtPTJ6JG3TNNq73m2orvsfKCKrqMKoXyXwRKWM1ma recursive-get-dir/blocks/CIQOY', - 'added QmaTXag3TaaG6hFUXGxybEuMUk7UHSutZobZgDtjr6aXjf recursive-get-dir/blocks/CIQON/CIQONICFQZH7QVU6IPSIM3AK7AD554D3BWZPAGEAQYQOWMFZQDUUAEI.data', - 'added QmNi9kKnfKJGuofhBRKMdKj5R6BQAYHWRtu3vXJHRy69TE recursive-get-dir/blocks/CIQON', - 'added QmTH5Jc2uhu5LqGEFAgrn2HwoDHLpvQd9b6fyoUGi6aeQu recursive-get-dir/blocks/CIQOM/CIQOMBKARLB7PAITVSNH7VEGIQJRPL6J7FT2XYVKAXT4MQPXXPUYUNY.data', - 'added Qmec4atiyfysPR8HU5gPfjKY1NpQDY2kmSeeadx8wLEBqY recursive-get-dir/blocks/CIQOM', - 'added QmeBypQ2yE4t4Loybhby15DjkeLDXJKCcgMfxTXeFnHa8F recursive-get-dir/blocks/CIQOL/CIQOLBQZSZAODJGGH6RYYVBUXHTS3SM5EORZDU63LYPEFUAFE4SBM4I.data', - 'added Qmd6s8LXAEjW7y9QbGSzeuewrRBYjJHmcazG3Hk7cJ74da recursive-get-dir/blocks/CIQOL', - 'added 
QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH recursive-get-dir/blocks/CIQOH/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data', - 'added QmTnaav9VTSVyLu8PvRzh4gJ8heF9rpdWzeDb7rMx5DkxV recursive-get-dir/blocks/CIQOH', - 'added Qmc1nasezDdPyZiXB5VB6Aygzswcr6QkauzzXMeUGouHTN recursive-get-dir/blocks/CIQMB/CIQMB7DLJFKD267QJ2B5FJNHZPTSVA7IB6OHXSQ2XSVEEKMKK6RT75I.data', - 'added QmeqJBkwmzsVR79HBKLW7AYhfAMxMaJs5dGHSgey5ezy7N recursive-get-dir/blocks/CIQMB', - 'added QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx recursive-get-dir/blocks/CIQLB/CIQLBS5HG4PRCRQ7O4EBXFD5QN6MTI5YBYMCVQJDXPKCOVR6RMLHZFQ.data', - 'added QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD recursive-get-dir/blocks/CIQLB/CIQLBK52T5EHVHZY5URTG5JS3JCUJDQM2DRB5RVF33DCUUOFJNGVDUI.data', - 'added Qmbqod68qdYiEs7kkTGu7G59adekUbAFAAg7WEyM6iPP5z recursive-get-dir/blocks/CIQLB', - 'added Qmd4FKC6GcKnhJHnEJJwqg9A1eDd7JXKkG5v3iv9XSHtwq recursive-get-dir/blocks/CIQKK/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data', - 'added QmUBsjP45UUHzKymAUqwEFJsuCvfC1AcaLvBgMsoGMipoG recursive-get-dir/blocks/CIQKK', - 'added QmWR1EuH5cui4EW5W16ADxzmYEFPjHDs1LcPe3uQL3CmiS recursive-get-dir/blocks/CIQJG/CIQJGO2B2N75IUEM372FSMG76VV256I4PXBULZZ5ASNLK4FL4EG7XOI.data', - 'added QmWrs7zVFkbpsTEbEpe3MyAB8ssUNp8jamE7i4PZ736zWy recursive-get-dir/blocks/CIQJG', - 'added QmWNXfkCTxSAuFBdNQ8gGmyxnZ28XrzRbjjmvCViLoNU5W recursive-get-dir/blocks/CIQJF/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data', - 'added QmWjsRHRK7ENAhMvgwfkze9bkySxjAsNMGXrMGMsFcrDWU recursive-get-dir/blocks/CIQJF', - 'added QmTt3mbtfVGEfqqjec9WZcWaC4SkesErDPAhhr8NRfsWFp recursive-get-dir/blocks/CIQJB/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data', - 'added QmQebEvyaFbez884asHoTrNsbck1JdMLcM8EhUFYCraGHZ recursive-get-dir/blocks/CIQJB', - 'added Qmb7AKKnZwLLFtseMZiHkq4fKNhP5rSggcvN2oGXUBZv8B recursive-get-dir/blocks/CIQJ2/CIQJ23BL4UHXA2KTI6NLTXZM4PW4VEFWQBJ4ACZQAS37BLGL4HUO5XY.data', - 'added 
QmT1zKYzUpt2kF8AEV2igH9hXwzGu4q8pc4uJ9BLWqjMhT recursive-get-dir/blocks/CIQJ2', - 'added QmazVLpyExfPkwASngrz3MDZD1pdaBUxj7VqAkjAFAWaa9 recursive-get-dir/blocks/CIQIX/CIQIXBZMUTXFC5QIGMLJNXLLHZOPGSL2PBC65D4UIVWM6TI5F5TAFNI.data', - 'added QmNM7hxdJfaApCJe1ubCrhAQSA6AWQXUvKZrHcf5RxsNvn recursive-get-dir/blocks/CIQIX', - 'added QmRfQcw4qrW91Vqj3evXiH11MuvRVJb7S7vKSgau7aBzRa recursive-get-dir/blocks/CIQHP/CIQHPUVCWD6JA6AFUVD6VA64TGWP67KYA3AIMBUMVWGZ5AQN2L2HSWQ.data', - 'added QmRsyLntZoGPWURqtemAmgRdtmuCjbbdZ5xzkCAEUhh4iU recursive-get-dir/blocks/CIQHP', - 'added QmU7mw6KaaAJA6tHi9FdiHu2HtA6rjb6e1aYuWscwTJ9yV recursive-get-dir/blocks/CIQHB/CIQHBGZNZRPWVEFNMTLP4OS5EAVHFMCX2HD7FZUC2B3WUU3D4LGKS5A.data', - 'added Qma1ytRhbzt3tGcJopMvd7g3ZE38mRKTTuJuRaHmguq8mN recursive-get-dir/blocks/CIQHB', - 'added QmVLdEzvgvM5k7NUWWSgURAZuJmiQBnbuZga3EpRip8xTu recursive-get-dir/blocks/CIQHA/CIQHAKDLTL5GMIFGN5YVY4BA22FPHUIODJEXS4LCTQDWA275XAJDAPI.data', - 'added QmddXWuKjfCbF6HXR9jStKDoLEAZ7xc8SZgDanQLMiGjpn recursive-get-dir/blocks/CIQHA', - 'added QmZe7irS2FotZtsUx9wpy5QPKJF6YEaAEZLHLUwQy6XgY8 recursive-get-dir/blocks/CIQH7/CIQH7OEYWXL34RWYL7VXLWEU4FWPVGT24VJT7DUZPTNLF25N25IGGQA.data', - 'added Qmb5NqTFar7MnxyRwwQtfb81nyS6g5NRG1bdo6AefmvhXU recursive-get-dir/blocks/CIQH7', - 'added QmWGima5TqLfUTzUsCF6h3oXGvwu3QQ1zjZYLDMaGeFRbB recursive-get-dir/blocks/CIQGP/CIQGPALRQ24P6NS4OWHTQ7R247ZI7KJWP3QWPQYS43LFULQC5ANLQFI.data', - 'added QmZMHzPS1qema8HvLk4jRuSLrUjRHZ8Siu6Wc4njAmx8MG recursive-get-dir/blocks/CIQGP', - 'added QmabxyrxY1uUzHcd7mTBCfibFwemGC89vuJFUw4UkebmSn recursive-get-dir/blocks/CIQGF/CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY.data', - 'added QmYLZ3uqYLkViS7Bh3vxcT5yrPscyWMV11iqFVJnqA7JVT recursive-get-dir/blocks/CIQGF', - 'added QmSMYdQtDTqykd7oLKZq3vJtS7KoWZwjL7GA9zj6UsCngE recursive-get-dir/blocks/CIQFT/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data', - 'added QmWjMLA3ppmngQaHs8YEQ3Bru4tKoDeJh2cKv7U7dtLUuf recursive-get-dir/blocks/CIQFT', - 
'added QmVdfEEiQmem5GanTjja7HKHNFpfa2LB8196fD9m9b656Q recursive-get-dir/blocks/CIQFF/CIQFFRR4O52TS2Z7QLDDTF32OIR4FWLKT5YLL7MLDVIT7DC3NHOK5VA.data', - 'added QmUcLzGWDuBPA6iVF65n676KiCbQNXV4owecfSR4QFVy3U recursive-get-dir/blocks/CIQFF', - 'added QmNtkNt8oZASY7AYVpswA3RQ43hASjP1NGj8GB1L6vgHUx recursive-get-dir/blocks/CIQFE/CIQFEAGMNNXXTYKYQSANT6IBNTFN7WR5RPD5F6GN6MBKUUO25DNOTWQ.data', - 'added QmXrrAYhbThjuHRPA23HujCLFbTrnwd3jmvNbZBAnKEddk recursive-get-dir/blocks/CIQFE', - 'added QmcozcFvmaTqVPaFXgZUHPsroSG8YP6tHEYyFaFhnonwSG recursive-get-dir/blocks/CIQEU/CIQEUWUVLBXVFYSYCHHSCRTXCYHGIOBXKWUMKFR3UPAFHQ5WK5362FQ.data', - 'added QmVU52FEpQQF3ViFGUKLhLeJaRKpCZfqN4AWLuzAXyrzyU recursive-get-dir/blocks/CIQEU', - 'added QmPiJAUg2J3dWWnQvtKXbkr8g1qCxX4RCPkU3wFmxd6x8H recursive-get-dir/blocks/CIQER/CIQERMRAAFXUAUOX3V2DCW7R77FRIVHQ3V5OIPPS3XQBX34KRPNOIRQ.data', - 'added QmboWqKvhjxdBw1AfxQ56sqhqrrtG7ibaGhHb19TPnjr69 recursive-get-dir/blocks/CIQER', - 'added QmPbgB6GzeUEnvXqQgYLTJnrdcm95kGRWH36euTr2eAB2w recursive-get-dir/blocks/CIQEN/CIQENVCICS44LLYUDQ5KVN6ALXC6QRHK2X4R6EUFRMBB5OSFO2FUYDQ.data', - 'added QmZCxJdNTR1MHRNGGWgZRZdW66FTpyTLdT8odbUz1CP7J9 recursive-get-dir/blocks/CIQEN', - 'added QmQCYnQWAHqSy1ts7VmHbp18BFEmbVvfX7FASVQF21uo5g recursive-get-dir/blocks/CIQDV/CIQDVKITASFS55MC2TXCX5XMZLMGTYVODWPEDIW7JYEG7YXBIA7IUWY.data', - 'added QmQaTiy1CufRfP3zTCW8fAtNWjvdeWuMkvTi4q6dykNDif recursive-get-dir/blocks/CIQDV', - 'added QmSynZ3cTjBzpMTSPCP5Q6RJSa9WEAA8p178cZRLnKdahz recursive-get-dir/blocks/CIQDM/CIQDMKFEUGKSLXMEXO774EZOYCYNHPRVFD53ZSAU7237F67XDSQGCYQ.data', - 'added QmNS3zMGDTPRTuR8nbPz4ddQpGN4gtuVyZ5G3mn3ajg4Rb recursive-get-dir/blocks/CIQDM', - 'added QmTpxXKswGwhTYLn1qL4EG9aLGFXS2LSnreceV2FJeArVh recursive-get-dir/blocks/CIQDD/CIQDDZ5EDQK5AP7LRTLZHQZUR2R3GECRFV3WPKNL7PL2SKFIL2LXC4Y.data', - 'added Qmbm7ToWsTta4Y1RipmRudCenKF7qAHRVTCtTPuoVqfY8H recursive-get-dir/blocks/CIQDD/CIQDDVW2EZIJF4NQH7WJNESD7XHQSXA5EGJVNTPVHD7444C2KLKXHDI.data', - 'added 
QmSCq2peGvGDXZKuX565UczxRpgzsiPPF3PgcJq9zDbByL recursive-get-dir/blocks/CIQDD', 'added QmdgaiKe1HFfhrZvLwTFCrXmgTojhSWuBvyFXUVc8KzJVc recursive-get-dir/blocks/CIQBE/CIQBED3K6YA5I3QQWLJOCHWXDRK5EXZQILBCKAPEDUJENZ5B5HJ5R3A.data', + 'added Qmbm7ToWsTta4Y1RipmRudCenKF7qAHRVTCtTPuoVqfY8H recursive-get-dir/blocks/CIQDD/CIQDDVW2EZIJF4NQH7WJNESD7XHQSXA5EGJVNTPVHD7444C2KLKXHDI.data', + 'added QmTpxXKswGwhTYLn1qL4EG9aLGFXS2LSnreceV2FJeArVh recursive-get-dir/blocks/CIQDD/CIQDDZ5EDQK5AP7LRTLZHQZUR2R3GECRFV3WPKNL7PL2SKFIL2LXC4Y.data', + 'added QmSynZ3cTjBzpMTSPCP5Q6RJSa9WEAA8p178cZRLnKdahz recursive-get-dir/blocks/CIQDM/CIQDMKFEUGKSLXMEXO774EZOYCYNHPRVFD53ZSAU7237F67XDSQGCYQ.data', + 'added QmQCYnQWAHqSy1ts7VmHbp18BFEmbVvfX7FASVQF21uo5g recursive-get-dir/blocks/CIQDV/CIQDVKITASFS55MC2TXCX5XMZLMGTYVODWPEDIW7JYEG7YXBIA7IUWY.data', + 'added QmPbgB6GzeUEnvXqQgYLTJnrdcm95kGRWH36euTr2eAB2w recursive-get-dir/blocks/CIQEN/CIQENVCICS44LLYUDQ5KVN6ALXC6QRHK2X4R6EUFRMBB5OSFO2FUYDQ.data', + 'added QmPiJAUg2J3dWWnQvtKXbkr8g1qCxX4RCPkU3wFmxd6x8H recursive-get-dir/blocks/CIQER/CIQERMRAAFXUAUOX3V2DCW7R77FRIVHQ3V5OIPPS3XQBX34KRPNOIRQ.data', + 'added QmcozcFvmaTqVPaFXgZUHPsroSG8YP6tHEYyFaFhnonwSG recursive-get-dir/blocks/CIQEU/CIQEUWUVLBXVFYSYCHHSCRTXCYHGIOBXKWUMKFR3UPAFHQ5WK5362FQ.data', + 'added QmNtkNt8oZASY7AYVpswA3RQ43hASjP1NGj8GB1L6vgHUx recursive-get-dir/blocks/CIQFE/CIQFEAGMNNXXTYKYQSANT6IBNTFN7WR5RPD5F6GN6MBKUUO25DNOTWQ.data', + 'added QmVdfEEiQmem5GanTjja7HKHNFpfa2LB8196fD9m9b656Q recursive-get-dir/blocks/CIQFF/CIQFFRR4O52TS2Z7QLDDTF32OIR4FWLKT5YLL7MLDVIT7DC3NHOK5VA.data', + 'added QmSMYdQtDTqykd7oLKZq3vJtS7KoWZwjL7GA9zj6UsCngE recursive-get-dir/blocks/CIQFT/CIQFTFEEHEDF6KLBT32BFAGLXEZL4UWFNWM4LFTLMXQBCERZ6CMLX3Y.data', + 'added QmabxyrxY1uUzHcd7mTBCfibFwemGC89vuJFUw4UkebmSn recursive-get-dir/blocks/CIQGF/CIQGFTQ7FSI2COUXWWLOQ45VUM2GUZCGAXLWCTOKKPGTUWPXHBNIVOY.data', + 'added QmWGima5TqLfUTzUsCF6h3oXGvwu3QQ1zjZYLDMaGeFRbB 
recursive-get-dir/blocks/CIQGP/CIQGPALRQ24P6NS4OWHTQ7R247ZI7KJWP3QWPQYS43LFULQC5ANLQFI.data', + 'added QmZe7irS2FotZtsUx9wpy5QPKJF6YEaAEZLHLUwQy6XgY8 recursive-get-dir/blocks/CIQH7/CIQH7OEYWXL34RWYL7VXLWEU4FWPVGT24VJT7DUZPTNLF25N25IGGQA.data', + 'added QmVLdEzvgvM5k7NUWWSgURAZuJmiQBnbuZga3EpRip8xTu recursive-get-dir/blocks/CIQHA/CIQHAKDLTL5GMIFGN5YVY4BA22FPHUIODJEXS4LCTQDWA275XAJDAPI.data', + 'added QmU7mw6KaaAJA6tHi9FdiHu2HtA6rjb6e1aYuWscwTJ9yV recursive-get-dir/blocks/CIQHB/CIQHBGZNZRPWVEFNMTLP4OS5EAVHFMCX2HD7FZUC2B3WUU3D4LGKS5A.data', + 'added QmRfQcw4qrW91Vqj3evXiH11MuvRVJb7S7vKSgau7aBzRa recursive-get-dir/blocks/CIQHP/CIQHPUVCWD6JA6AFUVD6VA64TGWP67KYA3AIMBUMVWGZ5AQN2L2HSWQ.data', + 'added QmazVLpyExfPkwASngrz3MDZD1pdaBUxj7VqAkjAFAWaa9 recursive-get-dir/blocks/CIQIX/CIQIXBZMUTXFC5QIGMLJNXLLHZOPGSL2PBC65D4UIVWM6TI5F5TAFNI.data', + 'added Qmb7AKKnZwLLFtseMZiHkq4fKNhP5rSggcvN2oGXUBZv8B recursive-get-dir/blocks/CIQJ2/CIQJ23BL4UHXA2KTI6NLTXZM4PW4VEFWQBJ4ACZQAS37BLGL4HUO5XY.data', + 'added QmTt3mbtfVGEfqqjec9WZcWaC4SkesErDPAhhr8NRfsWFp recursive-get-dir/blocks/CIQJB/CIQJBQD2O6K4CGJVCCTJNUP57QHR4SKHZ74OIITBBGLOMCO3ZOLWLGA.data', + 'added QmWNXfkCTxSAuFBdNQ8gGmyxnZ28XrzRbjjmvCViLoNU5W recursive-get-dir/blocks/CIQJF/CIQJFGRQHQ45VCQLM7AJNF2GF5UHUAGGHC6LLAH6VYDEKLQMD4QLILY.data', + 'added QmWR1EuH5cui4EW5W16ADxzmYEFPjHDs1LcPe3uQL3CmiS recursive-get-dir/blocks/CIQJG/CIQJGO2B2N75IUEM372FSMG76VV256I4PXBULZZ5ASNLK4FL4EG7XOI.data', + 'added Qmd4FKC6GcKnhJHnEJJwqg9A1eDd7JXKkG5v3iv9XSHtwq recursive-get-dir/blocks/CIQKK/CIQKKLBWAIBQZOIS5X7E32LQAL6236OUKZTMHPQSFIXPWXNZHQOV7JQ.data', + 'added QmQ8ag7ysVyCMzJGFjxrUStwWtniQ69c7G9aezbmsKeNYD recursive-get-dir/blocks/CIQLB/CIQLBK52T5EHVHZY5URTG5JS3JCUJDQM2DRB5RVF33DCUUOFJNGVDUI.data', + 'added QmaSjzSSRanYzRGPXQY6m5SWfSkkfcnzNkurJEQc4chPJx recursive-get-dir/blocks/CIQLB/CIQLBS5HG4PRCRQ7O4EBXFD5QN6MTI5YBYMCVQJDXPKCOVR6RMLHZFQ.data', + 'added Qmc1nasezDdPyZiXB5VB6Aygzswcr6QkauzzXMeUGouHTN 
recursive-get-dir/blocks/CIQMB/CIQMB7DLJFKD267QJ2B5FJNHZPTSVA7IB6OHXSQ2XSVEEKMKK6RT75I.data', + 'added QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH recursive-get-dir/blocks/CIQOH/CIQOHMGEIKMPYHAUTL57JSEZN64SIJ5OIHSGJG4TJSSJLGI3PBJLQVI.data', + 'added QmeBypQ2yE4t4Loybhby15DjkeLDXJKCcgMfxTXeFnHa8F recursive-get-dir/blocks/CIQOL/CIQOLBQZSZAODJGGH6RYYVBUXHTS3SM5EORZDU63LYPEFUAFE4SBM4I.data', + 'added QmTH5Jc2uhu5LqGEFAgrn2HwoDHLpvQd9b6fyoUGi6aeQu recursive-get-dir/blocks/CIQOM/CIQOMBKARLB7PAITVSNH7VEGIQJRPL6J7FT2XYVKAXT4MQPXXPUYUNY.data', + 'added QmaTXag3TaaG6hFUXGxybEuMUk7UHSutZobZgDtjr6aXjf recursive-get-dir/blocks/CIQON/CIQONICFQZH7QVU6IPSIM3AK7AD554D3BWZPAGEAQYQOWMFZQDUUAEI.data', + 'added QmTU72W5EAnNUAtnVW1qoFzdDD8FyiBjpF5MUzjBAFnHS6 recursive-get-dir/blocks/CIQOY/CIQOYW2THIZBRGI7IN33ROGCKOFZLXJJ2MPKYZBTV4H3N7GYHXMAO6A.data', + 'added QmbJgQa4XNBFvGQcLbWBNtvWZetbCUKiyAQNfePoTzwf9L recursive-get-dir/blocks/CIQPD/CIQPDQJBGYDZNMOAGGYNRNMP2VDKWBWGAEDDEJDACM3SGG3VIANDDXI.data', + 'added QmPkWYfSLCEBLZu7BZt4kigGDMe3cpogMbeVf97gN2xJDN recursive-get-dir/config', + 'added QmfExFwdFKspsY2q5WnhQjd1QDKnjpTQ4UkiHqqQxV7h67 recursive-get-dir/datastore/000002.ldb', + 'added QmPFVLPmp9zv5Z5KUqLhe2EivAGccQW2r7M7jhVJGLZoZU recursive-get-dir/datastore/000005.ldb', + 'added QmcJ6TXPMPm6puSC9vpxuG57PyfGpr8bvTgkKU9SHHU5Uo recursive-get-dir/datastore/000010.ldb', + 'added QmVJi93Yj5RW3NuqqxonGz3jAXUYHrdQvWrURxg1TiLEuX recursive-get-dir/datastore/CURRENT', + 'added QmbFMke1KXqnYyBBWxB74N4c5SBnJMVAiMNRcGu6x1AwQH recursive-get-dir/datastore/LOCK', + 'added QmbFNLNr9at9eK5LrNyUdyE5cdLb5yaT9DkjXw7BK68kcM recursive-get-dir/datastore/LOG', + 'added QmQpc75sJGUv59dAwHF7vazBGV9o6C7z587Dp9nv7HYAps recursive-get-dir/datastore/LOG.old', + 'added QmeiTxVN4xAjxUzHzBqCpK3GaT3GeiLQeJRpYDXDfLeEmR recursive-get-dir/datastore/MANIFEST-000014', + 'added QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V recursive-get-dir/init-docs/about', + 'added QmYCvbfNbCwFR45HiNP45rwJgvatpiW38D961L5qAhUM5Y 
recursive-get-dir/init-docs/contact', + 'added QmQN88TEidd3RY2u3dpib49fERTDfKtDpvxnvczATNsfKT recursive-get-dir/init-docs/docs/index', + 'added QmY5heUM5qgRubMDD1og9fhCPA6QdkMp3QCwd4s7gJsyE7 recursive-get-dir/init-docs/help', + 'added QmdncfsVm2h5Kqq9hPmU7oAVX2zTSVP3L869tgTbPYnsha recursive-get-dir/init-docs/quick-start', + 'added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB recursive-get-dir/init-docs/readme', + 'added QmTumTjvcYCAvRRwQ8sDRxh8ezmrcr88YFU7iYNroGGTBZ recursive-get-dir/init-docs/security-notes', + 'added QmYE7xo6NxbHEVEHej1yzxijYaNY51BaeKxjXxn6Ssa6Bs recursive-get-dir/init-docs/tour/0.0-intro', + 'added QmR56UJmAaZLXLdTT1ALrE9vVqV8soUEekm9BMd4FnuYqV recursive-get-dir/version', 'added QmYwUkwNwJN2cevwXKL48DRpbbjbdLWyyLANG3BKTtsTZ8 recursive-get-dir/blocks/CIQBE', + 'added QmSCq2peGvGDXZKuX565UczxRpgzsiPPF3PgcJq9zDbByL recursive-get-dir/blocks/CIQDD', + 'added QmNS3zMGDTPRTuR8nbPz4ddQpGN4gtuVyZ5G3mn3ajg4Rb recursive-get-dir/blocks/CIQDM', + 'added QmQaTiy1CufRfP3zTCW8fAtNWjvdeWuMkvTi4q6dykNDif recursive-get-dir/blocks/CIQDV', + 'added QmZCxJdNTR1MHRNGGWgZRZdW66FTpyTLdT8odbUz1CP7J9 recursive-get-dir/blocks/CIQEN', + 'added QmboWqKvhjxdBw1AfxQ56sqhqrrtG7ibaGhHb19TPnjr69 recursive-get-dir/blocks/CIQER', + 'added QmVU52FEpQQF3ViFGUKLhLeJaRKpCZfqN4AWLuzAXyrzyU recursive-get-dir/blocks/CIQEU', + 'added QmXrrAYhbThjuHRPA23HujCLFbTrnwd3jmvNbZBAnKEddk recursive-get-dir/blocks/CIQFE', + 'added QmUcLzGWDuBPA6iVF65n676KiCbQNXV4owecfSR4QFVy3U recursive-get-dir/blocks/CIQFF', + 'added QmWjMLA3ppmngQaHs8YEQ3Bru4tKoDeJh2cKv7U7dtLUuf recursive-get-dir/blocks/CIQFT', + 'added QmYLZ3uqYLkViS7Bh3vxcT5yrPscyWMV11iqFVJnqA7JVT recursive-get-dir/blocks/CIQGF', + 'added QmZMHzPS1qema8HvLk4jRuSLrUjRHZ8Siu6Wc4njAmx8MG recursive-get-dir/blocks/CIQGP', + 'added Qmb5NqTFar7MnxyRwwQtfb81nyS6g5NRG1bdo6AefmvhXU recursive-get-dir/blocks/CIQH7', + 'added QmddXWuKjfCbF6HXR9jStKDoLEAZ7xc8SZgDanQLMiGjpn recursive-get-dir/blocks/CIQHA', + 'added 
Qma1ytRhbzt3tGcJopMvd7g3ZE38mRKTTuJuRaHmguq8mN recursive-get-dir/blocks/CIQHB', + 'added QmRsyLntZoGPWURqtemAmgRdtmuCjbbdZ5xzkCAEUhh4iU recursive-get-dir/blocks/CIQHP', + 'added QmNM7hxdJfaApCJe1ubCrhAQSA6AWQXUvKZrHcf5RxsNvn recursive-get-dir/blocks/CIQIX', + 'added QmT1zKYzUpt2kF8AEV2igH9hXwzGu4q8pc4uJ9BLWqjMhT recursive-get-dir/blocks/CIQJ2', + 'added QmQebEvyaFbez884asHoTrNsbck1JdMLcM8EhUFYCraGHZ recursive-get-dir/blocks/CIQJB', + 'added QmWjsRHRK7ENAhMvgwfkze9bkySxjAsNMGXrMGMsFcrDWU recursive-get-dir/blocks/CIQJF', + 'added QmWrs7zVFkbpsTEbEpe3MyAB8ssUNp8jamE7i4PZ736zWy recursive-get-dir/blocks/CIQJG', + 'added QmUBsjP45UUHzKymAUqwEFJsuCvfC1AcaLvBgMsoGMipoG recursive-get-dir/blocks/CIQKK', + 'added Qmbqod68qdYiEs7kkTGu7G59adekUbAFAAg7WEyM6iPP5z recursive-get-dir/blocks/CIQLB', + 'added QmeqJBkwmzsVR79HBKLW7AYhfAMxMaJs5dGHSgey5ezy7N recursive-get-dir/blocks/CIQMB', + 'added QmTnaav9VTSVyLu8PvRzh4gJ8heF9rpdWzeDb7rMx5DkxV recursive-get-dir/blocks/CIQOH', + 'added Qmd6s8LXAEjW7y9QbGSzeuewrRBYjJHmcazG3Hk7cJ74da recursive-get-dir/blocks/CIQOL', + 'added Qmec4atiyfysPR8HU5gPfjKY1NpQDY2kmSeeadx8wLEBqY recursive-get-dir/blocks/CIQOM', + 'added QmNi9kKnfKJGuofhBRKMdKj5R6BQAYHWRtu3vXJHRy69TE recursive-get-dir/blocks/CIQON', + 'added QmQ1mNtPTJ6JG3TNNq73m2orvsfKCKrqMKoXyXwRKWM1ma recursive-get-dir/blocks/CIQOY', + 'added QmSCUPYy4CfFt9nA61J9v2DMfJygQAJjaUcRmygDbVME2D recursive-get-dir/blocks/CIQPD', 'added QmamKEPmEH9RUsqRQsfNf5evZQDQPYL9KXg1ADeT7mkHkT recursive-get-dir/blocks', + 'added QmUqyZtPmsRy1U5Mo8kz2BAMmk1hfJ7yW1KAFTMB2odsFv recursive-get-dir/datastore', + 'added QmegvLXxpVKiZ4b57Xs1syfBVRd8CbucVHAp7KpLQdGieC recursive-get-dir/init-docs/docs', + 'added QmciSU8hfpAXKjvK5YLUSwApomGSWN5gFbP4EpDAEzu2Te recursive-get-dir/init-docs/tour', + 'added QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU recursive-get-dir/init-docs', 'added Qmaj2NmcyAXT8dFmZRRytE12wpcaHADzbChKToMEjBsj5Z recursive-get-dir' ] diff --git a/test/core/files.spec.js b/test/core/files.spec.js index 
e46c7955d7..3e3af57e53 100644 --- a/test/core/files.spec.js +++ b/test/core/files.spec.js @@ -72,36 +72,29 @@ describe('files', function () { }) describe('add', () => { - it('should not error when passed null options', (done) => { - ipfs.add(Buffer.from(hat()), null, (err) => { - expect(err).to.not.exist() - done() - }) + it('should not error when passed null options', async () => { + await ipfs.add(Buffer.from(hat()), null) }) - it('should add a file with a v1 CID', (done) => { - ipfs.add(Buffer.from([0, 1, 2]), { + it('should add a file with a v1 CID', async () => { + const files = await ipfs.add(Buffer.from([0, 1, 2]), { cidVersion: 1 - }, (err, files) => { - expect(err).to.not.exist() - expect(files.length).to.equal(1) - expect(files[0].hash).to.equal('bafkreifojmzibzlof6xyh5auu3r5vpu5l67brf3fitaf73isdlglqw2t7q') - expect(files[0].size).to.equal(3) - done() }) + + expect(files.length).to.equal(1) + expect(files[0].hash).to.equal('bafkreifojmzibzlof6xyh5auu3r5vpu5l67brf3fitaf73isdlglqw2t7q') + expect(files[0].size).to.equal(3) }) - it('should add a file with a v1 CID and not raw leaves', (done) => { - ipfs.add(Buffer.from([0, 1, 2]), { + it('should add a file with a v1 CID and not raw leaves', async () => { + const files = await ipfs.add(Buffer.from([0, 1, 2]), { cidVersion: 1, rawLeaves: false - }, (err, files) => { - expect(err).to.not.exist() - expect(files.length).to.equal(1) - expect(files[0].hash).to.equal('bafybeide2caf5we5a7izifzwzz5ds2gla67vsfgrzvbzpnyyirnfzgwf5e') - expect(files[0].size).to.equal(11) - done() }) + + expect(files.length).to.equal(1) + expect(files[0].hash).to.equal('bafybeide2caf5we5a7izifzwzz5ds2gla67vsfgrzvbzpnyyirnfzgwf5e') + expect(files[0].size).to.equal(11) }) }) })