diff --git a/package.json b/package.json
index 16dc4850b..e7b9aa7a3 100644
--- a/package.json
+++ b/package.json
@@ -17,7 +17,9 @@
   "browser": {
     "glob": false,
     "fs": false,
-    "stream": "readable-stream"
+    "stream": "readable-stream",
+    "./src/add/form-data.js": "./src/add/form-data.browser.js",
+    "./src/add-from-fs/index.js": "./src/add-from-fs/index.browser.js"
   },
   "repository": "github:ipfs/js-ipfs-http-client",
   "scripts": {
@@ -35,6 +37,7 @@
   "dependencies": {
     "abort-controller": "^3.0.0",
     "async": "^2.6.1",
+    "async-iterator-to-pull-stream": "^1.3.0",
     "bignumber.js": "^9.0.0",
     "bl": "^3.0.0",
     "bs58": "^4.0.1",
@@ -58,6 +61,8 @@
     "is-stream": "^2.0.0",
     "iso-stream-http": "~0.1.2",
     "iso-url": "~0.4.6",
+    "it-pushable": "^1.2.1",
+    "it-to-stream": "^0.1.1",
     "iterable-ndjson": "^1.1.0",
     "just-kebab-case": "^1.1.0",
     "just-map-keys": "^1.1.0",
@@ -77,6 +82,7 @@
     "promisify-es6": "^1.0.3",
     "pull-defer": "~0.2.3",
     "pull-stream": "^3.6.9",
+    "pull-stream-to-async-iterator": "^1.0.2",
     "pull-to-stream": "~0.1.1",
     "pump": "^3.0.0",
     "qs": "^6.5.2",
diff --git a/src/add-from-fs/glob-source.js b/src/add-from-fs/glob-source.js
new file mode 100644
index 000000000..b48a31cdc
--- /dev/null
+++ b/src/add-from-fs/glob-source.js
@@ -0,0 +1,94 @@
+'use strict'
+
+const Fs = require('fs')
+const Path = require('path')
+const glob = require('glob')
+const pushable = require('it-pushable')
+const errCode = require('err-code')
+
+/**
+* Create an AsyncIterable that can be passed to ipfs.add for the
+* provided file paths.
+*
+* @param {String} ...paths File system path(s) to glob from
+* @param {Object} [options] Optional options
+* @param {Boolean} [options.recursive] Recursively glob all paths in directories
+* @param {Boolean} [options.hidden] Include .dot files in matched paths
+* @param {Array} [options.ignore] Glob paths to ignore
+* @param {Boolean} [options.followSymlinks] follow symlinks
+* @returns {AsyncIterable}
+*/
+module.exports = (...args) => (async function * () {
+  const options = typeof args[args.length - 1] === 'string' ? {} : args.pop()
+  const paths = args
+
+  const globSourceOptions = {
+    recursive: options.recursive,
+    glob: {
+      dot: Boolean(options.hidden),
+      ignore: Array.isArray(options.ignore) ? options.ignore : [],
+      follow: options.followSymlinks != null ? options.followSymlinks : true
+    }
+  }
+
+  // Check the input paths comply with options.recursive and convert to glob sources
+  const results = await Promise.all(paths.map(pathAndType))
+  const globSources = results.map(r => toGlobSource(r, globSourceOptions))
+
+  for (const globSource of globSources) {
+    for await (const { path, contentPath } of globSource) {
+      yield { path, content: Fs.createReadStream(contentPath) }
+    }
+  }
+})()
+
+function toGlobSource ({ path, type }, options) {
+  return (async function * () {
+    options = options || {}
+
+    const baseName = Path.basename(path)
+
+    if (type === 'file') {
+      yield { path: baseName, contentPath: path }
+      return
+    }
+
+    if (type === 'dir' && !options.recursive) {
+      throw errCode(
+        new Error(`'${path}' is a directory and recursive option not set`),
+        'ERR_DIR_NON_RECURSIVE',
+        { path }
+      )
+    }
+
+    const globOptions = Object.assign({}, options.glob, {
+      cwd: path,
+      nodir: true,
+      realpath: false,
+      absolute: false
+    })
+
+    // TODO: want to use pull-glob but it doesn't have the features...
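The glob source above yields `{ path, content }` entries where `content` is a readable stream created with `fs.createReadStream`. A minimal consumption sketch (not part of the diff; it assumes a local copy of the module at `./glob-source` and a hypothetical `my-dir` directory):

```js
'use strict'

// Sketch only: iterate the glob source introduced in src/add-from-fs/glob-source.js.
// `./glob-source` and `my-dir` are assumptions for illustration.
const globSource = require('./glob-source')

async function main () {
  // Each entry is { path, content }, where content is a Node.js Readable
  // stream (itself async-iterable) for the matched file.
  for await (const { path, content } of globSource('my-dir', { recursive: true })) {
    let bytes = 0
    for await (const chunk of content) bytes += chunk.length
    console.log(`${path}: ${bytes} bytes`)
  }
}

main().catch(console.error)
```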
+    const pusher = pushable()
+
+    glob('**/*', globOptions)
+      .on('match', m => pusher.push(m))
+      .on('end', () => pusher.end())
+      .on('abort', () => pusher.end())
+      .on('error', err => pusher.end(err))
+
+    for await (const p of pusher) {
+      yield {
+        path: `${baseName}/${toPosix(p)}`,
+        contentPath: Path.join(path, p)
+      }
+    }
+  })()
+}
+
+async function pathAndType (path) {
+  const stat = await Fs.promises.stat(path)
+  return { path, type: stat.isDirectory() ? 'dir' : 'file' }
+}
+
+const toPosix = path => path.replace(/\\/g, '/')
diff --git a/src/add-from-fs/index.browser.js b/src/add-from-fs/index.browser.js
new file mode 100644
index 000000000..81d551294
--- /dev/null
+++ b/src/add-from-fs/index.browser.js
@@ -0,0 +1,3 @@
+'use strict'
+
+module.exports = () => () => { throw new Error('unavailable in the browser') }
diff --git a/src/add-from-fs/index.js b/src/add-from-fs/index.js
new file mode 100644
index 000000000..225acf9c3
--- /dev/null
+++ b/src/add-from-fs/index.js
@@ -0,0 +1,9 @@
+'use strict'
+
+const configure = require('../lib/configure')
+const globSource = require('./glob-source')
+
+module.exports = configure(({ ky }) => {
+  const add = require('../add')({ ky })
+  return (path, options) => add(globSource(path, options), options)
+})
diff --git a/src/add-from-url.js b/src/add-from-url.js
new file mode 100644
index 000000000..e98169eed
--- /dev/null
+++ b/src/add-from-url.js
@@ -0,0 +1,24 @@
+'use strict'
+
+const kyDefault = require('ky-universal').default
+const configure = require('./lib/configure')
+const toIterable = require('./lib/stream-to-iterable')
+
+module.exports = configure(({ ky }) => {
+  const add = require('./add')({ ky })
+
+  return (url, options) => (async function * () {
+    options = options || {}
+
+    const { body } = await kyDefault.get(url)
+
+    const input = {
+      path: decodeURIComponent(new URL(url).pathname.split('/').pop() || ''),
+      content: toIterable(body)
+    }
+
+    for await (const file of add(input, options)) {
+      yield file
+    }
+  })()
+})
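In `src/add-from-url.js` above, the name of the added file is derived from the last segment of the URL path. A tiny, self-contained illustration of that derivation (the URL is made up):

```js
// Illustration only: mirrors the `path` computation in src/add-from-url.js above.
const url = 'https://example.com/files/My%20Photo.jpg'
const name = decodeURIComponent(new URL(url).pathname.split('/').pop() || '')

console.log(name) // => 'My Photo.jpg'
```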
diff --git a/src/add/form-data.browser.js b/src/add/form-data.browser.js
new file mode 100644
index 000000000..46f0254d5
--- /dev/null
+++ b/src/add/form-data.browser.js
@@ -0,0 +1,30 @@
+'use strict'
+/* eslint-env browser */
+
+const normaliseInput = require('./normalise-input')
+
+exports.toFormData = async (input) => {
+  const files = normaliseInput(input)
+  const formData = new FormData()
+  let i = 0
+
+  for await (const file of files) {
+    if (file.content) {
+      // In the browser there's _currently_ no streaming upload, buffer up our
+      // async iterator chunks and append a big Blob :(
+      // One day, this will be browser streams
+      const bufs = []
+      for await (const chunk of file.content) {
+        bufs.push(Buffer.isBuffer(chunk) ? chunk.buffer : chunk)
+      }
+
+      formData.append(`file-${i}`, new Blob(bufs, { type: 'application/octet-stream' }), file.path)
+    } else {
+      formData.append(`dir-${i}`, new Blob([], { type: 'application/x-directory' }), file.path)
+    }
+
+    i++
+  }
+
+  return formData
+}
diff --git a/src/add/form-data.js b/src/add/form-data.js
new file mode 100644
index 000000000..be268320f
--- /dev/null
+++ b/src/add/form-data.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const FormData = require('form-data')
+const { Buffer } = require('buffer')
+const toStream = require('it-to-stream')
+const normaliseInput = require('./normalise-input')
+
+exports.toFormData = async (input) => {
+  const files = normaliseInput(input)
+  const formData = new FormData()
+  let i = 0
+
+  for await (const file of files) {
+    if (file.content) {
+      // In Node.js, FormData can be passed a stream so no need to buffer
+      formData.append(
+        `file-${i}`,
+        // FIXME: add a `path` property to the stream so `form-data` doesn't set
+        // a Content-Length header that is only the sum of the size of the
+        // header/footer when knownLength option (below) is null.
+        Object.assign(
+          toStream.readable(file.content),
+          { path: file.path || `file-${i}` }
+        ),
+        {
+          filepath: encodeURIComponent(file.path),
+          contentType: 'application/octet-stream',
+          knownLength: file.content.length // Send Content-Length header if known
+        }
+      )
+    } else {
+      formData.append(`dir-${i}`, Buffer.alloc(0), {
+        filepath: encodeURIComponent(file.path),
+        contentType: 'application/x-directory'
+      })
+    }
+
+    i++
+  }
+
+  return formData
+}
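Both `toFormData` implementations accept anything `normalise-input.js` (below) understands and build one multipart part per file or directory. A hedged Node.js sketch of calling the version above, assuming the sketch sits next to `src/add/form-data.js`:

```js
'use strict'

// Sketch only: exercise the Node.js toFormData from src/add/form-data.js.
const { toFormData } = require('./form-data')

async function main () {
  const formData = await toFormData([
    { path: 'hello.txt', content: Buffer.from('hello world') }, // file part
    { path: 'empty-dir' } // no content => application/x-directory part
  ])

  // The form-data module computes the multipart boundary for us.
  console.log(formData.getHeaders()['content-type'])
  // => multipart/form-data; boundary=--------------------------...
}

main().catch(console.error)
```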
diff --git a/src/add/index.js b/src/add/index.js
new file mode 100644
index 000000000..34a5d33bd
--- /dev/null
+++ b/src/add/index.js
@@ -0,0 +1,54 @@
+'use strict'
+
+const ndjson = require('iterable-ndjson')
+const configure = require('../lib/configure')
+const toIterable = require('../lib/stream-to-iterable')
+const { toFormData } = require('./form-data')
+const toCamel = require('../lib/object-to-camel')
+
+module.exports = configure(({ ky }) => {
+  return (input, options) => (async function * () {
+    options = options || {}
+
+    const searchParams = new URLSearchParams(options.searchParams)
+
+    searchParams.set('stream-channels', true)
+    if (options.chunker) searchParams.set('chunker', options.chunker)
+    if (options.cidVersion) searchParams.set('cid-version', options.cidVersion)
+    if (options.cidBase) searchParams.set('cid-base', options.cidBase)
+    if (options.enableShardingExperiment != null) searchParams.set('enable-sharding-experiment', options.enableShardingExperiment)
+    if (options.hashAlg) searchParams.set('hash', options.hashAlg)
+    if (options.onlyHash != null) searchParams.set('only-hash', options.onlyHash)
+    if (options.pin != null) searchParams.set('pin', options.pin)
+    if (options.progress) searchParams.set('progress', true)
+    if (options.quiet != null) searchParams.set('quiet', options.quiet)
+    if (options.quieter != null) searchParams.set('quieter', options.quieter)
+    if (options.rawLeaves != null) searchParams.set('raw-leaves', options.rawLeaves)
+    if (options.shardSplitThreshold) searchParams.set('shard-split-threshold', options.shardSplitThreshold)
+    if (options.silent) searchParams.set('silent', options.silent)
+    if (options.trickle != null) searchParams.set('trickle', options.trickle)
+    if (options.wrapWithDirectory != null) searchParams.set('wrap-with-directory', options.wrapWithDirectory)
+
+    const res = await ky.post('add', {
+      timeout: options.timeout,
+      signal: options.signal,
+      headers: options.headers,
+      searchParams,
+      body: await toFormData(input)
+    })
+
+    for await (let file of ndjson(toIterable(res.body))) {
+      file = toCamel(file)
+      // console.log(file)
+      if (options.progress && file.bytes) {
+        options.progress(file.bytes)
+      } else {
+        yield toCoreInterface(file)
+      }
+    }
+  })()
+})
+
+function toCoreInterface ({ name, hash, size }) {
+  return { path: name, hash, size: parseInt(size) }
+}
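`src/add/index.js` is where camelCase client options are translated into the query parameters expected by the IPFS HTTP API, and where the ndjson response is mapped back to `{ path, hash, size }`. A standalone illustration of just the option mapping (a subset of the flags handled above):

```js
// Illustration only: the same kind of mapping performed in src/add/index.js.
const options = { cidVersion: 1, rawLeaves: false, pin: true, wrapWithDirectory: true }

const searchParams = new URLSearchParams()
searchParams.set('stream-channels', true)
if (options.cidVersion) searchParams.set('cid-version', options.cidVersion)
if (options.rawLeaves != null) searchParams.set('raw-leaves', options.rawLeaves)
if (options.pin != null) searchParams.set('pin', options.pin)
if (options.wrapWithDirectory != null) searchParams.set('wrap-with-directory', options.wrapWithDirectory)

console.log(searchParams.toString())
// => stream-channels=true&cid-version=1&raw-leaves=false&pin=true&wrap-with-directory=true
```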
diff --git a/src/add/normalise-input.js b/src/add/normalise-input.js
new file mode 100644
index 000000000..e38ba3a31
--- /dev/null
+++ b/src/add/normalise-input.js
@@ -0,0 +1,135 @@
+'use strict'
+
+const errCode = require('err-code')
+const toAsyncIterable = require('../lib/file-data-to-async-iterable')
+const isBytes = require('../lib/is-bytes')
+const isBloby = require('../lib/is-bloby')
+
+/*
+Transform one of:
+
+Buffer|ArrayBuffer|TypedArray
+Blob|File
+{ path, content: Buffer }
+{ path, content: Blob }
+{ path, content: Iterable<Number> }
+{ path, content: AsyncIterable<Buffer> }
+{ path, content: PullStream<Buffer> }
+Iterable<Number>
+Iterable<Buffer>
+Iterable<Blob>
+Iterable<{ path, content: Buffer }>
+Iterable<{ path, content: Blob }>
+Iterable<{ path, content: Iterable<Number> }>
+Iterable<{ path, content: AsyncIterable<Buffer> }>
+Iterable<{ path, content: PullStream<Buffer> }>
+AsyncIterable<Buffer>
+AsyncIterable<{ path, content: Buffer }>
+AsyncIterable<{ path, content: Blob }>
+AsyncIterable<{ path, content: Iterable<Number> }>
+AsyncIterable<{ path, content: AsyncIterable<Buffer> }>
+AsyncIterable<{ path, content: PullStream<Buffer> }>
+PullStream<Buffer>
+
+Into:
+
+AsyncIterable<{ path, content: AsyncIterable<Buffer> }>
+*/
+
+module.exports = function normalizeInput (input) {
+  // Buffer|ArrayBuffer|TypedArray
+  // Blob|File
+  if (isBytes(input) || isBloby(input)) {
+    return (async function * () { // eslint-disable-line require-await
+      yield normalizeTuple({ path: '', content: input })
+    })()
+  }
+
+  // Iterable<Number>
+  // Iterable<Buffer>
+  // Iterable<Blob>
+  // Iterable<{ path, content: Buffer }>
+  // Iterable<{ path, content: Blob }>
+  // Iterable<{ path, content: Iterable<Number> }>
+  // Iterable<{ path, content: AsyncIterable<Buffer> }>
+  // Iterable<{ path, content: PullStream<Buffer> }>
+  if (input[Symbol.iterator]) {
+    return (async function * () { // eslint-disable-line require-await
+      for (const chunk of input) {
+        if (isBytes(chunk) || isBloby(chunk)) {
+          yield normalizeTuple({ path: '', content: chunk })
+        } else if (isFileObject(chunk)) {
+          yield normalizeTuple(chunk)
+        } else if (Number.isInteger(chunk)) { // Must be an Iterable<Number> i.e. Buffer/ArrayBuffer/Array of bytes
+          yield normalizeTuple({ path: '', content: input })
+          return
+        } else {
+          throw errCode(new Error('Unexpected input: ' + typeof chunk), 'ERR_UNEXPECTED_INPUT')
+        }
+      }
+    })()
+  }
+
+  // AsyncIterable<Buffer>
+  // AsyncIterable<{ path, content: Buffer }>
+  // AsyncIterable<{ path, content: Blob }>
+  // AsyncIterable<{ path, content: Iterable<Number> }>
+  // AsyncIterable<{ path, content: AsyncIterable<Buffer> }>
+  // AsyncIterable<{ path, content: PullStream<Buffer> }>
+  if (input[Symbol.asyncIterator]) {
+    return (async function * () {
+      for await (const chunk of input) {
+        if (isFileObject(chunk)) {
+          yield normalizeTuple(chunk)
+        } else { // Must be an AsyncIterable<Buffer> i.e. a Stream
+          let path = ''
+
+          // fs.createReadStream will create a stream with a `path` prop
+          // If available, use it here!
+          if (input.path && input.path.split) {
+            path = input.path.split(/[/\\]/).pop() || ''
+          }
+
+          yield normalizeTuple({
+            path,
+            content: (async function * () {
+              yield chunk
+              for await (const restChunk of input) {
+                yield restChunk
+              }
+            })()
+          })
+          return
+        }
+      }
+    })()
+  }
+
+  // { path, content: Buffer }
+  // { path, content: Blob }
+  // { path, content: Iterable<Number> }
+  // { path, content: AsyncIterable<Buffer> }
+  // { path, content: PullStream<Buffer> }
+  if (isFileObject(input)) {
+    // eslint-disable-next-line require-await
+    return (async function * () { yield normalizeTuple(input) })()
+  }
+
+  // PullStream<Buffer>
+  if (typeof input === 'function') {
+    return (async function * () { // eslint-disable-line require-await
+      yield normalizeTuple({ path: '', content: input })
+    })()
+  }
+
+  throw errCode(new Error('Unexpected input: ' + typeof input), 'ERR_UNEXPECTED_INPUT')
+}
+
+function normalizeTuple ({ path, content }) {
+  return { path: path || '', content: content ? toAsyncIterable(content) : null }
+}
+
+// An object with a path or content property
+function isFileObject (obj) {
+  return typeof obj === 'object' && (obj.path || obj.content)
+}
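`normalise-input.js` is the funnel that lets `ipfs.add` accept buffers, blobs, `{ path, content }` objects, (async) iterables and pull streams. A hedged sketch showing a few of those shapes collapsing to the same normalised form, assuming the module is loadable as `./normalise-input`:

```js
'use strict'

// Sketch only: different input shapes all normalise to
// AsyncIterable<{ path, content: AsyncIterable<Buffer> }>.
const normaliseInput = require('./normalise-input')

async function collect (input) {
  const files = []
  for await (const { path, content } of normaliseInput(input)) {
    const chunks = []
    for await (const chunk of content) chunks.push(chunk)
    files.push({ path, content: Buffer.concat(chunks).toString() })
  }
  return files
}

async function main () {
  console.log(await collect(Buffer.from('hello')))                                   // single buffer
  console.log(await collect([{ path: 'hello.txt', content: Buffer.from('hello') }])) // iterable of file objects
  console.log(await collect((async function * () { yield Buffer.from('hello') })())) // async iterable of buffers
}

main().catch(console.error)
```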
diff --git a/src/files-mfs/write.js b/src/files-mfs/write.js
index 0485406bd..33f1ec973 100644
--- a/src/files-mfs/write.js
+++ b/src/files-mfs/write.js
@@ -3,7 +3,6 @@
 const promisify = require('promisify-es6')
 const concatStream = require('concat-stream')
 const once = require('once')
-const FileResultStreamConverter = require('../utils/file-result-stream-converter')
 const SendFilesStream = require('../utils/send-files-stream')
 
 module.exports = (send) => {
@@ -29,8 +28,7 @@
 
     const options = {
       args: pathDst,
-      qs: opts,
-      converter: FileResultStreamConverter
+      qs: opts
     }
 
     const stream = sendFilesStream({ qs: options })
diff --git a/src/files-regular/add-from-fs.js b/src/files-regular/add-from-fs.js
deleted file mode 100644
index 2320fc537..000000000
--- a/src/files-regular/add-from-fs.js
+++ /dev/null
@@ -1,40 +0,0 @@
-'use strict'
-
-const isNode = require('detect-node')
-const promisify = require('promisify-es6')
-const SendOneFile = require('../utils/send-one-file-multiple-results')
-const FileResultStreamConverter = require('../utils/file-result-stream-converter')
-
-module.exports = (send) => {
-  const sendOneFile = SendOneFile(send, 'add')
-
-  return promisify((path, opts, callback) => {
-    if (typeof opts === 'function' &&
-        callback === undefined) {
-      callback = opts
-      opts = {}
-    }
-
-    // opts is the real callback --
-    // 'callback' is being injected by promisify
-    if (typeof opts === 'function' &&
-        typeof callback === 'function') {
-      callback = opts
-      opts = {}
-    }
-
-    if (!isNode) {
-      return callback(new Error('fsAdd does not work in the browser'))
-    }
-
-    if (typeof path !== 'string') {
-      return callback(new Error('"path" must be a string'))
-    }
-
-    const requestOpts = {
-      qs: opts,
-      converter: FileResultStreamConverter
-    }
-    sendOneFile(path, requestOpts, callback)
-  })
-}
diff --git a/src/files-regular/add-from-url.js b/src/files-regular/add-from-url.js
deleted file mode 100644
index f1065f2de..000000000
--- a/src/files-regular/add-from-url.js
+++ /dev/null
@@ -1,70 +0,0 @@
-'use strict'
-
-const promisify = require('promisify-es6')
-const { URL } = require('iso-url')
-const { getRequest } = require('iso-stream-http')
-const SendOneFile = require('../utils/send-one-file-multiple-results')
-const FileResultStreamConverter = require('../utils/file-result-stream-converter')
-
-module.exports = (send) => {
-  const sendOneFile = SendOneFile(send, 'add')
-
-  return promisify((url, opts, callback) => {
-    if (typeof (opts) === 'function' &&
-        callback === undefined) {
-      callback = opts
-      opts = {}
-    }
-
-    // opts is the real callback --
-    // 'callback' is being injected by promisify
-    if (typeof opts === 'function' &&
-        typeof callback === 'function') {
-      callback = opts
-      opts = {}
-    }
-
-    if (!validUrl(url)) {
-      return callback(new Error('"url" param must be an http(s) url'))
-    }
-
-    requestWithRedirect(url, opts, sendOneFile, callback)
-  })
-}
-
-const validUrl = (url) => typeof url === 'string' && url.startsWith('http')
-
-const requestWithRedirect = (url, opts, sendOneFile, callback) => {
-  const parsedUrl = new URL(url)
-
-  const req = getRequest(parsedUrl, (res) => {
-    if (res.statusCode >= 400) {
-      return callback(new Error(`Failed to download with ${res.statusCode}`))
-    }
-
-    const redirection = res.headers.location
-
-    if (res.statusCode >= 300 && res.statusCode < 400 && redirection) {
-      if (!validUrl(redirection)) {
-        return callback(new Error('redirection url must be an http(s) url'))
-      }
-
-      requestWithRedirect(redirection, opts, sendOneFile, callback)
-    } else {
-      const requestOpts = {
-        qs: opts,
-        converter: FileResultStreamConverter
-      }
-      const fileName = decodeURIComponent(parsedUrl.pathname.split('/').pop())
-
-      sendOneFile({
-        content: res,
-        path: fileName
-      }, requestOpts, callback)
-    }
-  })
-
-  req.once('error', callback)
-
-  req.end()
-}
diff --git a/src/files-regular/add-pull-stream.js b/src/files-regular/add-pull-stream.js
deleted file mode 100644
index 2076ffa8d..000000000
--- a/src/files-regular/add-pull-stream.js
+++ /dev/null
@@ -1,13 +0,0 @@
-'use strict'
-
-const SendFilesStream = require('../utils/send-files-stream')
-const FileResultStreamConverter = require('../utils/file-result-stream-converter')
-const toPull = require('stream-to-pull-stream')
-
-module.exports = (send) => {
-  return (options) => {
-    options = options || {}
-    options.converter = FileResultStreamConverter
-    return toPull(SendFilesStream(send, 'add')({ qs: options }))
-  }
-}
diff --git a/src/files-regular/add-readable-stream.js b/src/files-regular/add-readable-stream.js
deleted file mode 100644
index 320abe692..000000000
--- a/src/files-regular/add-readable-stream.js
+++ /dev/null
@@ -1,12 +0,0 @@
-'use strict'
-
-const SendFilesStream = require('../utils/send-files-stream')
-const FileResultStreamConverter = require('../utils/file-result-stream-converter')
-
-module.exports = (send) => {
-  return (options) => {
-    options = options || {}
-    options.converter = FileResultStreamConverter
-    return SendFilesStream(send, 'add')(options)
-  }
-}
diff --git a/src/files-regular/add.js b/src/files-regular/add.js
deleted file mode 100644
index cb5898265..000000000
--- a/src/files-regular/add.js
+++ /dev/null
@@ -1,55 +0,0 @@
-'use strict'
-
-const promisify = require('promisify-es6')
-const ConcatStream = require('concat-stream')
-const once = require('once')
-const { isSource } = require('is-pull-stream')
-const FileResultStreamConverter = require('../utils/file-result-stream-converter')
-const SendFilesStream = require('../utils/send-files-stream')
-const validateAddInput = require('ipfs-utils/src/files/add-input-validation')
-
-module.exports = (send) => {
-  const createAddStream = SendFilesStream(send, 'add')
-
-  const add = promisify((_files, options, _callback) => {
-    if (typeof options === 'function') {
-      _callback = options
-      options = null
-    }
-    const callback = once(_callback)
-
-    if (!options) {
-      options = {}
-    }
-    options.converter = FileResultStreamConverter
-
-    try {
-      validateAddInput(_files)
-    } catch (err) {
-      return callback(err)
-    }
-
-    const files = [].concat(_files)
-
-    const stream = createAddStream({ qs: options })
-    const concat = ConcatStream((result) => callback(null, result))
-    stream.once('error', callback)
-    stream.pipe(concat)
-
-    files.forEach((file) => stream.write(file))
-    stream.end()
-  })
-
-  return function () {
-    const args = Array.from(arguments)
-
-    // If we files.add(<pull stream>), then promisify thinks the pull stream is
-    // a callback! Add an empty options object in this case so that a promise
-    // is returned.
-    if (args.length === 1 && isSource(args[0])) {
-      args.push({})
-    }
-
-    return add.apply(null, args)
-  }
-}
diff --git a/src/files-regular/index.js b/src/files-regular/index.js
index 059d7ea1c..408f76494 100644
--- a/src/files-regular/index.js
+++ b/src/files-regular/index.js
@@ -6,12 +6,6 @@ module.exports = (arg) => {
   const send = moduleConfig(arg)
 
   return {
-    add: require('../files-regular/add')(send),
-    addReadableStream: require('../files-regular/add-readable-stream')(send),
-    addPullStream: require('../files-regular/add-pull-stream')(send),
-    addFromFs: require('../files-regular/add-from-fs')(send),
-    addFromURL: require('../files-regular/add-from-url')(send),
-    addFromStream: require('../files-regular/add')(send),
     cat: require('../files-regular/cat')(send),
     catReadableStream: require('../files-regular/cat-readable-stream')(send),
     catPullStream: require('../files-regular/cat-pull-stream')(send),
diff --git a/src/lib/blob-to-async-iterable.js b/src/lib/blob-to-async-iterable.js
new file mode 100644
index 000000000..3f9e982c0
--- /dev/null
+++ b/src/lib/blob-to-async-iterable.js
@@ -0,0 +1,30 @@
+'use strict'
+/* eslint-env browser */
+
+// Convert a Blob into an AsyncIterable
+module.exports = (blob, options) => (async function * () {
+  options = options || {}
+
+  const reader = new FileReader()
+  const chunkSize = options.chunkSize || 1024 * 1024
+  let offset = options.offset || 0
+
+  const getNextChunk = () => new Promise((resolve, reject) => {
+    reader.onloadend = e => {
+      const data = e.target.result
+      resolve(data.byteLength === 0 ? null : data)
+    }
+    reader.onerror = reject
+
+    const end = offset + chunkSize
+    const slice = blob.slice(offset, end)
+    reader.readAsArrayBuffer(slice)
+    offset = end
+  })
+
+  while (true) {
+    const data = await getNextChunk()
+    if (data == null) return
+    yield data
+  }
+})()
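`blob-to-async-iterable.js` reads a `Blob` in fixed-size slices through a `FileReader`, yielding one `ArrayBuffer` per slice until an empty slice signals the end. A browser-only sketch of driving it, assuming the module is loadable as `./blob-to-async-iterable`:

```js
'use strict'
/* eslint-env browser */

// Sketch only: count the bytes of a Blob using the chunker above.
const blobToAsyncIterable = require('./blob-to-async-iterable')

async function totalBytes (blob) {
  let total = 0
  for await (const arrayBuffer of blobToAsyncIterable(blob)) {
    total += arrayBuffer.byteLength
  }
  return total
}

totalBytes(new Blob(['hello world'])).then(n => console.log(n)) // => 11
```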
diff --git a/src/lib/file-data-to-async-iterable.js b/src/lib/file-data-to-async-iterable.js
new file mode 100644
index 000000000..32614324e
--- /dev/null
+++ b/src/lib/file-data-to-async-iterable.js
@@ -0,0 +1,55 @@
+'use strict'
+/* eslint-env browser */
+
+const toIterator = require('pull-stream-to-async-iterator')
+const { Buffer } = require('buffer')
+const blobToAsyncIterable = require('../lib/blob-to-async-iterable')
+const isBloby = require('../lib/is-bloby')
+
+/*
+Transform one of:
+
+Buffer|ArrayBuffer|TypedArray
+Blob|File
+Iterable<Number>
+AsyncIterable<Buffer>
+PullStream<Buffer>
+
+Into:
+
+AsyncIterable<Buffer>
+*/
+module.exports = function toAsyncIterable (input) {
+  // Buffer|ArrayBuffer|TypedArray|array of bytes
+  if (input[Symbol.iterator]) {
+    const buf = Buffer.from(input)
+    return Object.assign(
+      (async function * () { yield buf })(), // eslint-disable-line require-await
+      { length: buf.length }
+    )
+  }
+
+  // Blob|File
+  if (isBloby(input)) {
+    return Object.assign(
+      blobToAsyncIterable(input),
+      { length: input.size }
+    )
+  }
+
+  // AsyncIterable<Buffer>
+  if (input[Symbol.asyncIterator]) {
+    return (async function * () {
+      for await (const chunk of input) {
+        yield Buffer.from(chunk)
+      }
+    })()
+  }
+
+  // PullStream
+  if (typeof input === 'function') {
+    return toIterator(input)
+  }
+
+  throw new Error('Unexpected input: ' + typeof input)
+}
diff --git a/src/lib/is-bloby.js b/src/lib/is-bloby.js
new file mode 100644
index 000000000..a8682f315
--- /dev/null
+++ b/src/lib/is-bloby.js
@@ -0,0 +1,7 @@
+'use strict'
+/* eslint-env browser */
+
+// Blob|File
+module.exports = function isBloby (obj) {
+  return typeof Blob !== 'undefined' && obj instanceof Blob
+}
diff --git a/src/lib/is-bytes.js b/src/lib/is-bytes.js
new file mode 100644
index 000000000..adc996835
--- /dev/null
+++ b/src/lib/is-bytes.js
@@ -0,0 +1,8 @@
+'use strict'
+
+const { Buffer } = require('buffer')
+
+// Buffer|ArrayBuffer|TypedArray
+module.exports = function isBytes (obj) {
+  return Buffer.isBuffer(obj) || ArrayBuffer.isView(obj) || obj instanceof ArrayBuffer
+}
diff --git a/src/lib/iterable.js b/src/lib/iterable.js
new file mode 100644
index 000000000..78de19f5e
--- /dev/null
+++ b/src/lib/iterable.js
@@ -0,0 +1,19 @@
+'use strict'
+
+const toPull = require('async-iterator-to-pull-stream')
+const toStream = require('it-to-stream')
+
+exports.collectify = fn => async (...args) => {
+  const items = []
+  for await (const item of fn(...args)) items.push(item)
+  return items
+}
+
+exports.pullify = {
+  source: fn => (...args) => toPull(fn(...args)),
+  transform: fn => (...args) => toPull.transform(source => fn(source, ...args))
+}
+
+exports.streamify = {
+  transform: fn => (...args) => toStream.transform(source => fn(source, ...args), { objectMode: true })
+}
diff --git a/src/lib/object-to-camel.js b/src/lib/object-to-camel.js
new file mode 100644
index 000000000..f13b2b6a1
--- /dev/null
+++ b/src/lib/object-to-camel.js
@@ -0,0 +1,21 @@
+'use strict'
+
+// Convert object properties to camel case.
+// NOT recursive!
+// e.g.
+// AgentVersion => agentVersion
+// ID => id
+module.exports = obj => {
+  if (obj == null) return obj
+  const caps = /^[A-Z]+$/
+  return Object.keys(obj).reduce((camelObj, k) => {
+    if (caps.test(k)) { // all caps
+      camelObj[k.toLowerCase()] = obj[k]
+    } else if (caps.test(k[0])) { // pascal
+      camelObj[k[0].toLowerCase() + k.slice(1)] = obj[k]
+    } else {
+      camelObj[k] = obj[k]
+    }
+    return camelObj
+  }, {})
+}
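`object-to-camel.js` is used in `src/add/index.js` above to convert the HTTP API's PascalCase/upper-case response keys to the camelCase names exposed by the client. For example (values are made up):

```js
// Illustration only, assuming the helper is loadable as ./object-to-camel.
const toCamel = require('./object-to-camel')

console.log(toCamel({ Name: 'hello.txt', Hash: 'Qm...', ID: 'abc' }))
// => { name: 'hello.txt', hash: 'Qm...', id: 'abc' }
```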
diff --git a/src/utils/file-result-stream-converter.js b/src/utils/file-result-stream-converter.js
deleted file mode 100644
index 7f5b19aeb..000000000
--- a/src/utils/file-result-stream-converter.js
+++ /dev/null
@@ -1,44 +0,0 @@
-'use strict'
-
-const TransformStream = require('readable-stream').Transform
-
-/*
-  Transforms a stream of {Name, Hash} objects to include size
-  of the DAG object.
-
-  Usage: inputStream.pipe(new FileResultStreamConverter())
-
-  Input object format:
-    {
-      Name: '/path/to/file/foo.txt',
-      Hash: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP'
-      Size: '20'
-    }
-
-  Output object format:
-    {
-      path: '/path/to/file/foo.txt',
-      hash: 'Qma4hjFTnCasJ8PVp3mZbZK5g2vGDT4LByLJ7m8ciyRFZP',
-      size: 20
-    }
-*/
-class FileResultStreamConverter extends TransformStream {
-  constructor (options) {
-    const opts = Object.assign({}, options || {}, { objectMode: true })
-    super(opts)
-  }
-
-  _transform (obj, enc, callback) {
-    if (!obj.Hash) {
-      return callback()
-    }
-
-    callback(null, {
-      path: obj.Name,
-      hash: obj.Hash,
-      size: parseInt(obj.Size, 10)
-    })
-  }
-}
-
-module.exports = FileResultStreamConverter
diff --git a/src/utils/load-commands.js b/src/utils/load-commands.js
index e4a914dd0..3479c8d7c 100644
--- a/src/utils/load-commands.js
+++ b/src/utils/load-commands.js
@@ -1,14 +1,59 @@
 'use strict'
 
+const nodeify = require('promise-nodeify')
+const { collectify, pullify, streamify } = require('../lib/iterable')
+
 function requireCommands () {
   return {
     // Files Regular (not MFS)
-    add: require('../files-regular/add'),
-    addReadableStream: require('../files-regular/add-readable-stream'),
-    addPullStream: require('../files-regular/add-pull-stream'),
-    addFromFs: require('../files-regular/add-from-fs'),
-    addFromURL: require('../files-regular/add-from-url'),
-    addFromStream: require('../files-regular/add'),
+    add: (_, config) => {
+      const add = collectify(require('../add')(config))
+      return (input, options, callback) => {
+        if (typeof options === 'function') {
+          callback = options
+          options = {}
+        }
+        return nodeify(add(input, options), callback)
+      }
+    },
+    addReadableStream: (_, config) => {
+      const add = require('../add')(config)
+      return streamify.transform(add)
+    },
+    addPullStream: (_, config) => {
+      const add = require('../add')(config)
+      return pullify.transform(add)
+    },
+    addFromFs: (_, config) => {
+      const addFromFs = collectify(require('../add-from-fs')(config))
+      return (path, options, callback) => {
+        if (typeof options === 'function') {
+          callback = options
+          options = {}
+        }
+        return nodeify(addFromFs(path, options), callback)
+      }
+    },
+    addFromURL: (_, config) => {
+      const addFromURL = collectify(require('../add-from-url')(config))
+      return (url, options, callback) => {
+        if (typeof options === 'function') {
+          callback = options
+          options = {}
+        }
+        return nodeify(addFromURL(url, options), callback)
+      }
+    },
+    addFromStream: (_, config) => {
+      const add = collectify(require('../add')(config))
+      return (input, options, callback) => {
+        if (typeof options === 'function') {
+          callback = options
+          options = {}
+        }
+        return nodeify(add(input, options), callback)
+      }
+    },
     cat: require('../files-regular/cat'),
     catReadableStream: require('../files-regular/cat-readable-stream'),
     catPullStream: require('../files-regular/cat-pull-stream'),
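With `load-commands.js` wiring each command through `promise-nodeify`, the public surface keeps supporting both callbacks and promises while the internals become async iterables. A hedged sketch of what calling code can look like (the API endpoint and data are illustrative):

```js
'use strict'

// Sketch only: both call styles are served by the nodeify'd commands above.
const ipfsClient = require('ipfs-http-client')
const ipfs = ipfsClient('/ip4/127.0.0.1/tcp/5001')

// Promise style
ipfs.add(Buffer.from('hello world'), { pin: false })
  .then(files => console.log(files)) // => [{ path, hash, size }]
  .catch(console.error)

// Callback style
ipfs.add(Buffer.from('hello world'), { pin: false }, (err, files) => {
  if (err) throw err
  console.log(files)
})
```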
diff --git a/test/files-mfs.spec.js b/test/files-mfs.spec.js
index beda9b24a..dff3b2258 100644
--- a/test/files-mfs.spec.js
+++ b/test/files-mfs.spec.js
@@ -96,7 +96,7 @@ describe('.files (the MFS API part)', function () {
 
   it('.add with cid-version=1 and raw-leaves=false', (done) => {
     const expectedCid = 'bafybeifogzovjqrcxvgt7g36y7g63hvwvoakledwk4b2fr2dl4wzawpnny'
-    const options = { 'cid-version': 1, 'raw-leaves': false }
+    const options = { cidVersion: 1, rawLeaves: false }
 
     ipfs.add(testfile, options, (err, res) => {
       expect(err).to.not.exist()
@@ -174,7 +174,7 @@ describe('.files (the MFS API part)', function () {
         path: content + '.txt',
         content: Buffer.from(content)
       }
-      const options = { hash: name, 'raw-leaves': false }
+      const options = { hashAlg: name, rawLeaves: false }
 
       ipfs.add([file], options, (err, res) => {
         if (err) return done(err)
@@ -264,7 +264,7 @@ describe('.files (the MFS API part)', function () {
         path: content + '.txt',
         content: Buffer.from(content)
      }
-      const options = { hash: name, 'raw-leaves': false }
+      const options = { hashAlg: name, rawLeaves: false }
 
       ipfs.add([file], options, (err, res) => {
         expect(err).to.not.exist()
diff --git a/test/interface.spec.js b/test/interface.spec.js
index 86ffac21d..f14104246 100644
--- a/test/interface.spec.js
+++ b/test/interface.spec.js
@@ -116,6 +116,14 @@ describe('interface-ipfs-core tests', () => {
         name: 'should add a nested directory as array of tupples with progress',
         reason: 'FIXME https://github.com/ipfs/js-ipfs-http-client/issues/339'
       },
+      {
+        name: 'should not be able to add a string',
+        reason: 'FIXME test needs to change to inspect error code ERR_UNEXPECTED_INPUT'
+      },
+      {
+        name: 'should not be able to add a non-Buffer TypedArray',
+        reason: 'TODO remove test, this should be supported'
+      },
       // .addPullStream
       isNode ? null : {
         name: 'should add pull stream of valid files and dirs',
@@ -131,6 +139,10 @@ describe('interface-ipfs-core tests', () => {
         name: 'addFromStream',
         reason: 'Not designed to run in the browser'
       },
+      {
+        name: 'should add from a stream',
+        reason: 'TODO change test to use readable-stream@3 with async iterator support'
+      },
       // .addFromFs
       isNode ? null : {
         name: 'addFromFs',