From 0662cd8f9f18f497792b6c555e220fb249e177d0 Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Fri, 16 Mar 2018 16:46:14 -0400
Subject: [PATCH 01/21] feat: jsipfs pin improvements (#1249)

* initial sweep through to understand how pin works. Did make some changes but mostly minor.

* refactor pb schema to its own file

* fix: don't pin files during files.add if opts.pin === false

* feat: add some http qs parsing, http route/resources cleanup, cleanup core/utils.parseIpfsPath

* feat: expand pin tests. First draft; still needs some further work.

* feat: Add logging for entry/exit of pins: add/rm/flush/load. Clean some documentation.

* feat: add --pin to files.add, fix: improper pin option parsing in core.

* feat: Use ipfs.files.add to add init-docs instead of directly using the unix-fs importer.

* feat(tests): Add tests for cli --pin option. I know this should be more of an integration test. Should be written in /core. Maybe talk with Victor about testing different layers.

* feat: use isIPFS to validate a multihash.
---
 src/cli/commands/files/add.js      |   5 +
 src/cli/commands/pin/add.js        |  30 +++
 src/cli/commands/pin/ls.js         |  42 +++
 src/cli/commands/pin/rm.js         |  29 ++
 src/core/components/files.js       |   7 +-
 src/core/components/init-assets.js |  21 +-
 src/core/components/init.js        |  13 +-
 src/core/components/pin-set.js     | 255 ++++++++++++++++++
 src/core/components/pin.js         | 414 +++++++++++++++++++++++++++++
 src/core/components/pin.proto.js   |  17 ++
 src/core/utils.js                  |  93 +++++++
 src/http/api/resources/files.js    |   1 +
 src/http/api/resources/pin.js      | 101 +++++++
 test/cli/files.js                  |  51 +++-
 test/cli/pin.js                    |  90 +++++++
 test/core/utils.spec.js            | 136 ++++++++++
 test/http-api/spec/pin.js          | 147 ++++++++++
 17 files changed, 1416 insertions(+), 36 deletions(-)
 create mode 100644 src/cli/commands/pin/add.js
 create mode 100644 src/cli/commands/pin/ls.js
 create mode 100644 src/cli/commands/pin/rm.js
 create mode 100644 src/core/components/pin-set.js
 create mode 100644 src/core/components/pin.js
 create mode 100644 src/core/components/pin.proto.js
 create mode 100644 src/http/api/resources/pin.js
 create mode 100644 test/cli/pin.js
 create mode 100644 test/core/utils.spec.js
 create mode 100644 test/http-api/spec/pin.js

diff --git a/src/cli/commands/files/add.js b/src/cli/commands/files/add.js
index 1abedb5122..cd0106e86d 100644
--- a/src/cli/commands/files/add.js
+++ b/src/cli/commands/files/add.js
@@ -173,6 +173,11 @@ module.exports = {
       type: 'boolean',
       default: false,
       describe: 'Write no output'
+    },
+    pin: {
+      type: 'boolean',
+      default: true,
+      describe: 'Pin this object when adding'
     }
   },
diff --git a/src/cli/commands/pin/add.js b/src/cli/commands/pin/add.js
new file mode 100644
index 0000000000..7b1a220aa5
--- /dev/null
+++ b/src/cli/commands/pin/add.js
@@ -0,0 +1,30 @@
+'use strict'
+
+const print = require('../../utils').print
+
+module.exports = {
+  command: 'add <ipfs-path...>',
+
+  describe: 'Pins objects to local storage.',
+
+  builder: {
+    recursive: {
+      type: 'boolean',
+      alias: 'r',
+      default: true,
+      describe: 'Recursively pin the object linked to by the specified object(s).'
+    }
+  },
+
+  handler (argv) {
+    const paths = argv['ipfs-path'].split(' ')
+    const recursive = argv.recursive
+    const type = recursive ? 'recursive' : 'direct'
+    argv.ipfs.pin.add(paths[0], { recursive: recursive }, (err, results) => {
+      if (err) { throw err }
+      results.forEach((res) => {
+        print(`pinned ${res.hash} ${type}ly`)
+      })
+    })
+  }
+}
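(For context — not part of the diff: a minimal sketch of the core call this new command wraps. It assumes a ready js-ipfs node, created the same way test/core/utils.spec.js below creates one, and reuses the init-docs root hash that appears in test/cli/pin.js.)

    const IPFS = require('ipfs')
    const node = new IPFS()

    node.once('ready', () => {
      // { recursive: true } is the default: pin the object and its whole subgraph.
      // `results` is an array of { hash } objects, which the CLI handler prints.
      node.pin.add('QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU',
        { recursive: true }, (err, results) => {
          if (err) { throw err }
          results.forEach((res) => console.log(`pinned ${res.hash} recursively`))
        })
    })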
diff --git a/src/cli/commands/pin/ls.js b/src/cli/commands/pin/ls.js
new file mode 100644
index 0000000000..869d684f73
--- /dev/null
+++ b/src/cli/commands/pin/ls.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const print = require('../../utils').print
+
+module.exports = {
+  // bracket syntax with '...' tells yargs to optionally accept a list
+  command: 'ls [ipfs-path...]',
+
+  describe: 'List objects pinned to local storage.',
+
+  builder: {
+    type: {
+      type: 'string',
+      alias: 't',
+      default: 'all',
+      choices: ['direct', 'indirect', 'recursive', 'all'],
+      describe: 'The type of pinned keys to list.'
+    },
+    quiet: {
+      type: 'boolean',
+      alias: 'q',
+      default: false,
+      describe: 'Write just hashes of objects.'
+    }
+  },
+
+  handler: (argv) => {
+    const paths = argv.ipfsPath || ''
+    const type = argv.type
+    const quiet = argv.quiet
+    argv.ipfs.pin.ls(paths, { type: type }, (err, results) => {
+      if (err) { throw err }
+      results.forEach((res) => {
+        let line = res.hash
+        if (!quiet) {
+          line += ` ${res.type}`
+        }
+        print(line)
+      })
+    })
+  }
+}
diff --git a/src/cli/commands/pin/rm.js b/src/cli/commands/pin/rm.js
new file mode 100644
index 0000000000..f3d1e7cf7f
--- /dev/null
+++ b/src/cli/commands/pin/rm.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const print = require('../../utils').print
+
+module.exports = {
+  command: 'rm <ipfs-path...>',
+
+  describe: 'Removes the pinned object from local storage.',
+
+  builder: {
+    recursive: {
+      type: 'boolean',
+      alias: 'r',
+      default: true,
+      describe: 'Recursively unpin the objects linked to by the specified object(s).'
+    }
+  },
+
+  handler: (argv) => {
+    const paths = argv['ipfs-path'].split(' ')
+    const recursive = argv.recursive
+    argv.ipfs.pin.rm(paths, { recursive: recursive }, (err, results) => {
+      if (err) { throw err }
+      results.forEach((res) => {
+        print(`unpinned ${res.hash}`)
+      })
+    })
+  }
+}
diff --git a/src/core/components/files.js b/src/core/components/files.js
index e223e26d13..b7faf7e9d9 100644
--- a/src/core/components/files.js
+++ b/src/core/components/files.js
@@ -21,6 +21,7 @@ const toB58String = require('multihashes').toB58String
 const WRAPPER = 'wrapper/'
 
 function noop () {}
+function identity (x) { return x }
 
 function prepareFile (self, opts, file, callback) {
   opts = opts || {}
@@ -130,7 +131,8 @@ module.exports = function files (self) {
     }
 
     let total = 0
-    let prog = opts.progress || (() => {})
+    const shouldPin = 'pin' in opts ? opts.pin : true
+    const prog = opts.progress || noop
     const progress = (bytes) => {
       total += bytes
       prog(total)
@@ -141,7 +143,8 @@
       pull.map(normalizeContent.bind(null, opts)),
       pull.flatten(),
      importer(self._ipld, opts),
-      pull.asyncMap(prepareFile.bind(null, self, opts))
+      pull.asyncMap(prepareFile.bind(null, self, opts)),
+      shouldPin ? 
pull.asyncMap(pinFile.bind(null, self)) : identity ) } diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js index 00c37120a8..79e96a9fdf 100644 --- a/src/core/components/init-assets.js +++ b/src/core/components/init-assets.js @@ -15,23 +15,20 @@ module.exports = function addDefaultAssets (self, log, callback) { pull( pull.values([initDocsPath]), - pull.asyncMap((val, cb) => glob(path.join(val, '/**/*'), cb)), + pull.asyncMap((val, cb) => + glob(path.join(val, '/**/*'), { nodir: true }, cb) + ), pull.flatten(), - pull.map((element) => { + pull.map(element => { const addPath = element.substring(index + 1) - - if (fs.statSync(element).isDirectory()) { return } - return { path: addPath, content: file(element) } }), - // Filter out directories, which are undefined from above - pull.filter(Boolean), - importer(self._ipld), - pull.through((el) => { - if (el.path === 'init-docs') { - const cid = new CID(el.multihash) + self.files.addPullStream(), + pull.through(file => { + if (file.path === 'init-docs') { + const cid = new CID(file.hash) log('to get started, enter:\n') - log(`\t jsipfs files cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) + log(`\tjsipfs files cat /ipfs/${cid.toBaseEncodedString()}/readme\n`) } }), pull.collect((err) => { diff --git a/src/core/components/init.js b/src/core/components/init.js index 59555f383a..2db6377c66 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -86,16 +86,11 @@ module.exports = function init (self) { } self.log('adding assets') - const tasks = [ + parallel([ // add empty unixfs dir object (go-ipfs assumes this exists) - (cb) => self.object.new('unixfs-dir', cb) - ] - - if (typeof addDefaultAssets === 'function') { - tasks.push((cb) => addDefaultAssets(self, opts.log, cb)) - } - - parallel(tasks, (err) => { + (cb) => self.object.new('unixfs-dir', cb), + (cb) => addDefaultAssets(self, opts.log, cb) + ], (err) => { if (err) { cb(err) } else { diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js new file mode 100644 index 0000000000..a0ea499051 --- /dev/null +++ b/src/core/components/pin-set.js @@ -0,0 +1,255 @@ +'use strict' + +const multihashes = require('multihashes') +const toB58String = multihashes.toB58String +const CID = require('cids') +const protobuf = require('protons') +const fnv1a = require('fnv1a') +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const DAGLink = dagPB.DAGLink +const varint = require('varint') +const once = require('once') + +const pbSchema = require('./pin.proto') + +const emptyKeyHash = 'QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n' +const emptyKey = multihashes.fromB58String(emptyKeyHash) +const defaultFanout = 256 +const maxItems = 8192 +const pb = protobuf(pbSchema) + +function readHeader (rootNode) { + // rootNode.data should be a buffer of the format: + // < varint(headerLength) | header | itemData... 
> + const rootData = rootNode.data + const hdrLength = varint.decode(rootData) + const vBytes = varint.decode.bytes + if (vBytes <= 0) { + return { err: 'Invalid Set header length' } + } + if (vBytes + hdrLength > rootData.length) { + return { err: 'Impossibly large set header length' } + } + const hdrSlice = rootData.slice(vBytes, hdrLength + vBytes) + const header = pb.Set.decode(hdrSlice) + if (header.version !== 1) { + return { err: 'Unsupported Set version: ' + header.version } + } + if (header.fanout > rootNode.links.length) { + return { err: 'Impossibly large fanout' } + } + return { + header: header, + data: rootData.slice(hdrLength + vBytes) + } +} + +exports = module.exports = function (dag) { + const pinSet = { + // should this be part of `object` API? + hasChild: (root, childhash, callback, _links, _checked, _seen) => { + callback = once(callback) + if (typeof childhash === 'object') { + childhash = toB58String(childhash) + } + _links = _links || root.links.length + _checked = _checked || 0 + _seen = _seen || {} + + if (!root.links.length && _links === _checked) { + // all nodes have been checked + return callback(null, false) + } + root.links.forEach((link) => { + const bs58link = toB58String(link.multihash) + if (bs58link === childhash) { + return callback(null, true) + } + + // don't check the same links twice + if (bs58link in _seen) { return } + _seen[bs58link] = true + + dag.get(new CID(link.multihash), (err, res) => { + if (err) { return callback(err) } + + _checked++ + _links += res.value.links.length + pinSet.hasChild(res.value, childhash, callback, _links, _checked, _seen) + }) + }) + }, + + storeSet: (keys, logInternalKey, callback) => { + callback = once(callback) + const items = keys.map((key) => { + return { + key: key, + data: null + } + }) + pinSet.storeItems(items, logInternalKey, (err, rootNode) => { + if (err) { return callback(err) } + const opts = { cid: new CID(rootNode.multihash) } + dag.put(rootNode, opts, (err, cid) => { + if (err) { return callback(err) } + logInternalKey(rootNode.multihash) + callback(null, rootNode) + }) + }) + }, + + storeItems: (items, logInternalKey, callback, _depth, _subcalls, _done) => { + callback = once(callback) + const seed = _depth + const pbHeader = pb.Set.encode({ + version: 1, + fanout: defaultFanout, + seed: seed + }) + let rootData = Buffer.concat([ + Buffer.from(varint.encode(pbHeader.length)), pbHeader + ]) + let rootLinks = [] + for (let i = 0; i < defaultFanout; i++) { + rootLinks.push(new DAGLink('', 1, emptyKey)) + } + logInternalKey(emptyKey) + + if (items.length <= maxItems) { + // the items will fit in a single root node + const itemLinks = [] + const itemData = [] + const indices = [] + for (let i = 0; i < items.length; i++) { + itemLinks.push(new DAGLink('', 1, items[i].key)) + itemData.push(items[i].data || Buffer.alloc(0)) + indices.push(i) + } + indices.sort((a, b) => { + const x = Buffer.compare(itemLinks[a].multihash, itemLinks[b].multihash) + if (x) { return x } + return (a < b ? 
-1 : 1) + }) + const sortedLinks = indices.map((i) => { return itemLinks[i] }) + const sortedData = indices.map((i) => { return itemData[i] }) + rootLinks = rootLinks.concat(sortedLinks) + rootData = Buffer.concat([rootData].concat(sortedData)) + DAGNode.create(rootData, rootLinks, (err, rootNode) => { + if (err) { return callback(err) } + return callback(null, rootNode) + }) + } else { + // need to split up the items into multiple root nodes + // (using go-ipfs "wasteful but simple" approach for consistency) + _subcalls = _subcalls || 0 + _done = _done || 0 + const hashed = {} + const hashFn = (seed, key) => { + const buf = Buffer.alloc(4) + buf.writeUInt32LE(seed, 0) + const data = Buffer.concat([ + buf, Buffer.from(toB58String(key)) + ]) + return fnv1a(data.toString('binary')) + } + // items will be distributed among `defaultFanout` bins + for (let i = 0; i < items.length; i++) { + let h = hashFn(seed, items[i].key) % defaultFanout + hashed[h] = hashed[h] || [] + hashed[h].push(items[i]) + } + const storeItemsCb = (err, child) => { + if (err) { return callback(err) } + dag.put(child, (err) => { + if (err) { return callback(err) } + logInternalKey(child.multihash) + rootLinks[this.h] = new DAGLink( + '', child.size, child.multihash + ) + _done++ + if (_done === _subcalls) { + // all finished + DAGNode.create(rootData, rootLinks, (err, rootNode) => { + if (err) { return callback(err) } + return callback(null, rootNode) + }) + } + }) + } + const hashedKeys = Object.keys(hashed) + _subcalls += hashedKeys.length + hashedKeys.forEach(h => { + pinSet.storeItems( + hashed[h], + logInternalKey, + storeItemsCb.bind({h: h}), + _depth + 1, + _subcalls, + _done + ) + }) + } + }, + + loadSet: (rootNode, name, logInternalKey, callback) => { + callback = once(callback) + const link = rootNode.links.find(l => l.name === name) + if (!link) { return callback(new Error('No link found with name ' + name)) } + logInternalKey(link.multihash) + dag.get(new CID(link.multihash), (err, res) => { + if (err) { return callback(err) } + const keys = [] + const walkerFn = (link) => { + keys.push(link.multihash) + } + pinSet.walkItems(res.value, walkerFn, logInternalKey, (err) => { + if (err) { return callback(err) } + return callback(null, keys) + }) + }) + }, + + walkItems: (node, walkerFn, logInternalKey, callback) => { + callback = once(callback) + const h = readHeader(node) + if (h.err) { return callback(h.err) } + const fanout = h.header.fanout + let subwalkCount = 0 + let finishedCount = 0 + + const walkCb = (err) => { + if (err) { return callback(err) } + finishedCount++ + if (subwalkCount === finishedCount) { + return callback() + } + } + + for (let i = 0; i < node.links.length; i++) { + const link = node.links[i] + if (i >= fanout) { + // item link + walkerFn(link, i, h.data) + } else { + // fanout link + logInternalKey(link.multihash) + if (!emptyKey.equals(link.multihash)) { + subwalkCount++ + dag.get(new CID(link.multihash), (err, res) => { + if (err) { return callback(err) } + pinSet.walkItems( + res.value, walkerFn, logInternalKey, walkCb + ) + }) + } + } + } + if (!subwalkCount) { + return callback() + } + } + } + return pinSet +} diff --git a/src/core/components/pin.js b/src/core/components/pin.js new file mode 100644 index 0000000000..00959d24f0 --- /dev/null +++ b/src/core/components/pin.js @@ -0,0 +1,414 @@ +'use strict' + +const dagPB = require('ipld-dag-pb') +const DAGNode = dagPB.DAGNode +const DAGLink = dagPB.DAGLink +const CID = require('cids') +const pinSet = require('./pin-set') +const 
normalizeHashes = require('../utils').normalizeHashes +const promisify = require('promisify-es6') +const multihashes = require('multihashes') +const each = require('async/each') +const series = require('async/series') +const waterfall = require('async/waterfall') +const until = require('async/until') +const once = require('once') + +function toB58String (hash) { + return new CID(hash).toBaseEncodedString() +} + +module.exports = function pin (self) { + let directPins = new Set() + let recursivePins = new Set() + let internalPins = new Set() + + const pinDataStoreKey = '/local/pins' + + const repo = self._repo + const dag = self.dag + + const pin = { + types: { + direct: 'direct', + recursive: 'recursive', + indirect: 'indirect', + internal: 'internal', + all: 'all' + }, + + clear: () => { + directPins.clear() + recursivePins.clear() + internalPins.clear() + }, + + set: pinSet(dag), + + add: promisify((hashes, options, callback) => { + if (typeof options === 'function') { + callback = options + options = null + } + callback = once(callback) + const recursive = options ? options.recursive : true + normalizeHashes(self, hashes, (err, mhs) => { + if (err) { return callback(err) } + // verify that each hash can be pinned + series(mhs.map(multihash => cb => { + const key = toB58String(multihash) + if (recursive) { + if (recursivePins.has(key)) { + // it's already pinned recursively + return cb(null, key) + } + // entire graph of nested links should be pinned, + // so make sure we have all the objects + dag._getRecursive(multihash, (err) => { + if (err) { return cb(err) } + // found all objects, we can add the pin + return cb(null, key) + }) + } else { + if (recursivePins.has(key)) { + // recursive supersedes direct, can't have both + return cb(new Error(`${key} already pinned recursively`)) + } + if (directPins.has(key)) { + // already directly pinned + return cb(null, key) + } + // make sure we have the object + dag.get(new CID(multihash), (err) => { + if (err) { return cb(err) } + // found the object, we can add the pin + return cb(null, key) + }) + } + }), (err, results) => { + if (err) { return callback(err) } + // update the pin sets in memory + if (recursive) { + results.forEach(key => { + // recursive pin should replace direct pin + directPins.delete(key) + recursivePins.add(key) + }) + } else { + results.forEach(key => directPins.add(key)) + } + // persist updated pin sets to datastore + pin.flush((err, root) => { + if (err) { return callback(err) } + self.log(`Added pins: ${results}`) + return callback(null, results.map(key => ({hash: key}))) + }) + }) + }) + }), + + rm: promisify((hashes, options, callback) => { + let recursive = true + if (typeof options === 'function') { + callback = options + } else if (options && options.recursive === false) { + recursive = false + } + callback = once(callback) + normalizeHashes(self, hashes, (err, mhs) => { + if (err) { return callback(err) } + // verify that each hash can be unpinned + series(mhs.map(multihash => cb => { + pin.isPinnedWithType(multihash, pin.types.all, (err, res) => { + if (err) { return cb(err) } + const { pinned, reason } = res + const key = toB58String(multihash) + if (!pinned) { + return cb(new Error(`${key} is not pinned`)) + } + switch (reason) { + case (pin.types.recursive): + if (recursive) { + return cb(null, key) + } else { + return cb(new Error(`${key} is pinned recursively`)) + } + case (pin.types.direct): + return cb(null, key) + default: + return cb(new Error( + `${key} is pinned indirectly under ${reason}` + )) + } 
+ }) + }), (err, results) => { + if (err) { return callback(err) } + // update the pin sets in memory + const pins = recursive ? recursivePins : directPins + results.forEach(key => pins.delete(key)) + // persist updated pin sets to datastore + pin.flush((err, root) => { + if (err) { return callback(err) } + self.log(`Removed pins: ${results}`) + return callback(null, results.map(key => ({hash: key}))) + }) + }) + }) + }), + + ls: promisify((hashes, options, callback) => { + let type = pin.types.all + if (typeof hashes === 'function') { + callback = hashes + options = null + hashes = null + } + if (typeof options === 'function') { + callback = options + } + if (hashes && hashes.type) { + options = hashes + hashes = null + } + if (options && options.type) { + type = options.type.toLowerCase() + } + callback = once(callback) + if (!pin.types[type]) { + return callback(new Error( + `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` + )) + } + if (hashes) { + // check the pinned state of specific hashes + normalizeHashes(self, hashes, (err, mhs) => { + if (err) { return callback(err) } + series(mhs.map(multihash => cb => { + pin.isPinnedWithType(multihash, pin.types.all, (err, res) => { + if (err) { return cb(err) } + const { pinned, reason } = res + const key = toB58String(multihash) + if (!pinned) { + return cb(new Error( + `Path ${key} is not pinned` + )) + } + switch (reason) { + case pin.types.direct: + case pin.types.recursive: + return cb(null, { + hash: key, + type: reason + }) + default: + return cb(null, { + hash: key, + type: `${pin.types.indirect} through ${reason}` + }) + } + }) + }), callback) + }) + } else { + // show all pinned items of type + const result = [] + if (type === pin.types.direct || type === pin.types.all) { + pin.directKeyStrings().forEach((hash) => { + result.push({ + type: pin.types.direct, + hash: hash + }) + }) + } + if (type === pin.types.recursive || type === pin.types.all) { + pin.recursiveKeyStrings().forEach((hash) => { + result.push({ + type: pin.types.recursive, + hash: hash + }) + }) + } + if (type === pin.types.indirect || type === pin.types.all) { + pin.getIndirectKeys((err, hashes) => { + if (err) { return callback(err) } + hashes.forEach((hash) => { + result.push({ + type: pin.types.indirect, + hash: hash + }) + }) + return callback(null, result) + }) + } else { + return callback(null, result) + } + } + }), + + isPinned: (multihash, callback) => { + pin.isPinnedWithType(multihash, pin.types.all, callback) + }, + + isPinnedWithType: (multihash, pinType, callback) => { + const key = toB58String(multihash) + // recursive + if ((pinType === pin.types.recursive || pinType === pin.types.all) && + recursivePins.has(key)) { + return callback(null, {pinned: true, reason: pin.types.recursive}) + } + if ((pinType === pin.types.recursive)) { + return callback(null, {pinned: false}) + } + // direct + if ((pinType === pin.types.direct || pinType === pin.types.all) && + directPins.has(key)) { + return callback(null, {pinned: true, reason: pin.types.direct}) + } + if ((pinType === pin.types.direct)) { + return callback(null, {pinned: false}) + } + // internal + if ((pinType === pin.types.internal || pinType === pin.types.all) && + internalPins.has(key)) { + return callback(null, {pinned: true, reason: pin.types.internal}) + } + if ((pinType === pin.types.internal)) { + return callback(null, {pinned: false}) + } + + // indirect (default) + // check each recursive key to see if multihash is under it + const rKeys = pin.recursiveKeys() + let 
found = false + until( + // search until multihash was found or no more keys to check + () => (found || !rKeys.length), + (cb) => { + const key = rKeys.pop() + dag.get(new CID(key), (err, res) => { + if (err) { return cb(err) } + pin.set.hasChild(res.value, multihash, (err, has) => { + if (err) { return cb(err) } + found = has + // if found, return the hash of the parent recursive pin + cb(null, found ? toB58String(res.value.multihash) : null) + }) + }) + }, + (err, result) => { + if (err) { return callback(err) } + return callback(null, {pinned: found, reason: result}) + } + ) + }, + + directKeyStrings: () => Array.from(directPins), + + recursiveKeyStrings: () => Array.from(recursivePins), + + internalKeyStrings: () => Array.from(internalPins), + + directKeys: () => pin.directKeyStrings().map(key => multihashes.fromB58String(key)), + + recursiveKeys: () => pin.recursiveKeyStrings().map(key => multihashes.fromB58String(key)), + + internalKeys: () => pin.internalKeyStrings().map(key => multihashes.fromB58String(key)), + + getIndirectKeys: (callback) => { + const indirectKeys = new Set() + const rKeys = pin.recursiveKeys() + each(rKeys, (multihash, cb) => { + dag._getRecursive(multihash, (err, nodes) => { + if (err) { return cb(err) } + nodes.forEach((node) => { + const key = toB58String(node.multihash) + if (!directPins.has(key) && !recursivePins.has(key)) { + // not already pinned recursively or directly + indirectKeys.add(key) + } + }) + cb() + }) + }, (err) => { + if (err) { return callback(err) } + callback(null, Array.from(indirectKeys)) + }) + }, + + // encodes and writes pin key sets to the datastore + // each key set will be stored as a DAG node, and a root node will link to both + flush: promisify((callback) => { + const newInternalPins = new Set() + const logInternalKey = (mh) => newInternalPins.add(toB58String(mh)) + const handle = { + put: (k, v, cb) => { + handle[k] = v + cb() + } + } + waterfall([ + // create link to direct keys node + (cb) => pin.set.storeSet(pin.directKeys(), logInternalKey, cb), + (dRoot, cb) => DAGLink.create(pin.types.direct, dRoot.size, dRoot.multihash, cb), + (dLink, cb) => handle.put('dLink', dLink, cb), + // create link to recursive keys node + (cb) => pin.set.storeSet(pin.recursiveKeys(), logInternalKey, cb), + (rRoot, cb) => DAGLink.create(pin.types.recursive, rRoot.size, rRoot.multihash, cb), + (rLink, cb) => handle.put('rLink', rLink, cb), + // the pin-set nodes link to an empty node, so make sure it's added to dag + (cb) => DAGNode.create(Buffer.alloc(0), cb), + (empty, cb) => dag.put(empty, {cid: new CID(empty.multihash)}, cb), + // create root node with links to direct and recursive nodes + (cid, cb) => DAGNode.create(Buffer.alloc(0), [handle.dLink, handle.rLink], cb), + (root, cb) => handle.put('root', root, cb), + // add the root node to dag + (cb) => dag.put(handle.root, {cid: new CID(handle.root.multihash)}, cb), + // update the internal pin set + (cid, cb) => cb(null, logInternalKey(handle.root.multihash)), + // save serialized root to datastore under a consistent key + (_, cb) => repo.closed ? 
repo.datastore.open(cb) : cb(null, null), // hack for CLI tests + (_, cb) => repo.datastore.put(pinDataStoreKey, handle.root.multihash, cb) + ], (err, result) => { + if (err) { return callback(err) } + self.log(`Flushed ${handle.root} to the datastore.`) + internalPins = newInternalPins + return callback(null, handle.root) + }) + }), + + load: promisify((callback) => { + const newInternalPins = new Set() + const logInternalKey = (mh) => newInternalPins.add(toB58String(mh)) + const handle = { + put: (k, v, cb) => { + handle[k] = v + cb() + } + } + waterfall([ + (cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests + (_, cb) => repo.datastore.has(pinDataStoreKey, cb), + (has, cb) => has ? cb() : cb('No pins to load'), + (cb) => repo.datastore.get(pinDataStoreKey, cb), + (mh, cb) => dag.get(new CID(mh), cb), + (root, cb) => handle.put('root', root.value, cb), + (cb) => pin.set.loadSet(handle.root, pin.types.recursive, logInternalKey, cb), + (rKeys, cb) => handle.put('rKeys', rKeys, cb), + (cb) => pin.set.loadSet(handle.root, pin.types.direct, logInternalKey, cb) + ], (err, dKeys) => { + if (err && err !== 'No pins to load') { + return callback(err) + } + if (dKeys) { + directPins = new Set(dKeys.map(mh => toB58String(mh))) + recursivePins = new Set(handle.rKeys.map(mh => toB58String(mh))) + logInternalKey(handle.root.multihash) + internalPins = newInternalPins + } + self.log('Loaded pins from the datastore') + return callback() + }) + }) + } + return pin +} diff --git a/src/core/components/pin.proto.js b/src/core/components/pin.proto.js new file mode 100644 index 0000000000..db2391c91f --- /dev/null +++ b/src/core/components/pin.proto.js @@ -0,0 +1,17 @@ +/** + * Protobuf interface + * from go-ipfs/pin/internal/pb/header.proto + */ +module.exports = ` + syntax = "proto2"; + + package ipfs.pin; + + option go_package = "pb"; + + message Set { + optional uint32 version = 1; + optional uint32 fanout = 2; + optional fixed32 seed = 3; + } +` diff --git a/src/core/utils.js b/src/core/utils.js index a492250383..be101273bb 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -1,3 +1,96 @@ 'use strict' +const multihashes = require('multihashes') +const mapSeries = require('async/mapSeries') +const CID = require('cids') +const isIPFS = require('is-ipfs') + exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.' + +/** + * Break an ipfs-path down into it's root hash and an array of links. + * + * examples: + * b58Hash -> { root: 'b58Hash', links: [] } + * b58Hash/mercury/venus -> { root: 'b58Hash', links: ['mercury', 'venus']} + * /ipfs/b58Hash/links/by/name -> { root: 'b58Hash', links: ['links', 'by', 'name'] } + * + * @param {String} ipfsPath An ipfs-path + * @return {Object} { root: base58 string, links: [string], ?err: Error } + */ +exports.parseIpfsPath = function parseIpfsPath (ipfsPath) { + const matched = ipfsPath.match(/^(?:\/ipfs\/)?([^/]+(?:\/[^/]+)*)\/?$/) + const errorResult = { + error: new Error('invalid ipfs ref path') + } + if (!matched) { + return errorResult + } + + const [root, ...links] = matched[1].split('/') + + if (isIPFS.multihash(root)) { + return { + root: root, + links: links + } + } else { + return errorResult + } +} + +/** + * Resolve various styles of an ipfs-path to the hash of the target node. + * Follows links in the path. 
+ *
+ * Handles formats:
+ *  - <b58Hash>
+ *  - <b58Hash>/link/to/another/planet
+ *  - /ipfs/<b58Hash>
+ *  - Buffers of any of the above
+ *  - multihash Buffer
+ *
+ * @param {IPFS} ipfs the IPFS node
+ * @param {Described above} ipfsPaths A single or collection of ipfs-paths
+ * @param {Function} callback Node-style callback. res is Array<multihash>
+ * @return {void}
+ */
+exports.normalizeHashes = function normalizeHashes (ipfs, ipfsPaths, callback) {
+  if (!Array.isArray(ipfsPaths)) {
+    ipfsPaths = [ipfsPaths]
+  }
+  mapSeries(ipfsPaths, (path, cb) => {
+    const validate = (mh) => {
+      try {
+        multihashes.validate(mh)
+        cb(null, mh)
+      } catch (err) { cb(err) }
+    }
+    if (typeof path !== 'string') {
+      return validate(path)
+    }
+    const {error, root, links} = exports.parseIpfsPath(path)
+    const rootHash = multihashes.fromB58String(root)
+    if (error) return cb(error)
+    if (!links.length) {
+      return validate(rootHash)
+    }
+    // recursively follow named links to the target node
+    const pathFn = (err, obj) => {
+      if (err) { return cb(err) }
+      if (!links.length) {
+        // done tracing, we have the target node
+        return validate(obj.multihash)
+      }
+      const linkName = links.shift()
+      const nextLink = obj.links.find(link => link.name === linkName)
+      if (!nextLink) {
+        return cb(new Error(
+          `no link named ${linkName} under ${obj.toJSON().Hash}`
+        ))
+      }
+      ipfs.object.get(nextLink.multihash, pathFn)
+    }
+    ipfs.object.get(rootHash, pathFn)
+  }, callback)
+}
diff --git a/src/http/api/resources/files.js b/src/http/api/resources/files.js
index b62fb67aa1..6f889926ef 100644
--- a/src/http/api/resources/files.js
+++ b/src/http/api/resources/files.js
@@ -166,6 +166,7 @@ exports.add = {
       otherwise: Joi.boolean().valid(false)
     }),
     'only-hash': Joi.boolean(),
+    pin: Joi.boolean().default(true),
     'wrap-with-directory': Joi.boolean()
   })
   // TODO: Necessary until validate "recursive", "stream-channels" etc.
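(For context — not part of the diff: a short sketch of how core callers are expected to use the normalizeHashes helper above. `node` is assumed to be a ready IPFS instance; the hash and the /about path mirror the fixtures used in test/core/utils.spec.js below.)

    const utils = require('./src/core/utils')

    utils.normalizeHashes(node, [
      'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU',            // bare multihash
      '/ipfs/QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU/about' // path through a named link
    ], (err, mhs) => {
      if (err) { throw err }
      // `mhs` is an array of validated multihash Buffers for the target nodes,
      // ready to hand to pin.add / pin.rm
    })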
diff --git a/src/http/api/resources/pin.js b/src/http/api/resources/pin.js new file mode 100644 index 0000000000..a2c89ad29b --- /dev/null +++ b/src/http/api/resources/pin.js @@ -0,0 +1,101 @@ +'use strict' + +const _ = require('lodash') +const debug = require('debug') +const log = debug('jsipfs:http-api:pin') +log.error = debug('jsipfs:http-api:pin:error') + +exports = module.exports + +function parseArgs (request, reply) { + if (!request.query.arg) { + return reply({ + Message: "Argument 'arg' is required", + Code: 0 + }).code(400).takeover() + } + + const recursive = request.query.recursive !== 'false' + + return reply({ + path: request.query.arg, + recursive: recursive, + }) +} + +exports.ls = { + parseArgs: (request, reply) => { + const ipfs = request.server.app.ipfs + const type = request.query.type || ipfs.pin.types.all + + return reply({ + path: request.query.arg, + type: type + }) + }, + + handler: (request, reply) => { + const { path, type } = request.pre.args + const ipfs = request.server.app.ipfs + ipfs.pin.ls(path, { type }, (err, result) => { + if (err) { + log.error(err) + return reply({ + Message: `Failed to list pins: ${err.message}`, + Code: 0 + }).code(500) + } + + return reply({ + Keys: _.mapValues( + _.keyBy(result, obj => obj.hash), + obj => ({Type: obj.type}) + ) + }) + }) + } +} + +exports.add = { + parseArgs: parseArgs, + + handler: (request, reply) => { + const ipfs = request.server.app.ipfs + const { path, recursive } = request.pre.args + ipfs.pin.add(path, { recursive }, (err, result) => { + if (err) { + log.error(err) + return reply({ + Message: `Failed to add pin: ${err.message}`, + Code: 0 + }).code(500) + } + + return reply({ + Pins: result.map(obj => obj.hash) + }) + }) + } +} + +exports.rm = { + parseArgs: parseArgs, + + handler: (request, reply) => { + const ipfs = request.server.app.ipfs + const { path, recursive } = request.pre.args + ipfs.pin.rm(path, { recursive }, (err, result) => { + if (err) { + log.error(err) + return reply({ + Message: `Failed to remove pin: ${err.message}`, + Code: 0 + }).code(500) + } + + return reply({ + Pins: result.map(obj => obj.hash) + }) + }) + } +} diff --git a/test/cli/files.js b/test/cli/files.js index 60175ddfab..eced37b173 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -287,10 +287,10 @@ describe('files', () => runOnAndOff((thing) => { it('add --only-hash outputs correct hash', function () { return ipfs('files add --only-hash src/init-files/init-docs/readme') - .then(out => - expect(out) - .to.eql('added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB readme\n') - ) + .then(out => + expect(out) + .to.eql('added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB readme\n') + ) }) it('add --only-hash does not add a file to the datastore', function () { @@ -301,17 +301,42 @@ describe('files', () => runOnAndOff((thing) => { fs.writeFileSync(filepath, content) return ipfs(`files add --only-hash ${filepath}`) + .then(out => { + const hash = out.split(' ')[1] + + // 'jsipfs object get ' should timeout with the daemon on + // and should fail fast with the daemon off + return Promise.race([ + ipfs.fail(`object get ${hash}`), + new Promise((resolve, reject) => setTimeout(resolve, 4000)) + ]) + .then(() => fs.unlinkSync(filepath)) + }) + }) + + it('add pins by default', function () { + const filePath = path.join(os.tmpdir(), hat()) + const content = String(Math.random()) + const file = fs.writeFileSync(filePath, content) + + return ipfs(`files add -Q ${filePath}`) .then(out => { - const hash = out.split(' ')[1] - - // 
'jsipfs object get ' should timeout with the daemon on - // and should fail fast with the daemon off - return Promise.race([ - ipfs.fail(`object get ${hash}`), - new Promise((resolve, reject) => setTimeout(resolve, 4000)) - ]) - .then(() => fs.unlinkSync(filepath)) + const hash = out.trim() + return ipfs(`pin ls ${hash}`) + .then(ls => expect(ls).to.include(hash)) }) + .then(() => fs.unlinkSync(filePath)) + }) + + it('add does not pin with --pin=false', function () { + const filePath = path.join(os.tmpdir(), hat()) + const content = String(Math.random()) + const file = fs.writeFileSync(filePath, content) + + return ipfs(`files add -Q --pin=false ${filePath}`) + .then(out => ipfs(`pin ls ${out.trim()}`)) + .then(ls => expect(ls.trim()).to.eql('')) + .then(() => fs.unlinkSync(filePath)) }) HASH_ALGS.forEach((name) => { diff --git a/test/cli/pin.js b/test/cli/pin.js new file mode 100644 index 0000000000..9271ffe410 --- /dev/null +++ b/test/cli/pin.js @@ -0,0 +1,90 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('chai').expect +const runOnAndOff = require('../utils/on-and-off') + +// file structure for recursive tests: +// root (init-docs) +// |`readme +// `docs +// `index + +const keys = { + root: 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU', + readme: 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB', + docs: 'QmegvLXxpVKiZ4b57Xs1syfBVRd8CbucVHAp7KpLQdGieC', + index: 'QmQN88TEidd3RY2u3dpib49fERTDfKtDpvxnvczATNsfKT' +} + +describe('pin', () => runOnAndOff((thing) => { + const filesDir = 'test/fixtures/test-data/recursive-get-dir/init-docs' + + let ipfs + + before(() => { + ipfs = thing.ipfs + return ipfs(`files add -r ${filesDir}`) + }) + + // rm first because `files add` should pin recursively by default + it('rm (recursively by default)', () => { + return ipfs(`pin rm ${keys.root}`) + .then((out) => expect(out).to.equal(`unpinned ${keys.root}\n`)) + .then(() => ipfs('pin ls')) + .then((out) => expect(out).to.equal('')) + }) + + it('add (recursively by default)', () => { + return ipfs(`pin add ${keys.root}`).then((out) => { + expect(out).to.eql(`pinned ${keys.root} recursively\n`) + }) + }) + + it('add (direct)', () => { + return ipfs(`pin add ${keys.readme} --recursive false`).then((out) => { + expect(out).to.eql(`pinned ${keys.readme} directly\n`) + }) + }) + + it('ls (recursive)', () => { + return ipfs(`pin ls ${keys.root}`).then((out) => { + expect(out).to.eql(`${keys.root} recursive\n`) + }) + }) + + it('ls (direct)', () => { + return ipfs(`pin ls ${keys.readme}`).then((out) => { + expect(out).to.eql(`${keys.readme} direct\n`) + }) + }) + + it('ls (indirect)', () => { + return ipfs(`pin ls ${keys.index}`).then((out) => { + expect(out).to.eql(`${keys.index} indirect through ${keys.root}\n`) + }) + }) + + it('ls with multiple keys', () => { + return ipfs(`pin ls ${keys.root} ${keys.readme}`).then((out) => { + expect(out).to.eql(`${keys.root} recursive\n${keys.readme} direct\n`) + }) + }) + + it('ls (all)', () => { + return ipfs('pin ls').then((out) => { + expect(out.split('\n').length).to.eql(12) + expect(out).to.include(`${keys.root} recursive\n`) + expect(out).to.include(`${keys.readme} direct\n`) + expect(out).to.include(`${keys.docs} indirect\n`) + expect(out).to.include(`${keys.index} indirect\n`) + }) + }) + + it('rm (direct)', () => { + return ipfs(`pin rm --recursive false ${keys.readme}`) + .then((out) => expect(out).to.equal(`unpinned ${keys.readme}\n`)) + .then(() => ipfs('pin ls')) + .then((out) => expect(out).to.not.include(`${keys.readme} direct\n`)) + 
}) +})) diff --git a/test/core/utils.spec.js b/test/core/utils.spec.js new file mode 100644 index 0000000000..638aebf837 --- /dev/null +++ b/test/core/utils.spec.js @@ -0,0 +1,136 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) +const multihashes = require('multihashes') + +// This gets replaced by `create-repo-browser.js` in the browser +const createTempRepo = require('../utils/create-repo-nodejs.js') +const IPFS = require('../../src/core') +const utils = require('../../src/core/utils') + +describe('utils', () => { + const rootHashString = 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU' + const rootHash = multihashes.fromB58String(rootHashString) + const rootPathString = `/ipfs/${rootHashString}/` + const aboutHashString = 'QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V' + const aboutHash = multihashes.fromB58String(aboutHashString) + const aboutPathString = `/ipfs/${rootHashString}/about` + + describe('parseIpfsPath', () => { + it('parses path with no links', function () { + expect(utils.parseIpfsPath(rootHashString)) + .to.deep.equal({ + root: rootHashString, + links: [] + }) + }) + + it('parses path with links', function () { + expect(utils.parseIpfsPath(`${rootHashString}/docs/index`)) + .to.deep.equal({ + root: rootHashString, + links: ['docs', 'index'] + }) + }) + + it('parses path with /ipfs/ prefix', function () { + expect(utils.parseIpfsPath(`/ipfs/${rootHashString}/about`)) + .to.deep.equal({ + root: rootHashString, + links: ['about'] + }) + }) + + it('returns error for malformed path', function () { + const result = utils.parseIpfsPath(`${rootHashString}//about`) + expect(result.error).to.be.instanceof(Error) + .and.have.property('message', 'invalid ipfs ref path') + }) + + it('returns error if root is not a valid CID', function () { + const result = utils.parseIpfsPath('invalid/ipfs/path') + expect(result.error).to.be.instanceof(Error) + .and.have.property('message', 'invalid ipfs ref path') + }) + }) + + describe('normalizeHashes', function () { + this.timeout(80 * 1000) + let node + let repo + + before((done) => { + repo = createTempRepo() + node = new IPFS({ + repo: repo + }) + node.once('ready', done) + }) + + after((done) => { + repo.teardown(done) + }) + + it('normalizes hash string to array with multihash object', (done) => { + utils.normalizeHashes(node, rootHashString, (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(1) + expect(hashes[0]).to.deep.equal(rootHash) + done() + }) + }) + + it('normalizes array of hash strings to array of multihash objects', (done) => { + utils.normalizeHashes(node, [rootHashString, aboutHashString], (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(2) + expect(hashes[0]).to.deep.equal(rootHash) + expect(hashes[1]).to.deep.equal(aboutHash) + done() + }) + }) + + it('normalizes multihash object to array with multihash object', (done) => { + utils.normalizeHashes(node, aboutHash, (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(1) + expect(hashes[0]).to.deep.equal(aboutHash) + done() + }) + }) + + it('normalizes array of multihash objects to array of multihash objects', (done) => { + utils.normalizeHashes(node, [rootHash, aboutHash], (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(2) + expect(hashes[0]).to.deep.equal(rootHash) + 
expect(hashes[1]).to.deep.equal(aboutHash) + done() + }) + }) + + it('normalizes ipfs path string to array with multihash object', (done) => { + utils.normalizeHashes(node, aboutPathString, (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(1) + expect(hashes[0]).to.deep.equal(aboutHash) + done() + }) + }) + + it('normalizes array of ipfs path strings to array with multihash objects', (done) => { + utils.normalizeHashes(node, [aboutPathString, rootPathString], (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(2) + expect(hashes[0]).to.deep.equal(aboutHash) + expect(hashes[1]).to.deep.equal(rootHash) + done() + }) + }) + }) +}) diff --git a/test/http-api/spec/pin.js b/test/http-api/spec/pin.js new file mode 100644 index 0000000000..f58ff3a10f --- /dev/null +++ b/test/http-api/spec/pin.js @@ -0,0 +1,147 @@ +/* eslint-env mocha */ +'use strict' + +const expect = require('chai').expect +const fs = require('fs') +const FormData = require('form-data') +const streamToPromise = require('stream-to-promise') +const each = require('async/each') + +// use a tree of ipfs objects for recursive tests: +// root +// |`leaf +// `branch +// `subLeaf + +const keys = { + root: 'QmWQwS2Xh1SFGMPzUVYQ52b7RC7fTfiaPHm3ZyTRZuHmer', + leaf: 'QmaZoTQ6wFe7EtvaePBUeXavfeRqCAq3RUMomFxBpZLrLA', + branch: 'QmNxjjP7dtx6pzxWGBRCrgmjX3JqKL7uF2Kjx7ExiZDbSB', + subLeaf: 'QmUzzznkyQL7FjjBztG3D1tTjBuxeArLceDZnuSowUggXL' +} + +module.exports = (http) => { + describe('pin', () => { + let api + + before((done) => { + // add test tree to repo + api = http.api.server.select('API') + const putFile = (filename, cb) => { + const filePath = `test/test-data/tree/${filename}.json` + const form = new FormData() + form.append('file', fs.createReadStream(filePath)) + const headers = form.getHeaders() + streamToPromise(form).then((payload) => { + api.inject({ + method: 'POST', + url: '/api/v0/object/put', + headers: headers, + payload: payload + }, (res) => { + expect(res.statusCode).to.equal(200) + cb() + }) + }) + } + each(Object.keys(keys), putFile, (err) => { + expect(err).to.not.exist() + done() + }) + }) + + describe('/pin/add', () => { + it('pins object recursively by default', (done) => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/add?arg=${keys.root}` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result).to.deep.equal({Pins: [keys.root]}) + done() + }) + }) + }) + + describe('/pin/add (direct)', () => { + it('pins object directly if specified', (done) => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/add?arg=${keys.leaf}&recursive=false` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result).to.deep.equal({Pins: [keys.leaf]}) + done() + }) + }) + }) + + describe('/pin/ls (with path)', () => { + it('finds specified pinned object', (done) => { + api.inject({ + method: 'GET', + url: `/api/v0/pin/ls?arg=/ipfs/${keys.root}/branch/subLeaf` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result.Keys[keys.subLeaf].Type) + .to.equal(`indirect through ${keys.root}`) + done() + }) + }) + }) + + describe('/pin/ls (without path or type)', () => { + it('finds all pinned objects', (done) => { + api.inject({ + method: 'GET', + url: '/api/v0/pin/ls' + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result.Keys[keys.root].Type).to.equal('recursive') + expect(res.result.Keys[keys.leaf].Type).to.equal('direct') + expect(res.result.Keys[keys.branch].Type).to.equal('indirect') + 
expect(res.result.Keys[keys.subLeaf].Type).to.equal('indirect') + done() + }) + }) + }) + + describe('/pin/rm (direct)', () => { + it('unpins only directly pinned objects if specified', (done) => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/rm?arg=${keys.leaf}&recursive=false` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result).to.deep.equal({Pins: [keys.leaf]}) + + api.inject({ + method: 'POST', + url: `/api/v0/pin/rm?arg=${keys.root}&recursive=false` + }, (res) => { + expect(res.statusCode).to.equal(500) + expect(res.result.Message).to.equal( + 'Failed to remove pin: ' + + 'QmWQwS2Xh1SFGMPzUVYQ52b7RC7fTfiaPHm3ZyTRZuHmer ' + + 'is pinned recursively' + ) + done() + }) + }) + }) + }) + + describe('/pin/rm', () => { + it('unpins recursively by default', (done) => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/rm?arg=${keys.root}` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result).to.deep.equal({Pins: [keys.root]}) + done() + }) + }) + }) + }) +} From ee813ebcd776cf4a1cc03a4797137296b1f7d662 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Fri, 16 Mar 2018 13:05:23 -0400 Subject: [PATCH 02/21] revert: default assets are not added when running on a browser refactor: change pin.flush logging message --- src/core/components/init.js | 17 ++++++++++++----- src/core/components/pin.js | 2 +- 2 files changed, 13 insertions(+), 6 deletions(-) diff --git a/src/core/components/init.js b/src/core/components/init.js index 2db6377c66..5b5f77d8e7 100644 --- a/src/core/components/init.js +++ b/src/core/components/init.js @@ -85,12 +85,19 @@ module.exports = function init (self) { return cb(null, true) } - self.log('adding assets') - parallel([ + const tasks = [ // add empty unixfs dir object (go-ipfs assumes this exists) - (cb) => self.object.new('unixfs-dir', cb), - (cb) => addDefaultAssets(self, opts.log, cb) - ], (err) => { + (cb) => self.object.new('unixfs-dir', cb) + ] + + if (typeof addDefaultAssets === 'function') { + // addDefaultAssets is undefined on browsers. + // See package.json browser config + tasks.push((cb) => addDefaultAssets(self, opts.log, cb)) + } + + self.log('adding assets') + parallel(tasks, (err) => { if (err) { cb(err) } else { diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 00959d24f0..554a082c36 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -370,7 +370,7 @@ module.exports = function pin (self) { (_, cb) => repo.datastore.put(pinDataStoreKey, handle.root.multihash, cb) ], (err, result) => { if (err) { return callback(err) } - self.log(`Flushed ${handle.root} to the datastore.`) + self.log(`Flushed pins with root: ${handle.root}.`) internalPins = newInternalPins return callback(null, handle.root) }) From f847f98d387583dd1710582a0ada2c8015fdc81f Mon Sep 17 00:00:00 2001 From: jonkrone Date: Fri, 16 Mar 2018 14:15:45 -0400 Subject: [PATCH 03/21] feat(test): add tests for failure cases of normalizeHashes fix: don't need to cast the object.get result with toJSON revert: use interface-datastore.Key for datastore pin storage The proper change would be that datastore-level automatically casts operations into Keys fix: do not invoke callback within a try/catch feat(test): make cli pin tests more robust By using files that aren't added on IPFS initialization. Still needs work on files.rm (direct) and ipfs ls (indirect). fix: remove commented code, traced test failures to pin-set Got to go for the night, though, so will checkpoint here and address tomorrow. 
feat: parseIpfsPath now throws errors for consistency feat: resolveIpfsPaths error message lists the relative path that failed feat: use follow.bind instead of mutating the links Also decided not show relative paths. Less human friendly but probably cleaner otherwise. refactor: resolveIpfsPaths -> resolvePaths feat: promisify resolvePaths test: change parseIpfsPath failure tests to use try/catch docs: edit resolvePath doc revert: accidentally deleted commands/pin.js --- src/cli/commands/pin.js | 15 ++++ src/cli/commands/pin/ls.js | 1 + src/core/components/pin.js | 32 +++---- src/core/utils.js | 109 +++++++++++++---------- test/cli/pin.js | 120 ++++++++++++++------------ test/core/utils.spec.js | 52 +++++++---- test/fixtures/planets/mercury/wiki.md | 12 +++ test/fixtures/planets/solar-system.md | 10 +++ 8 files changed, 217 insertions(+), 134 deletions(-) create mode 100644 src/cli/commands/pin.js create mode 100644 test/fixtures/planets/mercury/wiki.md create mode 100644 test/fixtures/planets/solar-system.md diff --git a/src/cli/commands/pin.js b/src/cli/commands/pin.js new file mode 100644 index 0000000000..d7a68fb023 --- /dev/null +++ b/src/cli/commands/pin.js @@ -0,0 +1,15 @@ +'use strict' + +module.exports = { + command: 'pin', + + description: 'Pin and unpin objects to local storage.', + + builder (yargs) { + return yargs + .commandDir('pin') + }, + + handler (argv) { + } +} diff --git a/src/cli/commands/pin/ls.js b/src/cli/commands/pin/ls.js index 869d684f73..164c544523 100644 --- a/src/cli/commands/pin/ls.js +++ b/src/cli/commands/pin/ls.js @@ -28,6 +28,7 @@ module.exports = { const paths = argv.ipfsPath || '' const type = argv.type const quiet = argv.quiet + argv.ipfs.pin.ls(paths, { type: type }, (err, results) => { if (err) { throw err } results.forEach((res) => { diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 554a082c36..e554284088 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -5,9 +5,10 @@ const DAGNode = dagPB.DAGNode const DAGLink = dagPB.DAGLink const CID = require('cids') const pinSet = require('./pin-set') -const normalizeHashes = require('../utils').normalizeHashes +const resolvePaths = require('../utils').resolvePaths const promisify = require('promisify-es6') const multihashes = require('multihashes') +const Key = require('interface-datastore').Key const each = require('async/each') const series = require('async/series') const waterfall = require('async/waterfall') @@ -23,7 +24,7 @@ module.exports = function pin (self) { let recursivePins = new Set() let internalPins = new Set() - const pinDataStoreKey = '/local/pins' + const pinDataStoreKey = new Key('/local/pins') const repo = self._repo const dag = self.dag @@ -45,14 +46,15 @@ module.exports = function pin (self) { set: pinSet(dag), - add: promisify((hashes, options, callback) => { + add: promisify((paths, options, callback) => { if (typeof options === 'function') { callback = options options = null } callback = once(callback) const recursive = options ? 
options.recursive : true - normalizeHashes(self, hashes, (err, mhs) => { + + resolvePaths(self, paths, (err, mhs) => { if (err) { return callback(err) } // verify that each hash can be pinned series(mhs.map(multihash => cb => { @@ -107,7 +109,7 @@ module.exports = function pin (self) { }) }), - rm: promisify((hashes, options, callback) => { + rm: promisify((paths, options, callback) => { let recursive = true if (typeof options === 'function') { callback = options @@ -115,7 +117,7 @@ module.exports = function pin (self) { recursive = false } callback = once(callback) - normalizeHashes(self, hashes, (err, mhs) => { + resolvePaths(self, paths, (err, mhs) => { if (err) { return callback(err) } // verify that each hash can be unpinned series(mhs.map(multihash => cb => { @@ -156,19 +158,19 @@ module.exports = function pin (self) { }) }), - ls: promisify((hashes, options, callback) => { + ls: promisify((paths, options, callback) => { let type = pin.types.all - if (typeof hashes === 'function') { - callback = hashes + if (typeof paths === 'function') { + callback = paths options = null - hashes = null + paths = null } if (typeof options === 'function') { callback = options } - if (hashes && hashes.type) { - options = hashes - hashes = null + if (paths && paths.type) { + options = paths + paths = null } if (options && options.type) { type = options.type.toLowerCase() @@ -179,9 +181,9 @@ module.exports = function pin (self) { `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` )) } - if (hashes) { + if (paths) { // check the pinned state of specific hashes - normalizeHashes(self, hashes, (err, mhs) => { + resolvePaths(self, paths, (err, mhs) => { if (err) { return callback(err) } series(mhs.map(multihash => cb => { pin.isPinnedWithType(multihash, pin.types.all, (err, res) => { diff --git a/src/core/utils.js b/src/core/utils.js index be101273bb..b5cf7b3cb0 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -1,41 +1,41 @@ 'use strict' const multihashes = require('multihashes') -const mapSeries = require('async/mapSeries') +const promisify = require('promisify-es6') +const map = require('async/map') const CID = require('cids') const isIPFS = require('is-ipfs') exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.' /** - * Break an ipfs-path down into it's root hash and an array of links. + * Break an ipfs-path down into it's hash hash and an array of links. 
* * examples: - * b58Hash -> { root: 'b58Hash', links: [] } - * b58Hash/mercury/venus -> { root: 'b58Hash', links: ['mercury', 'venus']} - * /ipfs/b58Hash/links/by/name -> { root: 'b58Hash', links: ['links', 'by', 'name'] } + * b58Hash -> { hash: 'b58Hash', links: [] } + * b58Hash/mercury/venus -> { hash: 'b58Hash', links: ['mercury', 'venus']} + * /ipfs/b58Hash/links/by/name -> { hash: 'b58Hash', links: ['links', 'by', 'name'] } * * @param {String} ipfsPath An ipfs-path - * @return {Object} { root: base58 string, links: [string], ?err: Error } + * @return {Object} { hash: base58 string, links: [string], ?err: Error } + * @throws on an invalid @param ipfsPath */ -exports.parseIpfsPath = function parseIpfsPath (ipfsPath) { +function parseIpfsPath (ipfsPath) { const matched = ipfsPath.match(/^(?:\/ipfs\/)?([^/]+(?:\/[^/]+)*)\/?$/) - const errorResult = { - error: new Error('invalid ipfs ref path') - } + const invalidPathErr = new Error('invalid ipfs ref path') if (!matched) { - return errorResult + throw invalidPathErr } - const [root, ...links] = matched[1].split('/') + const [hash, ...links] = matched[1].split('/') - if (isIPFS.multihash(root)) { + if (isIPFS.multihash(hash)) { return { - root: root, + hash: hash, links: links } } else { - return errorResult + throw invalidPathErr } } @@ -43,54 +43,71 @@ * Resolve various styles of an ipfs-path to the hash of the target node. * Follows links in the path. * - * Handles formats: + * Accepts formats: * - <b58Hash> - * - <b58Hash>/link/to/another/planet - * - /ipfs/<b58Hash> - * - Buffers of any of the above + * - <b58Hash>/link/to/venus + * - /ipfs/<b58Hash>/link/to/pluto * - multihash Buffer + * - Arrays of the above * - * @param {IPFS} ipfs the IPFS node + * @param {IPFS} ipfs the IPFS node * @param {Described above} ipfsPaths A single or collection of ipfs-paths - * @param {Function} callback 
res is Array - * @return {void} + * @param {Function} callback res is Array + * if no callback is passed, returns a Promise + * @return {Promise|void} */ -exports.normalizeHashes = function normalizeHashes (ipfs, ipfsPaths, callback) { +const resolvePaths = promisify(function (ipfs, ipfsPaths, callback) { if (!Array.isArray(ipfsPaths)) { ipfsPaths = [ipfsPaths] } - mapSeries(ipfsPaths, (path, cb) => { - const validate = (mh) => { + + map(ipfsPaths, (path, cb) => { + if (typeof path !== 'string') { try { - multihashes.validate(mh) - cb(null, mh) - } catch (err) { cb(err) } + multihashes.validate(path) + } catch (err) { + return cb(err) + } + return cb(null, path) } - if (typeof path !== 'string') { - return validate(path) + + let parsedPath + try { + parsedPath = exports.parseIpfsPath(path) + } catch(err) { + return cb(err) } - const {error, root, links} = exports.parseIpfsPath(path) - const rootHash = multihashes.fromB58String(root) - if (error) return cb(error) - if (!links.length) { - return validate(rootHash) + + const rootHash = multihashes.fromB58String(parsedPath.hash) + const rootLinks = parsedPath.links + if (!rootLinks.length) { + return cb(null, rootHash) } + + ipfs.object.get(rootHash, follow.bind(null, rootLinks)) + // recursively follow named links to the target node - const pathFn = (err, obj) => { - if (err) { return cb(err) } + function follow (links, err, obj) { + if (err) { + return cb(err) + } if (!links.length) { - // done tracing, we have the target node - return validate(obj.multihash) + // done tracing, obj is the target node + return cb(null, obj.multihash) } - const linkName = links.shift() - const nextLink = obj.links.find(link => link.name === linkName) - if (!nextLink) { + + const linkName = links[0] + const nextObj = obj.links.find(link => link.name === linkName) + if (!nextObj) { return cb(new Error( - `no link named ${linkName} under ${obj.toJSON().Hash}` + `no link named "${linkName}" under ${obj.toJSON().multihash}` )) } - ipfs.object.get(nextLink.multihash, pathFn) + + ipfs.object.get(nextObj.multihash, follow.bind(null, links.slice(1))) } - ipfs.object.get(rootHash, pathFn) }, callback) -} +}) + +exports.parseIpfsPath = parseIpfsPath +exports.resolvePaths = resolvePaths diff --git a/test/cli/pin.js b/test/cli/pin.js index 9271ffe410..387f0a65ff 100644 --- a/test/cli/pin.js +++ b/test/cli/pin.js @@ -5,86 +5,98 @@ const expect = require('chai').expect const runOnAndOff = require('../utils/on-and-off') // file structure for recursive tests: -// root (init-docs) -// |`readme -// `docs -// `index +// root (planets/) +// |`solar-system +// `mercury +// `wiki const keys = { - root: 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU', - readme: 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB', - docs: 'QmegvLXxpVKiZ4b57Xs1syfBVRd8CbucVHAp7KpLQdGieC', - index: 'QmQN88TEidd3RY2u3dpib49fERTDfKtDpvxnvczATNsfKT' + root: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys', + mercuryDir: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q', + mercuryWiki: 'QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi', + solarSystem: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG' } -describe('pin', () => runOnAndOff((thing) => { - const filesDir = 'test/fixtures/test-data/recursive-get-dir/init-docs' +describe('pin', () => runOnAndOff.off((thing) => { + // const filesDir = 'test/fixtures/test-data/recursive-get-dir/init-mercuryDir' + const filesDir = 'test/fixtures/planets' let ipfs - before(() => { + before(function () { + this.timeout(15 * 1000) ipfs = thing.ipfs - return ipfs(`files add -r 
${filesDir}`) - }) - // rm first because `files add` should pin recursively by default - it('rm (recursively by default)', () => { - return ipfs(`pin rm ${keys.root}`) - .then((out) => expect(out).to.equal(`unpinned ${keys.root}\n`)) - .then(() => ipfs('pin ls')) - .then((out) => expect(out).to.equal('')) + return ipfs(`files add -r ${filesDir}`) }) - it('add (recursively by default)', () => { - return ipfs(`pin add ${keys.root}`).then((out) => { - expect(out).to.eql(`pinned ${keys.root} recursively\n`) + describe('rm', function () { + it('recursively (default)', function () { + this.timeout(10 * 1000) + return ipfs(`pin rm ${keys.root}`) + .then(out => expect(out).to.equal(`unpinned ${keys.root}\n`)) + .then(() => ipfs('pin ls')) + .then(out => { + Object.values(keys).forEach(hash => expect(out).to.not.include(hash)) + }) }) + + // it('direct', () => { + // return ipfs(`pin rm --recursive false ${keys.solarSystem}`) + // .then(out => expect(out).to.equal(`unpinned ${keys.solarSystem}\n`)) + // .then(() => ipfs('pin ls')) + // .then(out => expect(out).to.not.include(`${keys.solarSystem} direct\n`)) + // }) }) - it('add (direct)', () => { - return ipfs(`pin add ${keys.readme} --recursive false`).then((out) => { - expect(out).to.eql(`pinned ${keys.readme} directly\n`) + describe('add', function () { + it('recursively (default)', () => { + return ipfs(`pin add ${keys.root}`).then(out => { + expect(out).to.eql(`pinned ${keys.root} recursively\n`) + }) }) - }) - it('ls (recursive)', () => { - return ipfs(`pin ls ${keys.root}`).then((out) => { - expect(out).to.eql(`${keys.root} recursive\n`) + it.skip('direct', () => { + return ipfs(`pin add ${keys.solarSystem} --recursive false`).then(out => { + expect(out).to.eql(`pinned ${keys.solarSystem} directly\n`) + }) }) }) - it('ls (direct)', () => { - return ipfs(`pin ls ${keys.readme}`).then((out) => { - expect(out).to.eql(`${keys.readme} direct\n`) + describe('ls', function () { + it('lists recursive pins', () => { + return ipfs(`pin ls ${keys.root}`).then(out => { + expect(out).to.eql(`${keys.root} recursive\n`) + }) }) - }) - it('ls (indirect)', () => { - return ipfs(`pin ls ${keys.index}`).then((out) => { - expect(out).to.eql(`${keys.index} indirect through ${keys.root}\n`) + it('lists direct pins', () => { + return ipfs(`pin ls ${keys.solarSystem}`).then(out => { + expect(out).to.eql(`${keys.solarSystem} direct\n`) + }) }) - }) - it('ls with multiple keys', () => { - return ipfs(`pin ls ${keys.root} ${keys.readme}`).then((out) => { - expect(out).to.eql(`${keys.root} recursive\n${keys.readme} direct\n`) + it.skip('lists indirect pins', function () { + this.timeout(25 * 1000) + + return ipfs(`pin ls ${keys.mercuryWiki}`).then(out => { + expect(out).to.include(keys.mercuryWiki) + }) }) - }) - it('ls (all)', () => { - return ipfs('pin ls').then((out) => { - expect(out.split('\n').length).to.eql(12) - expect(out).to.include(`${keys.root} recursive\n`) - expect(out).to.include(`${keys.readme} direct\n`) - expect(out).to.include(`${keys.docs} indirect\n`) - expect(out).to.include(`${keys.index} indirect\n`) + it('handles multiple hashes', () => { + return ipfs(`pin ls ${keys.root} ${keys.solarSystem}`).then(out => { + expect(out).to.eql(`${keys.root} recursive\n${keys.solarSystem} direct\n`) + }) }) - }) - it('rm (direct)', () => { - return ipfs(`pin rm --recursive false ${keys.readme}`) - .then((out) => expect(out).to.equal(`unpinned ${keys.readme}\n`)) - .then(() => ipfs('pin ls')) - .then((out) => expect(out).to.not.include(`${keys.readme} 
direct\n`)) + it('lists all pins when no hash is passed', () => { + return ipfs('pin ls').then(out => { + expect(out).to.include(`${keys.root} recursive\n`) + expect(out).to.include(`${keys.solarSystem} direct\n`) + expect(out).to.include(`${keys.mercuryDir} indirect\n`) + expect(out).to.include(`${keys.mercuryWiki} indirect\n`) + }) + }) }) })) diff --git a/test/core/utils.spec.js b/test/core/utils.spec.js index 638aebf837..c51b609e1e 100644 --- a/test/core/utils.spec.js +++ b/test/core/utils.spec.js @@ -16,16 +16,16 @@ const utils = require('../../src/core/utils') describe('utils', () => { const rootHashString = 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU' const rootHash = multihashes.fromB58String(rootHashString) - const rootPathString = `/ipfs/${rootHashString}/` + const rootPathString = `/ipfs/${rootHashString}` const aboutHashString = 'QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V' const aboutHash = multihashes.fromB58String(aboutHashString) - const aboutPathString = `/ipfs/${rootHashString}/about` + const aboutPathString = `${rootPathString}/about` describe('parseIpfsPath', () => { it('parses path with no links', function () { expect(utils.parseIpfsPath(rootHashString)) .to.deep.equal({ - root: rootHashString, + hash: rootHashString, links: [] }) }) @@ -33,7 +33,7 @@ describe('utils', () => { it('parses path with links', function () { expect(utils.parseIpfsPath(`${rootHashString}/docs/index`)) .to.deep.equal({ - root: rootHashString, + hash: rootHashString, links: ['docs', 'index'] }) }) @@ -41,25 +41,23 @@ describe('utils', () => { it('parses path with /ipfs/ prefix', function () { expect(utils.parseIpfsPath(`/ipfs/${rootHashString}/about`)) .to.deep.equal({ - root: rootHashString, + hash: rootHashString, links: ['about'] }) }) it('returns error for malformed path', function () { - const result = utils.parseIpfsPath(`${rootHashString}//about`) - expect(result.error).to.be.instanceof(Error) - .and.have.property('message', 'invalid ipfs ref path') + const fn = () => utils.parseIpfsPath(`${rootHashString}//about`) + expect(fn).to.throw('invalid ipfs ref path') }) - it('returns error if root is not a valid CID', function () { - const result = utils.parseIpfsPath('invalid/ipfs/path') - expect(result.error).to.be.instanceof(Error) - .and.have.property('message', 'invalid ipfs ref path') + it('returns error if root is not a valid multihash', function () { + const fn = () => utils.parseIpfsPath('invalid/ipfs/path') + expect(fn).to.throw('invalid ipfs ref path') }) }) - describe('normalizeHashes', function () { + describe('resolvePaths', function () { this.timeout(80 * 1000) let node let repo @@ -77,7 +75,7 @@ describe('utils', () => { }) it('normalizes hash string to array with multihash object', (done) => { - utils.normalizeHashes(node, rootHashString, (err, hashes) => { + utils.resolvePaths(node, rootHashString, (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(1) expect(hashes[0]).to.deep.equal(rootHash) @@ -86,7 +84,7 @@ describe('utils', () => { }) it('normalizes array of hash strings to array of multihash objects', (done) => { - utils.normalizeHashes(node, [rootHashString, aboutHashString], (err, hashes) => { + utils.resolvePaths(node, [rootHashString, aboutHashString], (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(2) expect(hashes[0]).to.deep.equal(rootHash) @@ -96,7 +94,7 @@ describe('utils', () => { }) it('normalizes multihash object to array with multihash object', (done) => { - utils.normalizeHashes(node, aboutHash, 
(err, hashes) => { + utils.resolvePaths(node, aboutHash, (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(1) expect(hashes[0]).to.deep.equal(aboutHash) @@ -105,7 +103,7 @@ describe('utils', () => { }) it('normalizes array of multihash objects to array of multihash objects', (done) => { - utils.normalizeHashes(node, [rootHash, aboutHash], (err, hashes) => { + utils.resolvePaths(node, [rootHash, aboutHash], (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(2) expect(hashes[0]).to.deep.equal(rootHash) @@ -115,7 +113,7 @@ describe('utils', () => { }) it('normalizes ipfs path string to array with multihash object', (done) => { - utils.normalizeHashes(node, aboutPathString, (err, hashes) => { + utils.resolvePaths(node, aboutPathString, (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(1) expect(hashes[0]).to.deep.equal(aboutHash) @@ -124,7 +122,7 @@ describe('utils', () => { }) it('normalizes array of ipfs path strings to array with multihash objects', (done) => { - utils.normalizeHashes(node, [aboutPathString, rootPathString], (err, hashes) => { + utils.resolvePaths(node, [aboutPathString, rootPathString], (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(2) expect(hashes[0]).to.deep.equal(aboutHash) @@ -132,5 +130,21 @@ describe('utils', () => { done() }) }) + + it('should error on invalid hashes', function (done) { + utils.resolvePaths(node, '/ipfs/asdlkjahsdfkjahsdfd', err => { + expect(err).to.exist() + done() + }) + }) + + it(`should error when a link doesn't exist`, function (done) { + utils.resolvePaths(node, `${aboutPathString}/fusion`, err => { + expect(err.message).to.include( + `no link named "fusion" under QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V` + ) + done() + }) + }) }) }) diff --git a/test/fixtures/planets/mercury/wiki.md b/test/fixtures/planets/mercury/wiki.md new file mode 100644 index 0000000000..1b4039ba80 --- /dev/null +++ b/test/fixtures/planets/mercury/wiki.md @@ -0,0 +1,12 @@ +# Mercury (planet) +> From Wikipedia, the free encyclopedia + +Mercury is the smallest and innermost planet in the Solar System. Its orbital period around the Sun of 87.97 days is the shortest of all the planets in the Solar System. It is named after the Roman deity Mercury, the messenger of the gods. + +Like Venus, Mercury orbits the Sun within Earth's orbit as an inferior planet, and never exceeds 28° away from the Sun. When viewed from Earth, this proximity to the Sun means the planet can only be seen near the western or eastern horizon during the early evening or early morning. At this time it may appear as a bright star-like object, but is often far more difficult to observe than Venus. The planet telescopically displays the complete range of phases, similar to Venus and the Moon, as it moves in its inner orbit relative to Earth, which reoccurs over the so-called synodic period approximately every 116 days. + +Mercury is gravitationally locked with the Sun in a 3:2 spin-orbit resonance, and rotates in a way that is unique in the Solar System. As seen relative to the fixed stars, it rotates on its axis exactly three times for every two revolutions it makes around the Sun. As seen from the Sun, in a frame of reference that rotates with the orbital motion, it appears to rotate only once every two Mercurian years. An observer on Mercury would therefore see only one day every two years. + +Mercury's axis has the smallest tilt of any of the Solar System's planets (about ​1⁄30 degree). 
Its orbital eccentricity is the largest of all known planets in the Solar System; at perihelion, Mercury's distance from the Sun is only about two-thirds (or 66%) of its distance at aphelion. Mercury's surface appears heavily cratered and is similar in appearance to the Moon's, indicating that it has been geologically inactive for billions of years. Having almost no atmosphere to retain heat, it has surface temperatures that vary diurnally more than on any other planet in the Solar System, ranging from 100 K (−173 °C; −280 °F) at night to 700 K (427 °C; 800 °F) during the day across the equatorial regions. The polar regions are constantly below 180 K (−93 °C; −136 °F). The planet has no known natural satellites. + +Two spacecraft have visited Mercury: Mariner 10 flew by in 1974 and 1975; and MESSENGER, launched in 2004, orbited Mercury over 4,000 times in four years before exhausting its fuel and crashing into the planet's surface on April 30, 2015. diff --git a/test/fixtures/planets/solar-system.md b/test/fixtures/planets/solar-system.md new file mode 100644 index 0000000000..f249cd3a53 --- /dev/null +++ b/test/fixtures/planets/solar-system.md @@ -0,0 +1,10 @@ +# Solar System +> From Wikipedia, the free encyclopedia + +The Solar System is the gravitationally bound system comprising the Sun and the objects that orbit it, either directly or indirectly. Of those objects that orbit the Sun directly, the largest eight are the planets, with the remainder being smaller objects, such as dwarf planets and small Solar System bodies. Of the objects that orbit the Sun indirectly, the moons, two are larger than the smallest planet, Mercury. + +The Solar System formed 4.6 billion years ago from the gravitational collapse of a giant interstellar molecular cloud. The vast majority of the system's mass is in the Sun, with the majority of the remaining mass contained in Jupiter. The four smaller inner planets, Mercury, Venus, Earth and Mars, are terrestrial planets, being primarily composed of rock and metal. The four outer planets are giant planets, being substantially more massive than the terrestrials. The two largest, Jupiter and Saturn, are gas giants, being composed mainly of hydrogen and helium; the two outermost planets, Uranus and Neptune, are ice giants, being composed mostly of substances with relatively high melting points compared with hydrogen and helium, called volatiles, such as water, ammonia and methane. All eight planets have almost circular orbits that lie within a nearly flat disc called the ecliptic. + +The Solar System also contains smaller objects. The asteroid belt, which lies between the orbits of Mars and Jupiter, mostly contains objects composed, like the terrestrial planets, of rock and metal. Beyond Neptune's orbit lie the Kuiper belt and scattered disc, which are populations of trans-Neptunian objects composed mostly of ices, and beyond them a newly discovered population of sednoids. Within these populations are several dozen to possibly tens of thousands of objects large enough that they have been rounded by their own gravity. Such objects are categorized as dwarf planets. Identified dwarf planets include the asteroid Ceres and the trans-Neptunian objects Pluto and Eris. In addition to these two regions, various other small-body populations, including comets, centaurs and interplanetary dust clouds, freely travel between regions. 
Six of the planets, at least four of the dwarf planets, and many of the smaller bodies are orbited by natural satellites, usually termed "moons" after the Moon. Each of the outer planets is encircled by planetary rings of dust and other small objects.
+
+The solar wind, a stream of charged particles flowing outwards from the Sun, creates a bubble-like region in the interstellar medium known as the heliosphere. The heliopause is the point at which pressure from the solar wind is equal to the opposing pressure of the interstellar medium; it extends out to the edge of the scattered disc. The Oort cloud, which is thought to be the source for long-period comets, may also exist at a distance roughly a thousand times further than the heliosphere. The Solar System is located in the Orion Arm, 26,000 light-years from the center of the Milky Way.

From be8747ee937db15766ea41089cb5727f55222b71 Mon Sep 17 00:00:00 2001
From: jonkrone 
Date: Sun, 18 Mar 2018 23:41:49 -0400
Subject: [PATCH 04/21] fix: add some changes missed during rebase, syntax fixes, etc

I think my original rebase for this branch two weeks ago might have changed
history for the intervening commits, indirectly causing some of these missed
changes, or I may simply have rebased onto the wrong old parent.

fix: some onlyHash and pin tests broke after merging

onlyHash and pin interact: we shouldn't pin when --only-hash is set.

fix: trim output for 'pin ls when no hash is passed'

test: indirect pins supersede direct pins: turns out we had a bug

feat: add expectTimeout test utility

feat: promisify some additional pin utils
---
 package.json | 4 ++-
 src/cli/commands/files/add.js | 3 ++-
 src/cli/commands/pin/add.js | 7 +++--
 src/cli/commands/pin/rm.js | 5 ++--
 src/core/boot.js | 1 +
 src/core/components/dag.js | 27 +++++++++++++++++++
 src/core/components/files.js | 24 ++++++++++++++---
 src/core/components/index.js | 1 +
 src/core/components/pin.js | 12 ++++-----
 src/core/index.js | 1 +
 src/http/api/resources/files.js | 3 ++-
 src/http/api/resources/index.js | 1 +
 src/http/api/routes/index.js | 1 +
 src/http/api/routes/pin.js | 40 ++++++++++++++++++++++++++++
 test/cli/commands.js | 3 +--
 test/cli/files.js | 47 ++++++++++++++++++++-------------
 test/cli/pin.js | 20 +++++++-------
 test/utils/expect-timeout.js | 16 +++++++++++
 18 files changed, 168 insertions(+), 48 deletions(-)
 create mode 100644 src/http/api/routes/pin.js
 create mode 100644 test/utils/expect-timeout.js

diff --git a/package.json b/package.json
index dba09a090d..84d66557a2 100644
--- a/package.json
+++ b/package.json
@@ -98,6 +98,7 @@
     "debug": "^3.1.0",
     "file-type": "^7.7.1",
     "filesize": "^3.6.1",
+    "fnv1a": "^1.0.1",
     "fsm-event": "^2.1.0",
     "get-folder-size": "^1.0.1",
     "glob": "^7.1.2",
@@ -106,6 +107,7 @@
     "hoek": "^5.0.3",
     "human-to-milliseconds": "^1.0.0",
     "ipfs-api": "^22.0.0",
+    "interface-datastore": "^0.4.1",
     "ipfs-bitswap": "~0.20.0",
     "ipfs-block": "~0.7.1",
     "ipfs-block-service": "~0.14.0",
@@ -218,7 +220,7 @@
     "Jade Meskill ",
     "Johannes Wikner ",
     "Jon Schlinkert ",
-    "Jonathan ",
+    "Jonathan Krone ",
     "João Antunes ",
     "João Santos ",
     "Kevin Wang ",
diff --git a/src/cli/commands/files/add.js b/src/cli/commands/files/add.js
index cd0106e86d..d4a9ab764a 100644
--- a/src/cli/commands/files/add.js
+++ b/src/cli/commands/files/add.js
@@ -193,7 +193,8 @@ module.exports = {
       rawLeaves: argv.rawLeaves,
       onlyHash: argv.onlyHash,
       hashAlg: argv.hash,
-      wrapWithDirectory: argv.wrapWithDirectory
+      wrapWithDirectory: argv.wrapWithDirectory,
+      pin: argv.pin
     }

     // Temporary restriction on raw-leaves:
diff --git 
a/src/cli/commands/pin/add.js b/src/cli/commands/pin/add.js index 7b1a220aa5..3acf438022 100644 --- a/src/cli/commands/pin/add.js +++ b/src/cli/commands/pin/add.js @@ -3,9 +3,9 @@ const print = require('../../utils').print module.exports = { - command: 'add ', + command: 'add ', - describe: 'Pins objects to local storage.', + describe: 'Pins object to local storage.', builder: { recursive: { @@ -17,10 +17,9 @@ module.exports = { }, handler (argv) { - const paths = argv['ipfs-path'].split(' ') const recursive = argv.recursive const type = recursive ? 'recursive' : 'direct' - argv.ipfs.pin.add(paths[0], { recursive: recursive }, (err, results) => { + argv.ipfs.pin.add(argv.ipfsPath, { recursive: recursive }, (err, results) => { if (err) { throw err } results.forEach((res) => { print(`pinned ${res.hash} ${type}ly`) diff --git a/src/cli/commands/pin/rm.js b/src/cli/commands/pin/rm.js index f3d1e7cf7f..b19454d268 100644 --- a/src/cli/commands/pin/rm.js +++ b/src/cli/commands/pin/rm.js @@ -3,7 +3,7 @@ const print = require('../../utils').print module.exports = { - command: 'rm ', + command: 'rm ', describe: 'Removes the pinned object from local storage.', @@ -17,9 +17,8 @@ module.exports = { }, handler: (argv) => { - const paths = argv['ipfs-path'].split(' ') const recursive = argv.recursive - argv.ipfs.pin.rm(paths, { recursive: recursive }, (err, results) => { + argv.ipfs.pin.rm(argv.ipfsPath, { recursive: recursive }, (err, results) => { if (err) { throw err } results.forEach((res) => { print(`unpinned ${res.hash}`) diff --git a/src/core/boot.js b/src/core/boot.js index 113c9919bf..2d40ff5f7c 100644 --- a/src/core/boot.js +++ b/src/core/boot.js @@ -29,6 +29,7 @@ module.exports = (self) => { series([ (cb) => self._repo.open(cb), + (cb) => self.pin.load(cb), (cb) => self.preStart(cb), (cb) => { self.log('initialized') diff --git a/src/core/components/dag.js b/src/core/components/dag.js index b5d77e0c6c..5c7f749935 100644 --- a/src/core/components/dag.js +++ b/src/core/components/dag.js @@ -3,6 +3,8 @@ const promisify = require('promisify-es6') const CID = require('cids') const pull = require('pull-stream') +const _ = require('lodash') +const once = require('once') module.exports = function dag (self) { return { @@ -73,6 +75,31 @@ module.exports = function dag (self) { self._ipld.treeStream(cid, path, options), pull.collect(callback) ) + }), + + // TODO - move to IPLD resolver and generalize to other IPLD formats + _getRecursive: promisify((multihash, callback) => { + // gets flat array of all DAGNodes in tree given by multihash + callback = once(callback) + self.dag.get(new CID(multihash), (err, res) => { + if (err) { return callback(err) } + const links = res.value.links + const nodes = [res.value] + // leaf case + if (!links.length) { + return callback(null, nodes) + } + // branch case + links.forEach(link => { + self.dag._getRecursive(link.multihash, (err, subNodes) => { + if (err) { return callback(err) } + nodes.push(subNodes) + if (nodes.length === links.length + 1) { + return callback(null, _.flattenDeep(nodes)) + } + }) + }) + }) }) } } diff --git a/src/core/components/files.js b/src/core/components/files.js index b7faf7e9d9..e40b6fb1d0 100644 --- a/src/core/components/files.js +++ b/src/core/components/files.js @@ -33,7 +33,9 @@ function prepareFile (self, opts, file, callback) { } waterfall([ - (cb) => opts.onlyHash ? cb(null, file) : self.object.get(file.multihash, opts, cb), + (cb) => opts.onlyHash + ? 
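
Because _getRecursive above is promisify'd, the link-counting callback pattern it uses has a compact promise equivalent. A rough sketch (not part of the patch, and assuming dag.get likewise returns a promise when no callback is given):

    const CID = require('cids')

    // collect the root DAGNode plus every node reachable through its links
    function getRecursive (dag, multihash) {
      return dag.get(new CID(multihash)).then(res => {
        const node = res.value
        return Promise.all(
          node.links.map(link => getRecursive(dag, link.multihash))
        ).then(children => [node].concat(...children))
      })
    }
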
cb(null, file) + : self.object.get(file.multihash, opts, cb), (node, cb) => { const b58Hash = cid.toBaseEncodedString() @@ -88,6 +90,22 @@ function normalizeContent (opts, content) { }) } +function pinFile (self, opts, file, cb) { + // since adding paths like `directory/filename` automatically + // adds the directory as well as the file, we can just pin the target file + // and all parent dirs will be pinned indirectly + const pin = 'pin' in opts ? opts.pin : true + const isTargetFile = !file.path.includes('/') + const shouldPin = pin && isTargetFile && !opts.onlyHash + if (shouldPin) { + self.pin.add(file.hash, (err) => { + cb(err, file) + }) + } else { + cb(null, file) + } +} + class AddHelper extends Duplex { constructor (pullStream, push, options) { super(Object.assign({ objectMode: true }, options)) @@ -131,7 +149,7 @@ module.exports = function files (self) { } let total = 0 - const shouldPin = 'pin' in opts ? opts.pin : true + const prog = opts.progress || noop const progress = (bytes) => { total += bytes @@ -144,7 +162,7 @@ module.exports = function files (self) { pull.flatten(), importer(self._ipld, opts), pull.asyncMap(prepareFile.bind(null, self, opts)), - shouldPin ? pull.asyncMap(pinFile.bind(null, self)) : identity + pull.asyncMap(pinFile.bind(null, self, opts)) ) } diff --git a/src/core/components/index.js b/src/core/components/index.js index ce95b27a53..7221575007 100644 --- a/src/core/components/index.js +++ b/src/core/components/index.js @@ -18,6 +18,7 @@ exports.swarm = require('./swarm') exports.ping = require('./ping') exports.pingPullStream = require('./ping-pull-stream') exports.pingReadableStream = require('./ping-readable-stream') +exports.pin = require('./pin') exports.files = require('./files') exports.bitswap = require('./bitswap') exports.pubsub = require('./pubsub') diff --git a/src/core/components/pin.js b/src/core/components/pin.js index e554284088..31d7d047e0 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -247,11 +247,11 @@ module.exports = function pin (self) { } }), - isPinned: (multihash, callback) => { + isPinned: promisify((multihash, callback) => { pin.isPinnedWithType(multihash, pin.types.all, callback) - }, + }), - isPinnedWithType: (multihash, pinType, callback) => { + isPinnedWithType: promisify((multihash, pinType, callback) => { const key = toB58String(multihash) // recursive if ((pinType === pin.types.recursive || pinType === pin.types.all) && @@ -302,7 +302,7 @@ module.exports = function pin (self) { return callback(null, {pinned: found, reason: result}) } ) - }, + }), directKeyStrings: () => Array.from(directPins), @@ -316,7 +316,7 @@ module.exports = function pin (self) { internalKeys: () => pin.internalKeyStrings().map(key => multihashes.fromB58String(key)), - getIndirectKeys: (callback) => { + getIndirectKeys: promisify(callback => { const indirectKeys = new Set() const rKeys = pin.recursiveKeys() each(rKeys, (multihash, cb) => { @@ -335,7 +335,7 @@ module.exports = function pin (self) { if (err) { return callback(err) } callback(null, Array.from(indirectKeys)) }) - }, + }), // encodes and writes pin key sets to the datastore // each key set will be stored as a DAG node, and a root node will link to both diff --git a/src/core/index.js b/src/core/index.js index 0b19160429..57efa58385 100644 --- a/src/core/index.js +++ b/src/core/index.js @@ -101,6 +101,7 @@ class IPFS extends EventEmitter { this.swarm = components.swarm(this) this.files = components.files(this) this.bitswap = components.bitswap(this) + this.pin = 
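
With pinFile folded into the add pipeline above, pinning is decided per call. Only entries whose path contains no '/' are pinned explicitly, so the root of an added tree receives the recursive pin and everything beneath it is covered indirectly. In caller terms (content illustrative):

    // pinned by default
    ipfs.files.add({ path: 'a.txt', content: Buffer.from('hello') }, (err, res) => {})

    // not pinned: pinFile passes each file straight through
    ipfs.files.add({ path: 'a.txt', content: Buffer.from('hello') }, { pin: false }, (err, res) => {})
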
components.pin(this) this.ping = components.ping(this) this.pingPullStream = components.pingPullStream(this) this.pingReadableStream = components.pingReadableStream(this) diff --git a/src/http/api/resources/files.js b/src/http/api/resources/files.js index 6f889926ef..1cd93e158e 100644 --- a/src/http/api/resources/files.js +++ b/src/http/api/resources/files.js @@ -228,7 +228,8 @@ exports.add = { progress: request.query.progress ? progressHandler : null, onlyHash: request.query['only-hash'], hashAlg: request.query['hash'], - wrapWithDirectory: request.query['wrap-with-directory'] + wrapWithDirectory: request.query['wrap-with-directory'], + pin: request.query.pin } const aborter = abortable() diff --git a/src/http/api/resources/index.js b/src/http/api/resources/index.js index 37f38f246b..59040a99d8 100644 --- a/src/http/api/resources/index.js +++ b/src/http/api/resources/index.js @@ -7,6 +7,7 @@ exports.ping = require('./ping') exports.bootstrap = require('./bootstrap') exports.repo = require('./repo') exports.object = require('./object') +exports.pin = require('./pin') exports.config = require('./config') exports.block = require('./block') exports.swarm = require('./swarm') diff --git a/src/http/api/routes/index.js b/src/http/api/routes/index.js index d7c30851f7..bfec26a460 100644 --- a/src/http/api/routes/index.js +++ b/src/http/api/routes/index.js @@ -7,6 +7,7 @@ module.exports = (server) => { require('./bootstrap')(server) require('./block')(server) require('./object')(server) + require('./pin')(server) require('./repo')(server) require('./config')(server) require('./ping')(server) diff --git a/src/http/api/routes/pin.js b/src/http/api/routes/pin.js new file mode 100644 index 0000000000..657bb375ac --- /dev/null +++ b/src/http/api/routes/pin.js @@ -0,0 +1,40 @@ +'use strict' + +const resources = require('./../resources') + +module.exports = (server) => { + const api = server.select('API') + + api.route({ + method: '*', + path: '/api/v0/pin/add', + config: { + pre: [ + { method: resources.pin.add.parseArgs, assign: 'args' } + ], + handler: resources.pin.add.handler + } + }) + + api.route({ + method: '*', + path: '/api/v0/pin/rm', + config: { + pre: [ + { method: resources.pin.rm.parseArgs, assign: 'args' } + ], + handler: resources.pin.rm.handler + } + }) + + api.route({ + method: '*', + path: '/api/v0/pin/ls', + config: { + pre: [ + { method: resources.pin.ls.parseArgs, assign: 'args' } + ], + handler: resources.pin.ls.handler + } + }) +} diff --git a/test/cli/commands.js b/test/cli/commands.js index 1e194eb143..37ecc0ae74 100644 --- a/test/cli/commands.js +++ b/test/cli/commands.js @@ -4,8 +4,7 @@ const expect = require('chai').expect const runOnAndOff = require('../utils/on-and-off') -const commandCount = 74 - +const commandCount = 77 describe('commands', () => runOnAndOff((thing) => { let ipfs diff --git a/test/cli/files.js b/test/cli/files.js index eced37b173..aa7f881a9e 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -5,6 +5,7 @@ const fs = require('fs') const os = require('os') const expect = require('chai').expect const path = require('path') +const hat = require('hat') const compareDir = require('dir-compare').compareSync const rimraf = require('rimraf').sync const CID = require('cids') @@ -287,31 +288,31 @@ describe('files', () => runOnAndOff((thing) => { it('add --only-hash outputs correct hash', function () { return ipfs('files add --only-hash src/init-files/init-docs/readme') - .then(out => - expect(out) - .to.eql('added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB 
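
Each pin route above pairs a parseArgs pre-handler, which validates the arg and recursive query parameters, with the matching resource handler. The http-api tests exercise these routes with hapi's inject; a sketch in that style (the hash variable is illustrative):

    api.inject({
      method: 'POST',
      url: `/api/v0/pin/add?arg=${hash}&recursive=true`
    }, (res) => {
      // expect res.statusCode === 200 once the hash is pinned
    })
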
readme\n') - ) + .then(out => + expect(out) + .to.eql('added QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB readme\n') + ) }) it('add --only-hash does not add a file to the datastore', function () { this.timeout(30 * 1000) this.slow(10 * 1000) - const content = String(Math.random() + Date.now()) + const content = String(Math.random()) const filepath = path.join(os.tmpdir(), `${content}.txt`) fs.writeFileSync(filepath, content) return ipfs(`files add --only-hash ${filepath}`) - .then(out => { - const hash = out.split(' ')[1] - - // 'jsipfs object get ' should timeout with the daemon on - // and should fail fast with the daemon off - return Promise.race([ - ipfs.fail(`object get ${hash}`), - new Promise((resolve, reject) => setTimeout(resolve, 4000)) - ]) - .then(() => fs.unlinkSync(filepath)) - }) + .then(out => { + const hash = out.split(' ')[1] + + // 'jsipfs object get ' should timeout with the daemon on + // and should fail fast with the daemon off + return Promise.race([ + ipfs.fail(`object get ${hash}`), + new Promise((resolve, reject) => setTimeout(resolve, 4000)) + ]) + .then(() => fs.unlinkSync(filepath)) + }) }) it('add pins by default', function () { @@ -329,13 +330,23 @@ describe('files', () => runOnAndOff((thing) => { }) it('add does not pin with --pin=false', function () { + this.timeout(20 * 1000) const filePath = path.join(os.tmpdir(), hat()) const content = String(Math.random()) const file = fs.writeFileSync(filePath, content) return ipfs(`files add -Q --pin=false ${filePath}`) - .then(out => ipfs(`pin ls ${out.trim()}`)) - .then(ls => expect(ls.trim()).to.eql('')) + .then(out => { + const lsAttempt = ipfs(`pin ls ${out.trim()}`) + .then(ls => { + expect(ls.trim()).to.eql('') + }) + + return Promise.race([ + lsAttempt, + new Promise((resolve, reject) => setTimeout(resolve, 4000)) + ]) + }) .then(() => fs.unlinkSync(filePath)) }) diff --git a/test/cli/pin.js b/test/cli/pin.js index 387f0a65ff..54ca95672a 100644 --- a/test/cli/pin.js +++ b/test/cli/pin.js @@ -56,9 +56,9 @@ describe('pin', () => runOnAndOff.off((thing) => { }) }) - it.skip('direct', () => { + it('direct', () => { return ipfs(`pin add ${keys.solarSystem} --recursive false`).then(out => { - expect(out).to.eql(`pinned ${keys.solarSystem} directly\n`) + expect(out).to.eql(`pinned ${keys.solarSystem} indirectly\n`) }) }) }) @@ -76,11 +76,15 @@ describe('pin', () => runOnAndOff.off((thing) => { }) }) - it.skip('lists indirect pins', function () { + it('lists indirect pins', function () { this.timeout(25 * 1000) - return ipfs(`pin ls ${keys.mercuryWiki}`).then(out => { - expect(out).to.include(keys.mercuryWiki) + return ipfs('pin ls').then(out => { + console.log('pin ls out:', out) + return ipfs(`pin ls ${keys.mercuryWiki}`).then(out => { + console.log('ls mercuryWiki:', out) + expect(out).to.include(keys.mercuryWiki) + }) }) }) @@ -92,10 +96,8 @@ describe('pin', () => runOnAndOff.off((thing) => { it('lists all pins when no hash is passed', () => { return ipfs('pin ls').then(out => { - expect(out).to.include(`${keys.root} recursive\n`) - expect(out).to.include(`${keys.solarSystem} direct\n`) - expect(out).to.include(`${keys.mercuryDir} indirect\n`) - expect(out).to.include(`${keys.mercuryWiki} indirect\n`) + const hashes = out.split('\n').map(line => line.split(' ')[0]) + expect(hashes).to.include.members(Object.values(keys)) }) }) }) diff --git a/test/utils/expect-timeout.js b/test/utils/expect-timeout.js new file mode 100644 index 0000000000..51c7330755 --- /dev/null +++ b/test/utils/expect-timeout.js @@ -0,0 +1,16 
@@ +'use strict' + +/** + * Resolve if @param promise hangs for at least @param ms, throw otherwise + * @param {Promise} promise promise that you expect to hang + * @param {Number} ms millis to wait + * @return {Promise} + */ +module.exports = (promise, ms) => { + return Promise.race([ + promise.then((out) => { + throw new Error('Expected Promise to timeout but it was successful.') + }), + new Promise((resolve, reject) => setTimeout(resolve, ms)) + ]) +} From beead9a968f322fe6c31907bb913c8e72f72bc26 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Mon, 19 Mar 2018 20:41:00 -0400 Subject: [PATCH 05/21] test: initial work testing the core/pin.js implementation I think I'll end up moving most tests here. test: add tests for pin.ls and pin.rm Based tests on other pin fixtures, need to migrate the isPinned* tests to them as well. fix: direct pins are now deleted by a default pin.rm(hash) test: prepare for pin.add tests 'indirect supersedes direct' test exposes a bug in pin.ls feat: switch away from multihashes for isPinned* tests test: impl pin.add tests fix: add fixture files only once test: add test for a potential bug, clean isPinned* tests refactor: remove a test that's no longer needed fix: pin.ls, indirect pins should supersede direct pins test: naive pin.load, pin.flush tests feat: remove most pin cli tests as functionality is tested in pin core tests refactor: rename solarSystem --- src/cli/commands/pin/ls.js | 2 +- src/core/components/pin-set.js | 4 +- src/core/components/pin.js | 41 ++++- test/cli/pin.js | 125 +++++-------- test/core/pin.js | 328 +++++++++++++++++++++++++++++++++ 5 files changed, 412 insertions(+), 88 deletions(-) create mode 100644 test/core/pin.js diff --git a/src/cli/commands/pin/ls.js b/src/cli/commands/pin/ls.js index 164c544523..04b687ace5 100644 --- a/src/cli/commands/pin/ls.js +++ b/src/cli/commands/pin/ls.js @@ -25,7 +25,7 @@ module.exports = { }, handler: (argv) => { - const paths = argv.ipfsPath || '' + const paths = argv.ipfsPath const type = argv.type const quiet = argv.quiet diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js index a0ea499051..469217337d 100644 --- a/src/core/components/pin-set.js +++ b/src/core/components/pin-set.js @@ -196,7 +196,9 @@ exports = module.exports = function (dag) { loadSet: (rootNode, name, logInternalKey, callback) => { callback = once(callback) const link = rootNode.links.find(l => l.name === name) - if (!link) { return callback(new Error('No link found with name ' + name)) } + if (!link) { + return callback(new Error('No link found with name ' + name)) + } logInternalKey(link.multihash) dag.get(new CID(link.multihash), (err, res) => { if (err) { return callback(err) } diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 31d7d047e0..25c7c6fd3e 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -93,7 +93,7 @@ module.exports = function pin (self) { if (recursive) { results.forEach(key => { // recursive pin should replace direct pin - directPins.delete(key) + directPins.delete(key) // TODO questionable recursivePins.add(key) }) } else { @@ -128,6 +128,7 @@ module.exports = function pin (self) { if (!pinned) { return cb(new Error(`${key} is not pinned`)) } + switch (reason) { case (pin.types.recursive): if (recursive) { @@ -145,9 +146,20 @@ module.exports = function pin (self) { }) }), (err, results) => { if (err) { return callback(err) } + // update the pin sets in memory - const pins = recursive ? 
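
The reworked removal above mirrors the add-side rules: a recursive pin is removed with recursive: true (the default), a direct pin can be removed either way, and an indirect pin only disappears when its covering recursive root is unpinned. From the caller's side:

    ipfs.pin.rm(hash, { recursive: true }, (err, results) => {
      // err matches /is pinned indirectly under/ when the hash is only
      // reachable from another recursive pin
      if (err) throw err
      results.forEach(r => console.log(`unpinned ${r.hash}`))
    })
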
recursivePins : directPins - results.forEach(key => pins.delete(key)) + results.forEach(key => { + if (recursive) { + if (recursivePins.has(key)) { + recursivePins.delete(key) + } else { + directPins.delete(key) + } + } else { + directPins.delete(key) + } + }) + // persist updated pin sets to datastore pin.flush((err, root) => { if (err) { return callback(err) } @@ -213,9 +225,9 @@ module.exports = function pin (self) { }) } else { // show all pinned items of type - const result = [] + let result = [] if (type === pin.types.direct || type === pin.types.all) { - pin.directKeyStrings().forEach((hash) => { + pin.directKeyStrings().forEach(hash => { result.push({ type: pin.types.direct, hash: hash @@ -223,7 +235,7 @@ module.exports = function pin (self) { }) } if (type === pin.types.recursive || type === pin.types.all) { - pin.recursiveKeyStrings().forEach((hash) => { + pin.recursiveKeyStrings().forEach(hash => { result.push({ type: pin.types.recursive, hash: hash @@ -233,7 +245,12 @@ module.exports = function pin (self) { if (type === pin.types.indirect || type === pin.types.all) { pin.getIndirectKeys((err, hashes) => { if (err) { return callback(err) } - hashes.forEach((hash) => { + hashes.forEach(hash => { + if (directPins.has(hash)) { + // if an indirect pin is also pinned directly, + // use only the indirect entry + result = result.filter(pin => pin.hash !== hash) + } result.push({ type: pin.types.indirect, hash: hash @@ -321,14 +338,18 @@ module.exports = function pin (self) { const rKeys = pin.recursiveKeys() each(rKeys, (multihash, cb) => { dag._getRecursive(multihash, (err, nodes) => { - if (err) { return cb(err) } - nodes.forEach((node) => { + if (err) { + return cb(err) + } + + nodes.forEach(node => { const key = toB58String(node.multihash) - if (!directPins.has(key) && !recursivePins.has(key)) { + if (!recursivePins.has(key)) { // not already pinned recursively or directly indirectKeys.add(key) } }) + cb() }) }, (err) => { diff --git a/test/cli/pin.js b/test/cli/pin.js index 54ca95672a..4543d97d1f 100644 --- a/test/cli/pin.js +++ b/test/cli/pin.js @@ -4,101 +4,74 @@ const expect = require('chai').expect const runOnAndOff = require('../utils/on-and-off') -// file structure for recursive tests: -// root (planets/) -// |`solar-system -// `mercury -// `wiki - -const keys = { +// fixture structure: +// planets/ +// solar-system.md +// mercury/ +// wiki.md +const fixturePath = 'test/fixtures/planets' +const hashes = { root: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys', + solarWiki: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG', mercuryDir: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q', - mercuryWiki: 'QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi', - solarSystem: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG' + mercuryWiki: 'QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi' } -describe('pin', () => runOnAndOff.off((thing) => { - // const filesDir = 'test/fixtures/test-data/recursive-get-dir/init-mercuryDir' - const filesDir = 'test/fixtures/planets' - - let ipfs - - before(function () { - this.timeout(15 * 1000) - ipfs = thing.ipfs - - return ipfs(`files add -r ${filesDir}`) - }) - - describe('rm', function () { - it('recursively (default)', function () { - this.timeout(10 * 1000) - return ipfs(`pin rm ${keys.root}`) - .then(out => expect(out).to.equal(`unpinned ${keys.root}\n`)) - .then(() => ipfs('pin ls')) - .then(out => { - Object.values(keys).forEach(hash => expect(out).to.not.include(hash)) - }) - }) - - // it('direct', () => { - // return ipfs(`pin rm --recursive false 
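
pin.ls now resolves the direct/indirect overlap in favour of the indirect entry ('indirect pins supersede direct pins', as the core tests put it), so a full listing never reports the same hash twice:

    ipfs.pin.ls()
      .then(results => {
        // each entry is { type: 'recursive' | 'direct' | 'indirect', hash }
        const hashes = results.map(r => r.hash)
        console.assert(new Set(hashes).size === hashes.length)
      })
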
${keys.solarSystem}`) - // .then(out => expect(out).to.equal(`unpinned ${keys.solarSystem}\n`)) - // .then(() => ipfs('pin ls')) - // .then(out => expect(out).to.not.include(`${keys.solarSystem} direct\n`)) - // }) - }) +describe('pin', () => runOnAndOff(thing => { + let ipfs - describe('add', function () { - it('recursively (default)', () => { - return ipfs(`pin add ${keys.root}`).then(out => { - expect(out).to.eql(`pinned ${keys.root} recursively\n`) - }) + before(function () { + this.timeout(15 * 1000) + ipfs = thing.ipfs + return ipfs(`files add -r ${fixturePath}`) }) - it('direct', () => { - return ipfs(`pin add ${keys.solarSystem} --recursive false`).then(out => { - expect(out).to.eql(`pinned ${keys.solarSystem} indirectly\n`) + describe('rm', function () { + it('recursively (default)', function () { + this.timeout(10 * 1000) + return ipfs(`pin rm ${hashes.root}`) + .then(out => expect(out).to.equal(`unpinned ${hashes.root}\n`)) }) }) - }) - describe('ls', function () { - it('lists recursive pins', () => { - return ipfs(`pin ls ${keys.root}`).then(out => { - expect(out).to.eql(`${keys.root} recursive\n`) + describe('add', function () { + it('recursively (default)', () => { + return ipfs(`pin add ${hashes.root}`) + .then(out => + expect(out).to.eql(`pinned ${hashes.root} recursively\n`) + ) }) - }) - it('lists direct pins', () => { - return ipfs(`pin ls ${keys.solarSystem}`).then(out => { - expect(out).to.eql(`${keys.solarSystem} direct\n`) + it('direct', () => { + return ipfs(`pin add ${hashes.solarWiki} --recursive false`) + .then(out => + expect(out).to.eql(`pinned ${hashes.solarWiki} directly\n`) + ) }) }) - it('lists indirect pins', function () { - this.timeout(25 * 1000) - - return ipfs('pin ls').then(out => { - console.log('pin ls out:', out) - return ipfs(`pin ls ${keys.mercuryWiki}`).then(out => { - console.log('ls mercuryWiki:', out) - expect(out).to.include(keys.mercuryWiki) + describe('ls', function () { + it('lists all pins when no hash is passed', function () { + return ipfs('pin ls -q').then(out => { + const results = out.split('\n') + expect(results).to.include.members(Object.values(hashes)) }) }) - }) - it('handles multiple hashes', () => { - return ipfs(`pin ls ${keys.root} ${keys.solarSystem}`).then(out => { - expect(out).to.eql(`${keys.root} recursive\n${keys.solarSystem} direct\n`) + it('handles multiple hashes', function () { + return ipfs(`pin ls ${hashes.root} ${hashes.solarWiki}`) + .then(out => { + expect(out).to.eql( + `${hashes.root} recursive\n${hashes.solarWiki} direct\n` + ) + }) }) - }) - it('lists all pins when no hash is passed', () => { - return ipfs('pin ls').then(out => { - const hashes = out.split('\n').map(line => line.split(' ')[0]) - expect(hashes).to.include.members(Object.values(keys)) + it('can print quietly', function () { + return ipfs('pin ls -q').then(out => { + const firstLineParts = out.split(/\s/)[0].split(' ') + expect(firstLineParts).to.have.length(1) + }) }) }) - }) -})) + })) diff --git a/test/core/pin.js b/test/core/pin.js new file mode 100644 index 0000000000..fbf76dce46 --- /dev/null +++ b/test/core/pin.js @@ -0,0 +1,328 @@ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const fs = require('fs') + +const IPFS = require('../../src/core') +const createTempRepo = require('../utils/create-repo-nodejs') +const expectTimeout = require('../utils/expect-timeout') + +// fixture structure: +// planets/ +// solar-system.md 
+// mercury/ +// wiki.md +const fixturePath = 'test/fixtures/planets' +const hashes = { + root: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys', + solarWiki: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG', + mercuryDir: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q', + mercuryWiki: 'QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi' +} + +describe('pin', function () { + const fixtures = [ + 'test/fixtures/planets/mercury/wiki.md', + 'test/fixtures/planets/solar-system.md' + ].map(path => ({ + path, + content: fs.readFileSync(path) + })) + + let ipfs + let pin + let repo + + function expectPinned (hash, type, pinned = true) { + if (typeof type === 'boolean') { + pinned = type + type = undefined + } + + return pin.isPinnedWithType(hash, type || pin.types.all) + .then(result => expect(result.pinned).to.eql(pinned)) + } + + before(function (done) { + this.timeout(20 * 1000) + repo = createTempRepo() + ipfs = new IPFS({ repo }) + ipfs.on('ready', () => { + pin = ipfs.pin + ipfs.files.add(fixtures, done) + }) + }) + + after(done => repo.teardown(done)) + + /** + getIndirectKeys, + */ + + describe('isPinned', function () { + beforeEach(function () { + pin.clear() + }) + + it('when node is pinned', function () { + return pin.add(hashes.solarWiki) + .then(() => pin.isPinned(hashes.solarWiki)) + .then(pinned => expect(pinned.pinned).to.eql(true)) + }) + + it('when node is not in datastore', function () { + const falseHash = `${hashes.root.slice(0, -2)}ss` + return pin.isPinned(falseHash) + .then(pinned => { + expect(pinned.pinned).to.eql(false) + expect(pinned.reason).to.eql(undefined) + }) + }) + + it('when node is in datastore but not pinned', function () { + return expectPinned(hashes.root, false) + }) + }) + + describe('isPinnedWithType', function () { + beforeEach(function () { + pin.clear() + return pin.add(hashes.root) + }) + + it('when pinned recursively', function () { + return pin.isPinnedWithType(hashes.root, pin.types.recursive) + .then(result => { + expect(result.pinned).to.eql(true) + expect(result.reason).to.eql(pin.types.recursive) + }) + }) + + it('when pinned indirectly', function () { + return pin.isPinnedWithType(hashes.mercuryWiki, pin.types.indirect) + .then(result => { + expect(result.pinned).to.eql(true) + expect(result.reason).to.eql(hashes.root) + }) + }) + + it('when pinned directly', function () { + return pin.add(hashes.mercuryDir, { recursive: false }) + .then(() => { + return pin.isPinnedWithType(hashes.mercuryDir, pin.types.direct) + .then(result => { + expect(result.pinned).to.eql(true) + expect(result.reason).to.eql(pin.types.direct) + }) + }) + }) + + it('when not pinned', function () { + pin.clear() + return pin.isPinnedWithType(hashes.mercuryDir, pin.types.direct) + .then(pin => expect(pin.pinned).to.eql(false)) + }) + }) + + describe('add', function () { + beforeEach(function () { + pin.clear() + }) + + it('recursive', function () { + return pin.add(hashes.root) + .then(() => { + const pinChecks = Object.values(hashes) + .map(hash => expectPinned(hash)) + + return Promise.all(pinChecks) + }) + }) + + it('direct', function () { + return pin.add(hashes.root, { recursive: false }) + .then(() => Promise.all([ + expectPinned(hashes.root), + expectPinned(hashes.solarWiki, false) + ])) + }) + + it('recursive pin parent of direct pin', function () { + return pin.add(hashes.solarWiki, { recursive: false }) + .then(() => pin.add(hashes.root)) + .then(() => Promise.all([ + // solarWiki is pinned both directly and indirectly o.O + expectPinned(hashes.solarWiki, 
pin.types.direct), + expectPinned(hashes.solarWiki, pin.types.indirect), + ])) + }) + + it('directly pinning a recursive pin fails', function () { + return pin.add(hashes.root) + .then(() => pin.add(hashes.root, { recursive: false })) + .catch(err => expect(err).to.match(/already pinned recursively/)) + }) + + it('can\'t pin item not in datastore', function () { + this.timeout(10 * 1000) + const falseHash = `${hashes.root.slice(0, -2)}ss` + return expectTimeout(pin.add(falseHash), 4000) + }) + + // TODO block rm breaks subsequent tests + it.skip('needs all children in datastore to pin recursively', function () { + this.timeout(10 * 1000) + return ipfs.block.rm(hashes.mercuryWiki) + .then(() => expectTimeout(pin.add(hashes.root), 4000)) + }) + }) + + describe('ls', function () { + before(function () { + pin.clear() + return Promise.all([ + pin.add(hashes.root), + pin.add(hashes.mercuryDir, { recursive: false }) + ]) + }) + + it('lists pins of a particular hash', function () { + return pin.ls(hashes.mercuryDir) + .then(out => expect(out[0].hash).to.eql(hashes.mercuryDir)) + }) + + it('indirect pins supersedes direct pins', function () { + return pin.ls() + .then(ls => { + const pinType = ls.find(out => out.hash === hashes.mercuryDir).type + expect(pinType).to.eql(pin.types.indirect) + }) + }) + + describe('list pins of type', function () { + it('all', function () { + return pin.ls() + .then(out => + expect(out).to.deep.eql([ + { type: 'recursive', + hash: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys' }, + { type: 'indirect', + hash: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG' }, + { type: 'indirect', + hash: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q' }, + { type: 'indirect', + hash: 'QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi' } + ]) + ) + }) + + it('direct', function () { + return pin.ls({ type: 'direct' }) + .then(out => + expect(out).to.deep.eql([ + { type: 'direct', + hash: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q' } + ]) + ) + }) + + it('recursive', function() { + return pin.ls({ type: 'recursive' }) + .then(out => + expect(out).to.deep.eql([ + { type: 'recursive', + hash: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys' } + ]) + ) + }) + + it('indirect', function () { + return pin.ls({ type: 'indirect' }) + .then(out => + expect(out).to.deep.eql([ + { type: 'indirect', + hash: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG' }, + { type: 'indirect', + hash: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q' }, + { type: 'indirect', + hash: 'QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi' } + ]) + ) + }) + }) + }) + + describe('rm', function () { + beforeEach(function () { + pin.clear() + return pin.add(hashes.root) + }) + + it('a recursive pin', function () { + return pin.rm(hashes.root) + .then(() => { + return Promise.all([ + expectPinned(hashes.root, false), + expectPinned(hashes.mercuryWiki, false) + ]) + }) + }) + + it('a direct pin', function () { + pin.clear() + return pin.add(hashes.mercuryDir, { recursive: false }) + .then(() => pin.rm(hashes.mercuryDir)) + .then(() => expectPinned(hashes.mercuryDir, false)) + }) + + it('fails to remove an indirect pin', function () { + return pin.rm(hashes.solarWiki) + .catch(err => expect(err).to.match(/is pinned indirectly under/)) + .then(() => expectPinned(hashes.solarWiki)) + }) + + it('fails when an item is not pinned', function () { + return pin.rm(hashes.root) + .then(() => pin.rm(hashes.root)) + .catch(err => expect(err).to.match(/is not pinned/)) + }) + }) + + describe('load', function () { + before(function 
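
The load and flush tests that follow assert the round trip through the repo: flush encodes the in-memory pin sets into DAG nodes and stores the root under a datastore key, while load walks back from that key to rebuild the sets. In outline (promisified forms, as the tests use them):

    pin.flush()                                        // pin sets -> root node -> datastore
      .then(() => { pin.clear(); return pin.load() })  // datastore -> root -> pin sets
      .then(() => pin.ls())
      .then(ls => { /* pins survived the round trip */ })
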
() { + return pin.add(hashes.root) + }) + + it('loads', function () { + pin.clear() + return pin.ls() + .then(ls => expect(ls.length).to.eql(0)) + .then(() => pin.load()) + .then(() => pin.ls()) + .then(ls => expect(ls.length).to.be.gt(0)) + }) + }) + + describe('flush', function () { + beforeEach(function () { + return pin.add(hashes.root) + }) + + it('flushes', function () { + return pin.ls() + .then(ls => expect(ls.length).to.be.gt(0)) + .then(() => { + pin.clear() + return pin.flush() + }) + .then(() => pin.load()) + .then(() => pin.ls()) + .then(ls => expect(ls.length).to.eql(0)) + }) + }) +}) From 868570a847358172df229c17d28fda2194b5af66 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Tue, 20 Mar 2018 19:26:07 -0400 Subject: [PATCH 06/21] refactor: move pin http-api tests to http-api/inject fix: attempt to find a way to use http-api/inject test structure for pin tests test: fix pin.rm http-api tests test: fix pin.add http-api tests docs: docs and cleanup of http-api pin tests refactor: renaming fix: lint errors fix: resolvePaths tests are failing on CI, it might be long ops, testing a timeout bump fix: add files explicitly before testing resolvePaths fix: remove mocha.only from resolvePaths. let's hope tests pass, they are passing CI now fix: rename test/core/utils.spec.js -> utils.js so it's not run during browser tests --- src/core/components/files.js | 1 - src/core/components/init-assets.js | 2 - src/core/components/pin.js | 4 +- src/core/components/pin.proto.js | 2 + src/core/utils.js | 3 +- src/http/api/resources/pin.js | 2 +- test/cli/files.js | 4 +- test/cli/pin.js | 91 +++++++-------- test/core/pin.js | 92 +++++++-------- test/core/utils.js | 142 +++++++++++++++++++++++ test/core/utils.spec.js | 150 ------------------------- test/http-api/inject/pin.js | 174 +++++++++++++++++++++++++++++ test/http-api/spec/pin.js | 147 ------------------------ 13 files changed, 416 insertions(+), 398 deletions(-) create mode 100644 test/core/utils.js delete mode 100644 test/core/utils.spec.js create mode 100644 test/http-api/inject/pin.js delete mode 100644 test/http-api/spec/pin.js diff --git a/src/core/components/files.js b/src/core/components/files.js index e40b6fb1d0..1607d5dcaf 100644 --- a/src/core/components/files.js +++ b/src/core/components/files.js @@ -21,7 +21,6 @@ const toB58String = require('multihashes').toB58String const WRAPPER = 'wrapper/' function noop () {} -function identity (x) { return x } function prepareFile (self, opts, file, callback) { opts = opts || {} diff --git a/src/core/components/init-assets.js b/src/core/components/init-assets.js index 79e96a9fdf..096a3d20c4 100644 --- a/src/core/components/init-assets.js +++ b/src/core/components/init-assets.js @@ -1,9 +1,7 @@ 'use strict' const path = require('path') -const fs = require('fs') const glob = require('glob') -const importer = require('ipfs-unixfs-engine').importer const pull = require('pull-stream') const file = require('pull-file') const CID = require('cids') diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 25c7c6fd3e..33163fab2a 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -411,7 +411,7 @@ module.exports = function pin (self) { waterfall([ (cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests (_, cb) => repo.datastore.has(pinDataStoreKey, cb), - (has, cb) => has ? cb() : cb('No pins to load'), + (has, cb) => has ? 
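    // load(): open the datastore if needed, check whether the root pin key exists,
    // fetch the root DAG node it points to, then decode the 'recursive' and 'direct'
    // sets from the root's links; when the key is absent the waterfall now bails out
    // with a real Error (matched on err.message below) rather than a bare string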
cb() : cb(new Error('No pins to load')), (cb) => repo.datastore.get(pinDataStoreKey, cb), (mh, cb) => dag.get(new CID(mh), cb), (root, cb) => handle.put('root', root.value, cb), @@ -419,7 +419,7 @@ module.exports = function pin (self) { (rKeys, cb) => handle.put('rKeys', rKeys, cb), (cb) => pin.set.loadSet(handle.root, pin.types.direct, logInternalKey, cb) ], (err, dKeys) => { - if (err && err !== 'No pins to load') { + if (err && err.message !== 'No pins to load') { return callback(err) } if (dKeys) { diff --git a/src/core/components/pin.proto.js b/src/core/components/pin.proto.js index db2391c91f..8e94fd8f52 100644 --- a/src/core/components/pin.proto.js +++ b/src/core/components/pin.proto.js @@ -1,3 +1,5 @@ +'use strict' + /** * Protobuf interface * from go-ipfs/pin/internal/pb/header.proto diff --git a/src/core/utils.js b/src/core/utils.js index b5cf7b3cb0..4eaa83f746 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -3,7 +3,6 @@ const multihashes = require('multihashes') const promisify = require('promisify-es6') const map = require('async/map') -const CID = require('cids') const isIPFS = require('is-ipfs') exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.' @@ -74,7 +73,7 @@ const resolvePaths = promisify(function (ipfs, ipfsPaths, callback) { let parsedPath try { parsedPath = exports.parseIpfsPath(path) - } catch(err) { + } catch (err) { return cb(err) } diff --git a/src/http/api/resources/pin.js b/src/http/api/resources/pin.js index a2c89ad29b..73c97ac2c3 100644 --- a/src/http/api/resources/pin.js +++ b/src/http/api/resources/pin.js @@ -19,7 +19,7 @@ function parseArgs (request, reply) { return reply({ path: request.query.arg, - recursive: recursive, + recursive: recursive }) } diff --git a/test/cli/files.js b/test/cli/files.js index aa7f881a9e..123fd62c03 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -318,7 +318,7 @@ describe('files', () => runOnAndOff((thing) => { it('add pins by default', function () { const filePath = path.join(os.tmpdir(), hat()) const content = String(Math.random()) - const file = fs.writeFileSync(filePath, content) + fs.writeFileSync(filePath, content) return ipfs(`files add -Q ${filePath}`) .then(out => { @@ -333,7 +333,7 @@ describe('files', () => runOnAndOff((thing) => { this.timeout(20 * 1000) const filePath = path.join(os.tmpdir(), hat()) const content = String(Math.random()) - const file = fs.writeFileSync(filePath, content) + fs.writeFileSync(filePath, content) return ipfs(`files add -Q --pin=false ${filePath}`) .then(out => { diff --git a/test/cli/pin.js b/test/cli/pin.js index 4543d97d1f..f6c77eb67a 100644 --- a/test/cli/pin.js +++ b/test/cli/pin.js @@ -1,4 +1,5 @@ /* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ 'use strict' const expect = require('chai').expect @@ -10,7 +11,7 @@ const runOnAndOff = require('../utils/on-and-off') // mercury/ // wiki.md const fixturePath = 'test/fixtures/planets' -const hashes = { +const pins = { root: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys', solarWiki: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG', mercuryDir: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q', @@ -18,60 +19,60 @@ const hashes = { } describe('pin', () => runOnAndOff(thing => { - let ipfs + let ipfs - before(function () { - this.timeout(15 * 1000) - ipfs = thing.ipfs - return ipfs(`files add -r ${fixturePath}`) + before(function () { + this.timeout(15 * 1000) + ipfs = thing.ipfs + return ipfs(`files add -r ${fixturePath}`) + }) + + describe('rm', 
function () { + it('recursively (default)', function () { + this.timeout(10 * 1000) + return ipfs(`pin rm ${pins.root}`) + .then(out => expect(out).to.equal(`unpinned ${pins.root}\n`)) }) + }) - describe('rm', function () { - it('recursively (default)', function () { - this.timeout(10 * 1000) - return ipfs(`pin rm ${hashes.root}`) - .then(out => expect(out).to.equal(`unpinned ${hashes.root}\n`)) - }) + describe('add', function () { + it('recursively (default)', () => { + return ipfs(`pin add ${pins.root}`) + .then(out => + expect(out).to.eql(`pinned ${pins.root} recursively\n`) + ) }) - describe('add', function () { - it('recursively (default)', () => { - return ipfs(`pin add ${hashes.root}`) - .then(out => - expect(out).to.eql(`pinned ${hashes.root} recursively\n`) - ) - }) + it('direct', () => { + return ipfs(`pin add ${pins.solarWiki} --recursive false`) + .then(out => + expect(out).to.eql(`pinned ${pins.solarWiki} directly\n`) + ) + }) + }) - it('direct', () => { - return ipfs(`pin add ${hashes.solarWiki} --recursive false`) - .then(out => - expect(out).to.eql(`pinned ${hashes.solarWiki} directly\n`) - ) + describe('ls', function () { + it('lists all pins when no hash is passed', function () { + return ipfs('pin ls -q').then(out => { + const results = out.split('\n') + expect(results).to.include.members(Object.values(pins)) }) }) - describe('ls', function () { - it('lists all pins when no hash is passed', function () { - return ipfs('pin ls -q').then(out => { - const results = out.split('\n') - expect(results).to.include.members(Object.values(hashes)) + it('handles multiple hashes', function () { + return ipfs(`pin ls ${pins.root} ${pins.solarWiki}`) + .then(out => { + expect(out).to.eql( + `${pins.root} recursive\n${pins.solarWiki} direct\n` + ) }) - }) - - it('handles multiple hashes', function () { - return ipfs(`pin ls ${hashes.root} ${hashes.solarWiki}`) - .then(out => { - expect(out).to.eql( - `${hashes.root} recursive\n${hashes.solarWiki} direct\n` - ) - }) - }) + }) - it('can print quietly', function () { - return ipfs('pin ls -q').then(out => { - const firstLineParts = out.split(/\s/)[0].split(' ') - expect(firstLineParts).to.have.length(1) - }) + it('can print quietly', function () { + return ipfs('pin ls -q').then(out => { + const firstLineParts = out.split(/\s/)[0].split(' ') + expect(firstLineParts).to.have.length(1) }) }) - })) + }) +})) diff --git a/test/core/pin.js b/test/core/pin.js index fbf76dce46..4fa846c05b 100644 --- a/test/core/pin.js +++ b/test/core/pin.js @@ -1,3 +1,4 @@ +/* eslint max-nested-callbacks: ["error", 8] */ /* eslint-env mocha */ 'use strict' @@ -17,8 +18,7 @@ const expectTimeout = require('../utils/expect-timeout') // solar-system.md // mercury/ // wiki.md -const fixturePath = 'test/fixtures/planets' -const hashes = { +const pins = { root: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys', solarWiki: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG', mercuryDir: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q', @@ -70,13 +70,13 @@ describe('pin', function () { }) it('when node is pinned', function () { - return pin.add(hashes.solarWiki) - .then(() => pin.isPinned(hashes.solarWiki)) + return pin.add(pins.solarWiki) + .then(() => pin.isPinned(pins.solarWiki)) .then(pinned => expect(pinned.pinned).to.eql(true)) }) it('when node is not in datastore', function () { - const falseHash = `${hashes.root.slice(0, -2)}ss` + const falseHash = `${pins.root.slice(0, -2)}ss` return pin.isPinned(falseHash) .then(pinned => { expect(pinned.pinned).to.eql(false) @@ 
-85,18 +85,18 @@ describe('pin', function () { }) it('when node is in datastore but not pinned', function () { - return expectPinned(hashes.root, false) + return expectPinned(pins.root, false) }) }) describe('isPinnedWithType', function () { beforeEach(function () { pin.clear() - return pin.add(hashes.root) + return pin.add(pins.root) }) it('when pinned recursively', function () { - return pin.isPinnedWithType(hashes.root, pin.types.recursive) + return pin.isPinnedWithType(pins.root, pin.types.recursive) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason).to.eql(pin.types.recursive) @@ -104,17 +104,17 @@ describe('pin', function () { }) it('when pinned indirectly', function () { - return pin.isPinnedWithType(hashes.mercuryWiki, pin.types.indirect) + return pin.isPinnedWithType(pins.mercuryWiki, pin.types.indirect) .then(result => { expect(result.pinned).to.eql(true) - expect(result.reason).to.eql(hashes.root) + expect(result.reason).to.eql(pins.root) }) }) it('when pinned directly', function () { - return pin.add(hashes.mercuryDir, { recursive: false }) + return pin.add(pins.mercuryDir, { recursive: false }) .then(() => { - return pin.isPinnedWithType(hashes.mercuryDir, pin.types.direct) + return pin.isPinnedWithType(pins.mercuryDir, pin.types.direct) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason).to.eql(pin.types.direct) @@ -124,7 +124,7 @@ describe('pin', function () { it('when not pinned', function () { pin.clear() - return pin.isPinnedWithType(hashes.mercuryDir, pin.types.direct) + return pin.isPinnedWithType(pins.mercuryDir, pin.types.direct) .then(pin => expect(pin.pinned).to.eql(false)) }) }) @@ -135,9 +135,9 @@ describe('pin', function () { }) it('recursive', function () { - return pin.add(hashes.root) + return pin.add(pins.root) .then(() => { - const pinChecks = Object.values(hashes) + const pinChecks = Object.values(pins) .map(hash => expectPinned(hash)) return Promise.all(pinChecks) @@ -145,40 +145,40 @@ describe('pin', function () { }) it('direct', function () { - return pin.add(hashes.root, { recursive: false }) + return pin.add(pins.root, { recursive: false }) .then(() => Promise.all([ - expectPinned(hashes.root), - expectPinned(hashes.solarWiki, false) + expectPinned(pins.root), + expectPinned(pins.solarWiki, false) ])) }) it('recursive pin parent of direct pin', function () { - return pin.add(hashes.solarWiki, { recursive: false }) - .then(() => pin.add(hashes.root)) + return pin.add(pins.solarWiki, { recursive: false }) + .then(() => pin.add(pins.root)) .then(() => Promise.all([ // solarWiki is pinned both directly and indirectly o.O - expectPinned(hashes.solarWiki, pin.types.direct), - expectPinned(hashes.solarWiki, pin.types.indirect), + expectPinned(pins.solarWiki, pin.types.direct), + expectPinned(pins.solarWiki, pin.types.indirect) ])) }) it('directly pinning a recursive pin fails', function () { - return pin.add(hashes.root) - .then(() => pin.add(hashes.root, { recursive: false })) + return pin.add(pins.root) + .then(() => pin.add(pins.root, { recursive: false })) .catch(err => expect(err).to.match(/already pinned recursively/)) }) it('can\'t pin item not in datastore', function () { this.timeout(10 * 1000) - const falseHash = `${hashes.root.slice(0, -2)}ss` + const falseHash = `${pins.root.slice(0, -2)}ss` return expectTimeout(pin.add(falseHash), 4000) }) // TODO block rm breaks subsequent tests it.skip('needs all children in datastore to pin recursively', function () { this.timeout(10 * 1000) - return 
ipfs.block.rm(hashes.mercuryWiki) - .then(() => expectTimeout(pin.add(hashes.root), 4000)) + return ipfs.block.rm(pins.mercuryWiki) + .then(() => expectTimeout(pin.add(pins.root), 4000)) }) }) @@ -186,20 +186,20 @@ describe('pin', function () { before(function () { pin.clear() return Promise.all([ - pin.add(hashes.root), - pin.add(hashes.mercuryDir, { recursive: false }) + pin.add(pins.root), + pin.add(pins.mercuryDir, { recursive: false }) ]) }) it('lists pins of a particular hash', function () { - return pin.ls(hashes.mercuryDir) - .then(out => expect(out[0].hash).to.eql(hashes.mercuryDir)) + return pin.ls(pins.mercuryDir) + .then(out => expect(out[0].hash).to.eql(pins.mercuryDir)) }) it('indirect pins supersedes direct pins', function () { return pin.ls() .then(ls => { - const pinType = ls.find(out => out.hash === hashes.mercuryDir).type + const pinType = ls.find(out => out.hash === pins.mercuryDir).type expect(pinType).to.eql(pin.types.indirect) }) }) @@ -231,7 +231,7 @@ describe('pin', function () { ) }) - it('recursive', function() { + it('recursive', function () { return pin.ls({ type: 'recursive' }) .then(out => expect(out).to.deep.eql([ @@ -260,42 +260,42 @@ describe('pin', function () { describe('rm', function () { beforeEach(function () { pin.clear() - return pin.add(hashes.root) + return pin.add(pins.root) }) it('a recursive pin', function () { - return pin.rm(hashes.root) + return pin.rm(pins.root) .then(() => { return Promise.all([ - expectPinned(hashes.root, false), - expectPinned(hashes.mercuryWiki, false) + expectPinned(pins.root, false), + expectPinned(pins.mercuryWiki, false) ]) }) }) it('a direct pin', function () { pin.clear() - return pin.add(hashes.mercuryDir, { recursive: false }) - .then(() => pin.rm(hashes.mercuryDir)) - .then(() => expectPinned(hashes.mercuryDir, false)) + return pin.add(pins.mercuryDir, { recursive: false }) + .then(() => pin.rm(pins.mercuryDir)) + .then(() => expectPinned(pins.mercuryDir, false)) }) it('fails to remove an indirect pin', function () { - return pin.rm(hashes.solarWiki) + return pin.rm(pins.solarWiki) .catch(err => expect(err).to.match(/is pinned indirectly under/)) - .then(() => expectPinned(hashes.solarWiki)) + .then(() => expectPinned(pins.solarWiki)) }) it('fails when an item is not pinned', function () { - return pin.rm(hashes.root) - .then(() => pin.rm(hashes.root)) + return pin.rm(pins.root) + .then(() => pin.rm(pins.root)) .catch(err => expect(err).to.match(/is not pinned/)) }) }) describe('load', function () { before(function () { - return pin.add(hashes.root) + return pin.add(pins.root) }) it('loads', function () { @@ -310,7 +310,7 @@ describe('pin', function () { describe('flush', function () { beforeEach(function () { - return pin.add(hashes.root) + return pin.add(pins.root) }) it('flushes', function () { diff --git a/test/core/utils.js b/test/core/utils.js new file mode 100644 index 0000000000..d706511f1f --- /dev/null +++ b/test/core/utils.js @@ -0,0 +1,142 @@ +/* eslint max-nested-callbacks: ["error", 8] */ +/* eslint-env mocha */ +'use strict' + +const chai = require('chai') +const dirtyChai = require('dirty-chai') +const expect = chai.expect +chai.use(dirtyChai) + +const fs = require('fs') +const fromB58String = require('multihashes').fromB58String + +// This gets replaced by `create-repo-browser.js` in the browser +const createTempRepo = require('../utils/create-repo-nodejs.js') +const IPFS = require('../../src/core') +const utils = require('../../src/core/utils') + +describe('utils', () => { + const rootHash = 
'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys' + const rootPath = `/ipfs/${rootHash}` + const rootMultihash = fromB58String(rootHash) + const aboutHash = 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q' + const aboutPath = `${rootPath}/mercury` + const aboutMultihash = fromB58String(aboutHash) + + describe('parseIpfsPath', () => { + it('parses path with no links', function () { + expect(utils.parseIpfsPath(rootHash)) + .to.deep.equal({ + hash: rootHash, + links: [] + }) + }) + + it('parses path with links', function () { + expect(utils.parseIpfsPath(`${rootHash}/docs/index`)) + .to.deep.equal({ + hash: rootHash, + links: ['docs', 'index'] + }) + }) + + it('parses path with /ipfs/ prefix', function () { + expect(utils.parseIpfsPath(`/ipfs/${rootHash}/about`)) + .to.deep.equal({ + hash: rootHash, + links: ['about'] + }) + }) + + it('returns error for malformed path', function () { + const fn = () => utils.parseIpfsPath(`${rootHash}//about`) + expect(fn).to.throw('invalid ipfs ref path') + }) + + it('returns error if root is not a valid multihash', function () { + const fn = () => utils.parseIpfsPath('invalid/ipfs/path') + expect(fn).to.throw('invalid ipfs ref path') + }) + }) + + describe('resolvePaths', function () { + this.timeout(80 * 1000) + const fixtures = [ + 'test/fixtures/planets/mercury/wiki.md', + 'test/fixtures/planets/solar-system.md' + ].map(path => ({ + path, + content: fs.readFileSync(path) + })) + + let node + let repo + + before(done => { + repo = createTempRepo() + node = new IPFS({ + repo: repo + }) + node.once('ready', () => node.files.add(fixtures, done)) + }) + + after(done => { + repo.teardown(done) + }) + + it('handles base58 hash format', (done) => { + utils.resolvePaths(node, rootHash, (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(1) + expect(hashes[0]).to.deep.equal(rootMultihash) + done() + }) + }) + + it('handles multihash format', (done) => { + utils.resolvePaths(node, aboutMultihash, (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(1) + expect(hashes[0]).to.deep.equal(aboutMultihash) + done() + }) + }) + + it('handles ipfs paths format', function (done) { + this.timeout(200 * 1000) + utils.resolvePaths(node, aboutPath, (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(1) + expect(hashes[0]).to.deep.equal(aboutMultihash) + done() + }) + }) + + it('handles an array', (done) => { + utils.resolvePaths(node, [rootHash, rootPath, rootMultihash], (err, hashes) => { + expect(err).to.not.exist() + expect(hashes.length).to.equal(3) + expect(hashes[0]).to.deep.equal(rootMultihash) + expect(hashes[1]).to.deep.equal(rootMultihash) + expect(hashes[2]).to.deep.equal(rootMultihash) + done() + }) + }) + + it('should error on invalid hashes', function (done) { + utils.resolvePaths(node, '/ipfs/asdlkjahsdfkjahsdfd', err => { + expect(err).to.exist() + done() + }) + }) + + it(`should error when a link doesn't exist`, function (done) { + utils.resolvePaths(node, `${aboutPath}/fusion`, err => { + expect(err.message).to.include( + `no link named "fusion" under QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q` + ) + done() + }) + }) + }) +}) diff --git a/test/core/utils.spec.js b/test/core/utils.spec.js deleted file mode 100644 index c51b609e1e..0000000000 --- a/test/core/utils.spec.js +++ /dev/null @@ -1,150 +0,0 @@ -/* eslint max-nested-callbacks: ["error", 8] */ -/* eslint-env mocha */ -'use strict' - -const chai = require('chai') -const dirtyChai = require('dirty-chai') -const expect 
= chai.expect -chai.use(dirtyChai) -const multihashes = require('multihashes') - -// This gets replaced by `create-repo-browser.js` in the browser -const createTempRepo = require('../utils/create-repo-nodejs.js') -const IPFS = require('../../src/core') -const utils = require('../../src/core/utils') - -describe('utils', () => { - const rootHashString = 'QmUhUuiTKkkK8J6JZ9zmj8iNHPuNfGYcszgRumzhHBxEEU' - const rootHash = multihashes.fromB58String(rootHashString) - const rootPathString = `/ipfs/${rootHashString}` - const aboutHashString = 'QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V' - const aboutHash = multihashes.fromB58String(aboutHashString) - const aboutPathString = `${rootPathString}/about` - - describe('parseIpfsPath', () => { - it('parses path with no links', function () { - expect(utils.parseIpfsPath(rootHashString)) - .to.deep.equal({ - hash: rootHashString, - links: [] - }) - }) - - it('parses path with links', function () { - expect(utils.parseIpfsPath(`${rootHashString}/docs/index`)) - .to.deep.equal({ - hash: rootHashString, - links: ['docs', 'index'] - }) - }) - - it('parses path with /ipfs/ prefix', function () { - expect(utils.parseIpfsPath(`/ipfs/${rootHashString}/about`)) - .to.deep.equal({ - hash: rootHashString, - links: ['about'] - }) - }) - - it('returns error for malformed path', function () { - const fn = () => utils.parseIpfsPath(`${rootHashString}//about`) - expect(fn).to.throw('invalid ipfs ref path') - }) - - it('returns error if root is not a valid multihash', function () { - const fn = () => utils.parseIpfsPath('invalid/ipfs/path') - expect(fn).to.throw('invalid ipfs ref path') - }) - }) - - describe('resolvePaths', function () { - this.timeout(80 * 1000) - let node - let repo - - before((done) => { - repo = createTempRepo() - node = new IPFS({ - repo: repo - }) - node.once('ready', done) - }) - - after((done) => { - repo.teardown(done) - }) - - it('normalizes hash string to array with multihash object', (done) => { - utils.resolvePaths(node, rootHashString, (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(1) - expect(hashes[0]).to.deep.equal(rootHash) - done() - }) - }) - - it('normalizes array of hash strings to array of multihash objects', (done) => { - utils.resolvePaths(node, [rootHashString, aboutHashString], (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(2) - expect(hashes[0]).to.deep.equal(rootHash) - expect(hashes[1]).to.deep.equal(aboutHash) - done() - }) - }) - - it('normalizes multihash object to array with multihash object', (done) => { - utils.resolvePaths(node, aboutHash, (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(1) - expect(hashes[0]).to.deep.equal(aboutHash) - done() - }) - }) - - it('normalizes array of multihash objects to array of multihash objects', (done) => { - utils.resolvePaths(node, [rootHash, aboutHash], (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(2) - expect(hashes[0]).to.deep.equal(rootHash) - expect(hashes[1]).to.deep.equal(aboutHash) - done() - }) - }) - - it('normalizes ipfs path string to array with multihash object', (done) => { - utils.resolvePaths(node, aboutPathString, (err, hashes) => { - expect(err).to.not.exist() - expect(hashes.length).to.equal(1) - expect(hashes[0]).to.deep.equal(aboutHash) - done() - }) - }) - - it('normalizes array of ipfs path strings to array with multihash objects', (done) => { - utils.resolvePaths(node, [aboutPathString, rootPathString], (err, hashes) => { 
- expect(err).to.not.exist() - expect(hashes.length).to.equal(2) - expect(hashes[0]).to.deep.equal(aboutHash) - expect(hashes[1]).to.deep.equal(rootHash) - done() - }) - }) - - it('should error on invalid hashes', function (done) { - utils.resolvePaths(node, '/ipfs/asdlkjahsdfkjahsdfd', err => { - expect(err).to.exist() - done() - }) - }) - - it(`should error when a link doesn't exist`, function (done) { - utils.resolvePaths(node, `${aboutPathString}/fusion`, err => { - expect(err.message).to.include( - `no link named "fusion" under QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V` - ) - done() - }) - }) - }) -}) diff --git a/test/http-api/inject/pin.js b/test/http-api/inject/pin.js new file mode 100644 index 0000000000..72d9374e14 --- /dev/null +++ b/test/http-api/inject/pin.js @@ -0,0 +1,174 @@ +/* eslint-env mocha */ +/* eslint max-nested-callbacks: ["error", 8] */ +'use strict' + +const expect = require('chai').expect + +// We use existing pin structure in the go-ipfs-repo fixture +// so that we don't have to stream a bunch of object/put operations +// This is suitable because these tests target the functionality +// of the /pin endpoints and don't delve into the pin core +// +// fixture's pins: +// - root1 +// - c1 +// - c2 +// - c3 +// - c4 +// - c5 +// - c6 +// - root2 + +const pins = { + root1: 'QmVtU7ths96fMgZ8YSZAbKghyieq7AjxNdcqyVzxTt3qVe', + c1: 'QmZTR5bcpQD7cFgTorqxZDYaew1Wqgfbd2ud9QqGPAkK2V', + c2: 'QmYCvbfNbCwFR45HiNP45rwJgvatpiW38D961L5qAhUM5Y', + c3: 'QmY5heUM5qgRubMDD1og9fhCPA6QdkMp3QCwd4s7gJsyE7', + c4: 'QmUzLxaXnM8RYCPEqLDX5foToi5aNZHqfYr285w2BKhkft', + c5: 'QmPZ9gcCEpqKTo6aq61g2nXGUhM4iCL3ewB6LDXZCtioEB', + c6: 'QmTumTjvcYCAvRRwQ8sDRxh8ezmrcr88YFU7iYNroGGTBZ', + root2: 'QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn' +} + +module.exports = (http) => { + describe('pin', () => { + let api + + before(() => { + api = http.api.server.select('API') + }) + + describe('rm', () => { + it('fails on invalid args', done => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/rm?arg=invalid` + }, res => { + expect(res.statusCode).to.equal(500) + expect(res.result.Message).to.match(/invalid ipfs ref path/) + done() + }) + }) + + it('unpins recursive pins', done => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/rm?arg=${pins.root1}` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result.Pins).to.deep.eql([pins.root1]) + done() + }) + }) + + it('unpins direct pins', done => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/add?arg=${pins.root1}&recursive=false` + }, res => { + expect(res.statusCode).to.equal(200) + api.inject({ + method: 'POST', + url: `/api/v0/pin/rm?arg=${pins.root1}&recursive=false` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result.Pins).to.deep.eql([pins.root1]) + done() + }) + }) + }) + }) + + describe('add', () => { + it('fails on invalid args', done => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/add?arg=invalid` + }, res => { + expect(res.statusCode).to.equal(500) + expect(res.result.Message).to.match(/invalid ipfs ref path/) + done() + }) + }) + + it('recursively', done => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/add?arg=${pins.root1}` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result.Pins).to.deep.eql([pins.root1]) + done() + }) + }) + + it('directly', done => { + api.inject({ + method: 'POST', + url: `/api/v0/pin/add?arg=${pins.root1}&recursive=false` + }, (res) => { + // by directly pinning a node that is already recursively pinned, + // it should 
error and verifies that the endpoint is parsing + // the recursive arg correctly. + expect(res.statusCode).to.equal(500) + expect(res.result.Message).to.match(/already pinned recursively/) + done() + }) + }) + }) + + describe('ls', () => { + it('fails on invalid args', done => { + api.inject({ + method: 'GET', + url: `/api/v0/pin/ls?arg=invalid` + }, res => { + expect(res.statusCode).to.equal(500) + expect(res.result.Message).to.match(/invalid ipfs ref path/) + done() + }) + }) + + it('finds all pinned objects', done => { + api.inject({ + method: 'GET', + url: '/api/v0/pin/ls' + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result.Keys).to.have.all.keys(Object.values(pins)) + done() + }) + }) + + it('finds specific pinned objects', done => { + api.inject({ + method: 'GET', + url: `/api/v0/pin/ls?arg=${pins.c1}` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result.Keys[pins.c1].Type) + .to.equal(`indirect through ${pins.root1}`) + done() + }) + }) + + it('finds pins of type', done => { + api.inject({ + method: 'GET', + url: `/api/v0/pin/ls?type=recursive` + }, (res) => { + expect(res.statusCode).to.equal(200) + expect(res.result.Keys).to.deep.eql({ + QmUNLLsPACCz1vLxQVkXqqLX5R1X345qqfHbsf67hvA3Nn: { + Type: 'recursive' + }, + QmVtU7ths96fMgZ8YSZAbKghyieq7AjxNdcqyVzxTt3qVe: { + Type: 'recursive' + } + }) + done() + }) + }) + }) + }) +} diff --git a/test/http-api/spec/pin.js b/test/http-api/spec/pin.js deleted file mode 100644 index f58ff3a10f..0000000000 --- a/test/http-api/spec/pin.js +++ /dev/null @@ -1,147 +0,0 @@ -/* eslint-env mocha */ -'use strict' - -const expect = require('chai').expect -const fs = require('fs') -const FormData = require('form-data') -const streamToPromise = require('stream-to-promise') -const each = require('async/each') - -// use a tree of ipfs objects for recursive tests: -// root -// |`leaf -// `branch -// `subLeaf - -const keys = { - root: 'QmWQwS2Xh1SFGMPzUVYQ52b7RC7fTfiaPHm3ZyTRZuHmer', - leaf: 'QmaZoTQ6wFe7EtvaePBUeXavfeRqCAq3RUMomFxBpZLrLA', - branch: 'QmNxjjP7dtx6pzxWGBRCrgmjX3JqKL7uF2Kjx7ExiZDbSB', - subLeaf: 'QmUzzznkyQL7FjjBztG3D1tTjBuxeArLceDZnuSowUggXL' -} - -module.exports = (http) => { - describe('pin', () => { - let api - - before((done) => { - // add test tree to repo - api = http.api.server.select('API') - const putFile = (filename, cb) => { - const filePath = `test/test-data/tree/${filename}.json` - const form = new FormData() - form.append('file', fs.createReadStream(filePath)) - const headers = form.getHeaders() - streamToPromise(form).then((payload) => { - api.inject({ - method: 'POST', - url: '/api/v0/object/put', - headers: headers, - payload: payload - }, (res) => { - expect(res.statusCode).to.equal(200) - cb() - }) - }) - } - each(Object.keys(keys), putFile, (err) => { - expect(err).to.not.exist() - done() - }) - }) - - describe('/pin/add', () => { - it('pins object recursively by default', (done) => { - api.inject({ - method: 'POST', - url: `/api/v0/pin/add?arg=${keys.root}` - }, (res) => { - expect(res.statusCode).to.equal(200) - expect(res.result).to.deep.equal({Pins: [keys.root]}) - done() - }) - }) - }) - - describe('/pin/add (direct)', () => { - it('pins object directly if specified', (done) => { - api.inject({ - method: 'POST', - url: `/api/v0/pin/add?arg=${keys.leaf}&recursive=false` - }, (res) => { - expect(res.statusCode).to.equal(200) - expect(res.result).to.deep.equal({Pins: [keys.leaf]}) - done() - }) - }) - }) - - describe('/pin/ls (with path)', () => { - it('finds specified pinned 
object', (done) => { - api.inject({ - method: 'GET', - url: `/api/v0/pin/ls?arg=/ipfs/${keys.root}/branch/subLeaf` - }, (res) => { - expect(res.statusCode).to.equal(200) - expect(res.result.Keys[keys.subLeaf].Type) - .to.equal(`indirect through ${keys.root}`) - done() - }) - }) - }) - - describe('/pin/ls (without path or type)', () => { - it('finds all pinned objects', (done) => { - api.inject({ - method: 'GET', - url: '/api/v0/pin/ls' - }, (res) => { - expect(res.statusCode).to.equal(200) - expect(res.result.Keys[keys.root].Type).to.equal('recursive') - expect(res.result.Keys[keys.leaf].Type).to.equal('direct') - expect(res.result.Keys[keys.branch].Type).to.equal('indirect') - expect(res.result.Keys[keys.subLeaf].Type).to.equal('indirect') - done() - }) - }) - }) - - describe('/pin/rm (direct)', () => { - it('unpins only directly pinned objects if specified', (done) => { - api.inject({ - method: 'POST', - url: `/api/v0/pin/rm?arg=${keys.leaf}&recursive=false` - }, (res) => { - expect(res.statusCode).to.equal(200) - expect(res.result).to.deep.equal({Pins: [keys.leaf]}) - - api.inject({ - method: 'POST', - url: `/api/v0/pin/rm?arg=${keys.root}&recursive=false` - }, (res) => { - expect(res.statusCode).to.equal(500) - expect(res.result.Message).to.equal( - 'Failed to remove pin: ' + - 'QmWQwS2Xh1SFGMPzUVYQ52b7RC7fTfiaPHm3ZyTRZuHmer ' + - 'is pinned recursively' - ) - done() - }) - }) - }) - }) - - describe('/pin/rm', () => { - it('unpins recursively by default', (done) => { - api.inject({ - method: 'POST', - url: `/api/v0/pin/rm?arg=${keys.root}` - }, (res) => { - expect(res.statusCode).to.equal(200) - expect(res.result).to.deep.equal({Pins: [keys.root]}) - done() - }) - }) - }) - }) -} From 066c1134ece5708d776543f051535ab5184d5a0e Mon Sep 17 00:00:00 2001 From: jonkrone Date: Thu, 22 Mar 2018 23:22:19 -0400 Subject: [PATCH 07/21] test: first draft of pin-set tests Need to leave computer, this is a checkpoint. test: add sanity test for walkItems and hasChild, clean others These tests are more descriptive than really pushing the impl. I'd love others' thoughts on what else should be hit and how. 
I also need to compare go's pinset impl against ours fix: stop daemons feat: documentation and multihash buffer handling for dag.get fix: lint --- src/core/components/dag.js | 2 + src/core/components/files.js | 11 +- src/core/components/pin-set.js | 184 ++++++++++++++++--------------- src/core/components/pin.js | 3 +- test/core/pin-set.js | 191 +++++++++++++++++++++++++++++++++ test/core/pin.js | 2 +- 6 files changed, 300 insertions(+), 93 deletions(-) create mode 100644 test/core/pin-set.js diff --git a/src/core/components/dag.js b/src/core/components/dag.js index 5c7f749935..d67668083b 100644 --- a/src/core/components/dag.js +++ b/src/core/components/dag.js @@ -35,6 +35,8 @@ module.exports = function dag (self) { } else { path = '/' } + } else if (Buffer.isBuffer(cid)) { + cid = new CID(cid) } self._ipld.get(cid, path, options, callback) diff --git a/src/core/components/files.js b/src/core/components/files.js index 1607d5dcaf..5146118a56 100644 --- a/src/core/components/files.js +++ b/src/core/components/files.js @@ -90,14 +90,13 @@ function normalizeContent (opts, content) { } function pinFile (self, opts, file, cb) { - // since adding paths like `directory/filename` automatically - // adds the directory as well as the file, we can just pin the target file - // and all parent dirs will be pinned indirectly + // Pin a file if it is the root dir of a recursive add or the single file + // of a direct add. const pin = 'pin' in opts ? opts.pin : true - const isTargetFile = !file.path.includes('/') - const shouldPin = pin && isTargetFile && !opts.onlyHash + const isRootDir = !file.path.includes('/') + const shouldPin = pin && isRootDir && !opts.onlyHash if (shouldPin) { - self.pin.add(file.hash, (err) => { + return self.pin.add(file.hash, (err) => { cb(err, file) }) } else { diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js index 469217337d..cfca7078e6 100644 --- a/src/core/components/pin-set.js +++ b/src/core/components/pin-set.js @@ -1,7 +1,6 @@ 'use strict' const multihashes = require('multihashes') -const toB58String = multihashes.toB58String const CID = require('cids') const protobuf = require('protons') const fnv1a = require('fnv1a') @@ -10,6 +9,7 @@ const DAGNode = dagPB.DAGNode const DAGLink = dagPB.DAGLink const varint = require('varint') const once = require('once') +const some = require('async/some') const pbSchema = require('./pin.proto') @@ -19,6 +19,10 @@ const defaultFanout = 256 const maxItems = 8192 const pb = protobuf(pbSchema) +function toB58String (hash) { + return new CID(hash).toBaseEncodedString() +} + function readHeader (rootNode) { // rootNode.data should be a buffer of the format: // < varint(headerLength) | header | itemData... 
> @@ -26,18 +30,18 @@ function readHeader (rootNode) { const hdrLength = varint.decode(rootData) const vBytes = varint.decode.bytes if (vBytes <= 0) { - return { err: 'Invalid Set header length' } + throw new Error('Invalid Set header length') } if (vBytes + hdrLength > rootData.length) { - return { err: 'Impossibly large set header length' } + throw new Error('Impossibly large set header length') } const hdrSlice = rootData.slice(vBytes, hdrLength + vBytes) const header = pb.Set.decode(hdrSlice) if (header.version !== 1) { - return { err: 'Unsupported Set version: ' + header.version } + throw new Error(`Unsupported Set version: ${header.version}`) } if (header.fanout > rootNode.links.length) { - return { err: 'Impossibly large fanout' } + throw new Error('Impossibly large fanout') } return { header: header, @@ -45,50 +49,58 @@ function readHeader (rootNode) { } } +function hash (seed, key) { + const buf = Buffer.alloc(4) + buf.writeUInt32LE(seed, 0) + const data = Buffer.concat([ + buf, Buffer.from(toB58String(key)) + ]) + return fnv1a(data.toString('binary')) +} + exports = module.exports = function (dag) { const pinSet = { // should this be part of `object` API? - hasChild: (root, childhash, callback, _links, _checked, _seen) => { - callback = once(callback) + hasChild: (root, childhash, callback) => { + let seen = {} if (typeof childhash === 'object') { childhash = toB58String(childhash) } - _links = _links || root.links.length - _checked = _checked || 0 - _seen = _seen || {} - if (!root.links.length && _links === _checked) { - // all nodes have been checked - return callback(null, false) - } - root.links.forEach((link) => { - const bs58link = toB58String(link.multihash) - if (bs58link === childhash) { - return callback(null, true) + function searchChildren (root, cb, _links, _checked) { + if (!root.links.length && _links === _checked) { + // all nodes have been checked + return cb(null, false) } - // don't check the same links twice - if (bs58link in _seen) { return } - _seen[bs58link] = true + some(root.links, (link, cb) => { + const bs58Link = toB58String(link.multihash) + if (bs58Link in seen) return + if (bs58Link === childhash) { + return cb(null, true) + } - dag.get(new CID(link.multihash), (err, res) => { - if (err) { return callback(err) } + seen[bs58Link] = true - _checked++ - _links += res.value.links.length - pinSet.hasChild(res.value, childhash, callback, _links, _checked, _seen) - }) - }) + dag.get(link.multihash, (err, res) => { + if (err) return cb(err) + + _checked++ + _links += res.value.links.length + searchChildren(res.value, cb, _links, _checked) + }) + }, cb) + } + + return searchChildren(root, callback, root.links.length, 0) }, storeSet: (keys, logInternalKey, callback) => { - callback = once(callback) - const items = keys.map((key) => { - return { - key: key, - data: null - } - }) + const items = keys.map(key => ({ + key: key, + data: null + })) + pinSet.storeItems(items, logInternalKey, (err, rootNode) => { if (err) { return callback(err) } const opts = { cid: new CID(rootNode.multihash) } @@ -102,11 +114,10 @@ exports = module.exports = function (dag) { storeItems: (items, logInternalKey, callback, _depth, _subcalls, _done) => { callback = once(callback) - const seed = _depth const pbHeader = pb.Set.encode({ version: 1, fanout: defaultFanout, - seed: seed + seed: _depth }) let rootData = Buffer.concat([ Buffer.from(varint.encode(pbHeader.length)), pbHeader @@ -122,6 +133,7 @@ exports = module.exports = function (dag) { const itemLinks = [] const itemData = [] 
const indices = [] + for (let i = 0; i < items.length; i++) { itemLinks.push(new DAGLink('', 1, items[i].key)) itemData.push(items[i].data || Buffer.alloc(0)) @@ -132,65 +144,63 @@ exports = module.exports = function (dag) { if (x) { return x } return (a < b ? -1 : 1) }) - const sortedLinks = indices.map((i) => { return itemLinks[i] }) - const sortedData = indices.map((i) => { return itemData[i] }) + + const sortedLinks = indices.map(i => { return itemLinks[i] }) + const sortedData = indices.map(i => { return itemData[i] }) rootLinks = rootLinks.concat(sortedLinks) rootData = Buffer.concat([rootData].concat(sortedData)) + DAGNode.create(rootData, rootLinks, (err, rootNode) => { if (err) { return callback(err) } return callback(null, rootNode) }) } else { // need to split up the items into multiple root nodes - // (using go-ipfs "wasteful but simple" approach for consistency) + // (using go-ipfs' "wasteful but simple" approach for consistency) _subcalls = _subcalls || 0 _done = _done || 0 const hashed = {} - const hashFn = (seed, key) => { - const buf = Buffer.alloc(4) - buf.writeUInt32LE(seed, 0) - const data = Buffer.concat([ - buf, Buffer.from(toB58String(key)) - ]) - return fnv1a(data.toString('binary')) - } + // items will be distributed among `defaultFanout` bins - for (let i = 0; i < items.length; i++) { - let h = hashFn(seed, items[i].key) % defaultFanout - hashed[h] = hashed[h] || [] - hashed[h].push(items[i]) - } - const storeItemsCb = (err, child) => { - if (err) { return callback(err) } - dag.put(child, (err) => { - if (err) { return callback(err) } - logInternalKey(child.multihash) - rootLinks[this.h] = new DAGLink( - '', child.size, child.multihash - ) - _done++ - if (_done === _subcalls) { - // all finished - DAGNode.create(rootData, rootLinks, (err, rootNode) => { - if (err) { return callback(err) } - return callback(null, rootNode) - }) - } - }) - } + items.forEach(item => { + const bin = hash(_depth, item.key) % defaultFanout + hashed[bin] = hashed[bin] || [] + hashed[bin].push(item) + }) + const hashedKeys = Object.keys(hashed) _subcalls += hashedKeys.length - hashedKeys.forEach(h => { + hashedKeys.forEach(bin => { pinSet.storeItems( - hashed[h], + hashed[bin], logInternalKey, - storeItemsCb.bind({h: h}), + (err, child) => storeItemsCb(err, child, bin), _depth + 1, _subcalls, _done ) }) } + + function storeItemsCb (err, child, bin) { + if (err) { return callback(err) } + const cid = new CID(child._multihash) + dag.put(child, { cid }, (err) => { + if (err) { return callback(err) } + + logInternalKey(child.multihash) + rootLinks[bin] = new DAGLink('', child.size, child.multihash) + _done++ + + if (_done === _subcalls) { + // all finished + DAGNode.create(rootData, rootLinks, (err, rootNode) => { + if (err) { return callback(err) } + return callback(null, rootNode) + }) + } + }) + } }, loadSet: (rootNode, name, logInternalKey, callback) => { @@ -200,13 +210,12 @@ exports = module.exports = function (dag) { return callback(new Error('No link found with name ' + name)) } logInternalKey(link.multihash) - dag.get(new CID(link.multihash), (err, res) => { + + dag.get(link.multihash, (err, res) => { if (err) { return callback(err) } const keys = [] - const walkerFn = (link) => { - keys.push(link.multihash) - } - pinSet.walkItems(res.value, walkerFn, logInternalKey, (err) => { + const walkerFn = link => keys.push(link.multihash) + pinSet.walkItems(res.value, walkerFn, logInternalKey, err => { if (err) { return callback(err) } return callback(null, keys) }) @@ -215,9 +224,12 @@ 
exports = module.exports = function (dag) {
 
   walkItems: (node, walkerFn, logInternalKey, callback) => {
     callback = once(callback)
-    const h = readHeader(node)
-    if (h.err) { return callback(h.err) }
-    const fanout = h.header.fanout
+    let pbh
+    try {
+      pbh = readHeader(node)
+    } catch (err) {
+      return callback(err)
+    }
 
     let subwalkCount = 0
     let finishedCount = 0
@@ -225,21 +237,23 @@ exports = module.exports = function (dag) {
       if (err) { return callback(err) }
       finishedCount++
       if (subwalkCount === finishedCount) {
+        // done walking
         return callback()
       }
     }
 
     for (let i = 0; i < node.links.length; i++) {
       const link = node.links[i]
-      if (i >= fanout) {
+      if (i >= pbh.header.fanout) {
         // item link
-        walkerFn(link, i, h.data)
+        walkerFn(link, i, pbh.data)
       } else {
         // fanout link
         logInternalKey(link.multihash)
         if (!emptyKey.equals(link.multihash)) {
           subwalkCount++
-          dag.get(new CID(link.multihash), (err, res) => {
+
+          dag.get(link.multihash, (err, res) => {
             if (err) { return callback(err) }
             pinSet.walkItems(
               res.value, walkerFn, logInternalKey, walkCb
diff --git a/src/core/components/pin.js b/src/core/components/pin.js
index 33163fab2a..667e5bdc2a 100644
--- a/src/core/components/pin.js
+++ b/src/core/components/pin.js
@@ -12,6 +12,7 @@ const Key = require('interface-datastore').Key
 const each = require('async/each')
 const series = require('async/series')
 const waterfall = require('async/waterfall')
+const parallel = require('async/parallel')
 const until = require('async/until')
 const once = require('once')
 
@@ -57,7 +58,7 @@ module.exports = function pin (self) {
     resolvePaths(self, paths, (err, mhs) => {
       if (err) { return callback(err) }
       // verify that each hash can be pinned
-      series(mhs.map(multihash => cb => {
+      parallel(mhs.map(multihash => cb => {
         const key = toB58String(multihash)
         if (recursive) {
           if (recursivePins.has(key)) {
diff --git a/test/core/pin-set.js b/test/core/pin-set.js
new file mode 100644
index 0000000000..cd1e241294
--- /dev/null
+++ b/test/core/pin-set.js
@@ -0,0 +1,191 @@
+/* eslint max-nested-callbacks: ["error", 8] */
+/* eslint-env mocha */
+'use strict'
+
+const chai = require('chai')
+const dirtyChai = require('dirty-chai')
+const expect = chai.expect
+chai.use(dirtyChai)
+
+const parallelLimit = require('async/parallelLimit')
+const series = require('async/series')
+const { fromB58String } = require('multihashes')
+const { DAGNode } = require('ipld-dag-pb')
+const CID = require('cids')
+
+const IPFS = require('../../src/core')
+const createTempRepo = require('../utils/create-repo-nodejs')
+
+const defaultFanout = 256
+const maxItems = 8192
+
+function noop () {}
+
+/**
+ * Creates `num` DAGNodes, limited to 500 at a time to save memory
+ * @param  {number}   num      the number of nodes to create
+ * @param  {Function} callback node-style callback, result is an Array of all
+ *                             created nodes
+ * @return {void}
+ */
+function createNodes (num, callback) {
+  let items = []
+  for (let i = 0; i < num; i++) {
+    items.push(cb =>
+      createNode(String(i), (err, node) => cb(err, node._multihash))
+    )
+  }
+
+  parallelLimit(items, 500, callback)
+}
+
+function createNode (data, links = [], callback) {
+  if (typeof links === 'function') {
+    callback = links
+    links = []
+  }
+
+  DAGNode.create(data, links, callback)
+}
+
+describe('pinset', function () {
+  let ipfs
+  let pinset
+  let repo
+
+  before(function (done) {
+    this.timeout(20 * 1000)
+    repo = createTempRepo()
+    ipfs = new IPFS({ repo })
+    ipfs.on('ready', () => {
+      pinset = ipfs.pin.set
+      done()
+    })
+  })
+
+  after(done => 
ipfs.stop(done)) + + describe('storeItems', function () { + it('generates a root node with links and hash', function (done) { + const expectedRootHash = 'QmYrQ8xraCsNsvziXhMLgCCcaiLqRGVXcTwsynrJkacDPq' + + createNode('data', (err, node) => { + expect(err).to.not.exist() + const nodeHash = node._multihash + + pinset.storeSet([nodeHash], noop, (err, rootNode) => { + expect(err).to.not.exist() + const node = rootNode.toJSON() + expect(node.multihash).to.eql(expectedRootHash) + expect(node.links).to.have.length(defaultFanout + 1) + + const lastLink = node.links[node.links.length - 1] + const mhash = fromB58String(lastLink.multihash) + expect(mhash).to.eql(nodeHash) + done() + }) + }) + }) + }) + + describe('handles large sets', function () { + it('handles storing items > maxItems', function (done) { + this.timeout(19 * 1000) + const expectedHash = 'QmWKEc6JAq1bKQ6jyFLtoVB5PBApBk1FYjgYekj9sMQgT6' + const count = maxItems + 1 + createNodes(count, (err, nodes) => { + expect(err).to.not.exist() + pinset.storeSet(nodes, noop, (err, node) => { + expect(err).to.not.exist() + + node = node.toJSON() + expect(node.size).to.eql(3183411) + expect(node.links).to.have.length(defaultFanout) + expect(node.multihash).to.eql(expectedHash) + + pinset.loadSet(node, '', noop, (err, loaded) => { + expect(err).to.not.exist() + expect(loaded).to.have.length(30) + const hashes = loaded.map(l => new CID(l).toBaseEncodedString()) + + // just check the first node, assume all are children if successful + pinset.hasChild(node, hashes[0], (err, has) => { + expect(err).to.not.exist() + expect(has).to.eql(true) + done() + }) + }) + }) + }) + }) + + // This test is largely taken from go-ipfs/pin/set_test.go + // It fails after reaching maximum call stack depth but I don't believe it's + // infinite. We need to reference go's pinset impl to make sure + // our sharding behaves correctly, or perhaps this test is misguided + it.skip('stress test: stores items > (maxItems * defaultFanout) + 1', function (done) { + this.timeout(180 * 1000) + + // this value triggers the creation of a recursive shard. 
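+      // ((defaultFanout * maxItems) + 1 = (256 * 8192) + 1 = 2,097,153 pins)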
+      // If the recursive sharding is done improperly, this will result in
+      // an infinite recursion and crash (OOM)
+      const limit = (defaultFanout * maxItems) + 1
+
+      createNodes(limit, (err, nodes) => {
+        expect(err).to.not.exist()
+        series([
+          cb => pinset.storeSet(nodes.slice(0, -1), noop, (err, res) => {
+            expect(err).to.not.exist()
+            cb(null, res)
+          }),
+          cb => pinset.storeSet(nodes, noop, (err, res) => {
+            expect(err).to.not.exist()
+            cb(null, res)
+          })
+        ], (err, rootNodes) => {
+          expect(err).to.not.exist()
+          expect(rootNodes[0].length - rootNodes[1].length).to.eql(2)
+          done()
+        })
+      })
+    })
+  })
+
+  describe('walkItems', function () {
+    it(`fails if node doesn't have a pin-set protobuf header`, function (done) {
+      createNode('datum', (err, node) => {
+        expect(err).to.not.exist()
+
+        pinset.walkItems(node, noop, noop, (err, res) => {
+          expect(err).to.exist()
+          expect(res).to.not.exist()
+          done()
+        })
+      })
+    })
+
+    it('visits all non-fanout links of a root node', function (done) {
+      const seen = []
+      const walk = (link, idx, data) => seen.push({ link, idx, data })
+
+      createNodes(defaultFanout, (err, nodes) => {
+        expect(err).to.not.exist()
+
+        pinset.storeSet(nodes, noop, (err, node) => {
+          expect(err).to.not.exist()
+
+          pinset.walkItems(node, walk, noop, err => {
+            expect(err).to.not.exist()
+            expect(seen).to.have.length(defaultFanout)
+            expect(seen[0].idx).to.eql(defaultFanout)
+            seen.forEach(item => {
+              expect(item.data).to.eql(Buffer.alloc(0))
+              expect(item.link).to.exist()
+            })
+            done()
+          })
+        })
+      })
+    })
+  })
+})
diff --git a/test/core/pin.js b/test/core/pin.js
index 4fa846c05b..525e491883 100644
--- a/test/core/pin.js
+++ b/test/core/pin.js
@@ -58,7 +58,7 @@ describe('pin', function () {
     })
   })
 
-  after(done => repo.teardown(done))
+  after(done => ipfs.stop(done))
 
   /** getIndirectKeys,

From 90dad5798bf6a16e5043c8f9d9640d8dc730dba1 Mon Sep 17 00:00:00 2001
From: jonkrone
Date: Fri, 30 Mar 2018 17:38:30 -0500
Subject: [PATCH 08/21] feat: simplify root dagnode generation for storeItems
 base case

---
 src/core/components/pin-set.js | 27 ++++++++-------------------
 1 file changed, 8 insertions(+), 19 deletions(-)

diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js
index cfca7078e6..bd92b10b2d 100644
--- a/src/core/components/pin-set.js
+++ b/src/core/components/pin-set.js
@@ -129,26 +129,15 @@ exports = module.exports = function (dag) {
       logInternalKey(emptyKey)
 
       if (items.length <= maxItems) {
-        // the items will fit in a single root node
-        const itemLinks = []
-        const itemData = []
-        const indices = []
+        const nodes = items
+          .map(item => ({
+            link: new DAGLink('', 1, item.key),
+            data: item.data || Buffer.alloc(0)
+          }))
+          .sort((a, b) => Buffer.compare(a.link.multihash, b.link.multihash))
 
-        for (let i = 0; i < items.length; i++) {
-          itemLinks.push(new DAGLink('', 1, items[i].key))
-          itemData.push(items[i].data || Buffer.alloc(0))
-          indices.push(i)
-        }
-        indices.sort((a, b) => {
-          const x = Buffer.compare(itemLinks[a].multihash, itemLinks[b].multihash)
-          if (x) { return x }
-          return (a < b ? 
-1 : 1)
-        })
-
-        const sortedLinks = indices.map(i => { return itemLinks[i] })
-        const sortedData = indices.map(i => { return itemData[i] })
-        rootLinks = rootLinks.concat(sortedLinks)
-        rootData = Buffer.concat([rootData].concat(sortedData))
+        rootLinks = rootLinks.concat(nodes.map(item => item.link))
+        rootData = Buffer.concat([rootData].concat(nodes.map(item => item.data)))
 
         DAGNode.create(rootData, rootLinks, (err, rootNode) => {
           if (err) { return callback(err) }

From 17b81eaeac79514ab2dd789a55bdf62ad9a845de Mon Sep 17 00:00:00 2001
From: jonkrone
Date: Sun, 1 Apr 2018 16:40:06 -0500
Subject: [PATCH 09/21] feat: rename vars, fix _depth default value, add docs

fix: pinset.hasChild buffer check

fix: pinset.hasChild seen-link short-circuit and dag.get error handling

feat: hardcode expected length for flush/load tests
---
 src/core/components/pin-set.js | 147 +++++++++++++++++----------------
 src/core/components/pin.js     |  40 ++++-----
 test/core/pin-set.js           |  13 ++-
 test/core/pin.js               |   4 +-
 4 files changed, 102 insertions(+), 102 deletions(-)

diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js
index bd92b10b2d..ee171070f9 100644
--- a/src/core/components/pin-set.js
+++ b/src/core/components/pin-set.js
@@ -62,32 +62,32 @@ exports = module.exports = function (dag) {
   const pinSet = {
     // should this be part of `object` API?
     hasChild: (root, childhash, callback) => {
-      let seen = {}
-      if (typeof childhash === 'object') {
+      const seen = {}
+      if (CID.isCID(childhash) || Buffer.isBuffer(childhash)) {
         childhash = toB58String(childhash)
       }
 
-    function searchChildren (root, cb, _links, _checked) {
-      if (!root.links.length && _links === _checked) {
+      function searchChildren (root, cb, numToCheck, numChecked) {
+        if (!root.links.length && numToCheck === numChecked) {
         // all nodes have been checked
         return cb(null, false)
       }
 
-      some(root.links, (link, cb) => {
-        const bs58Link = toB58String(link.multihash)
-        if (bs58Link in seen) return
+      some(root.links, ({ multihash }, someCb) => {
+        const bs58Link = toB58String(multihash)
+        if (bs58Link in seen) { return someCb(null, false) }
         if (bs58Link === childhash) {
-          return cb(null, true)
+          return someCb(null, true)
         }
 
         seen[bs58Link] = true
 
-        dag.get(link.multihash, (err, res) => {
-          if (err) return cb(err)
+        dag.get(multihash, (err, res) => {
+          if (err) { return someCb(err) }
 
-          _checked++
-          _links += res.value.links.length
-          searchChildren(res.value, cb, _links, _checked)
+          numChecked++
+          numToCheck += res.value.links.length
+          searchChildren(res.value, someCb, numToCheck, numChecked)
         })
       }, cb)
     }
@@ -96,12 +96,12 @@ exports = module.exports = function (dag) {
     },
 
     storeSet: (keys, logInternalKey, callback) => {
-      const items = keys.map(key => ({
+      const pins = keys.map(key => ({
         key: key,
         data: null
       }))
 
-      pinSet.storeItems(items, logInternalKey, (err, rootNode) => {
+      pinSet.storeItems(pins, logInternalKey, (err, rootNode) => {
         if (err) { return callback(err) }
         const opts = { cid: new CID(rootNode.multihash) }
         dag.put(rootNode, opts, (err, cid) => {
@@ -112,61 +112,66 @@ exports = module.exports = function (dag) {
       })
     },
 
-    storeItems: (items, logInternalKey, callback, _depth, _subcalls, _done) => {
+    storeItems: (pins, logInternalKey, callback, _depth = 0, _binsToFill = 0, _binsFilled = 0) => {
       callback = once(callback)
       const pbHeader = pb.Set.encode({
         version: 1,
         fanout: defaultFanout,
         seed: _depth
       })
-      let rootData = Buffer.concat([
+      const headerBuf = Buffer.concat([
         Buffer.from(varint.encode(pbHeader.length)), pbHeader
       ])
-      let rootLinks = []
+      const fanoutLinks = []
       for (let i = 0; i < defaultFanout; i++) {
-        rootLinks.push(new DAGLink('', 1, emptyKey))
+        
fanoutLinks.push(new DAGLink('', 1, emptyKey)) } logInternalKey(emptyKey) - if (items.length <= maxItems) { - const nodes = items + if (pins.length <= maxItems) { + const nodes = pins .map(item => ({ link: new DAGLink('', 1, item.key), data: item.data || Buffer.alloc(0) })) + // sorting makes any ordering of `pins` produce the same DAGNode .sort((a, b) => Buffer.compare(a.link.multihash, b.link.multihash)) - rootLinks = rootLinks.concat(nodes.map(item => item.link)) - rootData = Buffer.concat([rootData].concat(nodes.map(item => item.data))) + const rootLinks = fanoutLinks.concat(nodes.map(item => item.link)) + const rootData = Buffer.concat( + [headerBuf].concat(nodes.map(item => item.data)) + ) DAGNode.create(rootData, rootLinks, (err, rootNode) => { if (err) { return callback(err) } return callback(null, rootNode) }) } else { - // need to split up the items into multiple root nodes + // If the array of pins is > maxItems, we: + // - distribute the pins among `defaultFanout` bins + // - create a DAGNode for each bin + // - add each pin of that bin as a DAGLink + // - create a root DAGNode + // - store each bin as a DAGLink + // - send that root DAGNode via `callback` // (using go-ipfs' "wasteful but simple" approach for consistency) - _subcalls = _subcalls || 0 - _done = _done || 0 - const hashed = {} - - // items will be distributed among `defaultFanout` bins - items.forEach(item => { - const bin = hash(_depth, item.key) % defaultFanout - hashed[bin] = hashed[bin] || [] - hashed[bin].push(item) - }) - const hashedKeys = Object.keys(hashed) - _subcalls += hashedKeys.length - hashedKeys.forEach(bin => { + const bins = pins.reduce((bins, pin) => { + const n = hash(_depth, pin.key) % defaultFanout + bins[n] = n in bins ? bins[n].concat([pin]) : [pin] + return bins + }, {}) + + const binKeys = Object.keys(bins) + _binsToFill += binKeys.length + binKeys.forEach(n => { pinSet.storeItems( - hashed[bin], + bins[n], logInternalKey, - (err, child) => storeItemsCb(err, child, bin), + (err, child) => storeItemsCb(err, child, n), _depth + 1, - _subcalls, - _done + _binsToFill, + _binsFilled ) }) } @@ -174,16 +179,17 @@ exports = module.exports = function (dag) { function storeItemsCb (err, child, bin) { if (err) { return callback(err) } const cid = new CID(child._multihash) + dag.put(child, { cid }, (err) => { if (err) { return callback(err) } logInternalKey(child.multihash) - rootLinks[bin] = new DAGLink('', child.size, child.multihash) - _done++ + fanoutLinks[bin] = new DAGLink('', child.size, child.multihash) + _binsFilled++ - if (_done === _subcalls) { + if (_binsFilled === _binsToFill) { // all finished - DAGNode.create(rootData, rootLinks, (err, rootNode) => { + DAGNode.create(headerBuf, fanoutLinks, (err, rootNode) => { if (err) { return callback(err) } return callback(null, rootNode) }) @@ -203,15 +209,15 @@ exports = module.exports = function (dag) { dag.get(link.multihash, (err, res) => { if (err) { return callback(err) } const keys = [] - const walkerFn = link => keys.push(link.multihash) - pinSet.walkItems(res.value, walkerFn, logInternalKey, err => { + const step = link => keys.push(link.multihash) + pinSet.walkItems(res.value, step, logInternalKey, err => { if (err) { return callback(err) } return callback(null, keys) }) }) }, - walkItems: (node, walkerFn, logInternalKey, callback) => { + walkItems: (node, step, logInternalKey, callback) => { callback = once(callback) let pbh try { @@ -222,38 +228,41 @@ exports = module.exports = function (dag) { let subwalkCount = 0 let finishedCount = 
0 - const walkCb = (err) => { - if (err) { return callback(err) } - finishedCount++ - if (subwalkCount === finishedCount) { - // done walking - return callback() - } - } + node.links.forEach((link, idx) => { + if (idx < pbh.header.fanout) { + // the first pbh.header.fanout links are fanout bins + // if a link is not 'empty', dig into and walk its DAGLinks + const linkHash = link.multihash + logInternalKey(linkHash) - for (let i = 0; i < node.links.length; i++) { - const link = node.links[i] - if (i >= pbh.header.fanout) { - // item link - walkerFn(link, i, pbh.data) - } else { - // fanout link - logInternalKey(link.multihash) - if (!emptyKey.equals(link.multihash)) { + if (!emptyKey.equals(linkHash)) { subwalkCount++ - dag.get(link.multihash, (err, res) => { + // walk the links of this fanout bin + dag.get(linkHash, (err, res) => { if (err) { return callback(err) } - pinSet.walkItems( - res.value, walkerFn, logInternalKey, walkCb - ) + pinSet.walkItems(res.value, step, logInternalKey, walkCb) }) } + } else { + // otherwise, the link is a pin + return step(link, idx, pbh.data) } - } + }) + if (!subwalkCount) { + // reached end of pins and found no non-empty fanout bins return callback() } + + function walkCb (err) { + if (err) { return callback(err) } + finishedCount++ + if (subwalkCount === finishedCount) { + // done walking + return callback() + } + } } } return pinSet diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 667e5bdc2a..828c3beff5 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -65,6 +65,7 @@ module.exports = function pin (self) { // it's already pinned recursively return cb(null, key) } + // entire graph of nested links should be pinned, // so make sure we have all the objects dag._getRecursive(multihash, (err) => { @@ -81,6 +82,7 @@ module.exports = function pin (self) { // already directly pinned return cb(null, key) } + // make sure we have the object dag.get(new CID(multihash), (err) => { if (err) { return cb(err) } @@ -92,18 +94,13 @@ module.exports = function pin (self) { if (err) { return callback(err) } // update the pin sets in memory if (recursive) { - results.forEach(key => { - // recursive pin should replace direct pin - directPins.delete(key) // TODO questionable - recursivePins.add(key) - }) + results.forEach(key => recursivePins.add(key)) } else { results.forEach(key => directPins.add(key)) } // persist updated pin sets to datastore pin.flush((err, root) => { if (err) { return callback(err) } - self.log(`Added pins: ${results}`) return callback(null, results.map(key => ({hash: key}))) }) }) @@ -150,12 +147,8 @@ module.exports = function pin (self) { // update the pin sets in memory results.forEach(key => { - if (recursive) { - if (recursivePins.has(key)) { - recursivePins.delete(key) - } else { - directPins.delete(key) - } + if (recursive && recursivePins.has(key)) { + recursivePins.delete(key) } else { directPins.delete(key) } @@ -271,28 +264,29 @@ module.exports = function pin (self) { isPinnedWithType: promisify((multihash, pinType, callback) => { const key = toB58String(multihash) + const { recursive, direct, internal, all } = pin.types // recursive - if ((pinType === pin.types.recursive || pinType === pin.types.all) && + if ((pinType === recursive || pinType === all) && recursivePins.has(key)) { - return callback(null, {pinned: true, reason: pin.types.recursive}) + return callback(null, {pinned: true, reason: recursive}) } - if ((pinType === pin.types.recursive)) { + if ((pinType === recursive)) { return 
callback(null, {pinned: false}) } // direct - if ((pinType === pin.types.direct || pinType === pin.types.all) && + if ((pinType === direct || pinType === all) && directPins.has(key)) { - return callback(null, {pinned: true, reason: pin.types.direct}) + return callback(null, {pinned: true, reason: direct}) } - if ((pinType === pin.types.direct)) { + if ((pinType === direct)) { return callback(null, {pinned: false}) } // internal - if ((pinType === pin.types.internal || pinType === pin.types.all) && + if ((pinType === internal || pinType === all) && internalPins.has(key)) { - return callback(null, {pinned: true, reason: pin.types.internal}) + return callback(null, {pinned: true, reason: internal}) } - if ((pinType === pin.types.internal)) { + if ((pinType === internal)) { return callback(null, {pinned: false}) } @@ -339,9 +333,7 @@ module.exports = function pin (self) { const rKeys = pin.recursiveKeys() each(rKeys, (multihash, cb) => { dag._getRecursive(multihash, (err, nodes) => { - if (err) { - return cb(err) - } + if (err) { return cb(err) } nodes.forEach(node => { const key = toB58String(node.multihash) diff --git a/test/core/pin-set.js b/test/core/pin-set.js index cd1e241294..70a92c917d 100644 --- a/test/core/pin-set.js +++ b/test/core/pin-set.js @@ -29,7 +29,7 @@ function noop () {} * @return {void} */ function createNodes (num, callback) { - let items = [] + const items = [] for (let i = 0; i < num; i++) { items.push(cb => createNode(String(i), (err, node) => cb(err, node._multihash)) @@ -67,12 +67,11 @@ describe('pinset', function () { describe('storeItems', function () { it('generates a root node with links and hash', function (done) { - const expectedRootHash = 'QmYrQ8xraCsNsvziXhMLgCCcaiLqRGVXcTwsynrJkacDPq' + const expectedRootHash = 'QmcLiSTjcjoVC2iuGbk6A2PVcWV3WvjZT4jxfNis1vjyrR' createNode('data', (err, node) => { expect(err).to.not.exist() const nodeHash = node._multihash - pinset.storeSet([nodeHash], noop, (err, rootNode) => { expect(err).to.not.exist() const node = rootNode.toJSON() @@ -91,7 +90,7 @@ describe('pinset', function () { describe('handles large sets', function () { it('handles storing items > maxItems', function (done) { this.timeout(19 * 1000) - const expectedHash = 'QmWKEc6JAq1bKQ6jyFLtoVB5PBApBk1FYjgYekj9sMQgT6' + const expectedHash = 'QmbvhSy83QWfgLXDpYjDmLWBFfGc8utoqjcXHyj3gYuasT' const count = maxItems + 1 createNodes(count, (err, nodes) => { expect(err).to.not.exist() @@ -99,7 +98,7 @@ describe('pinset', function () { expect(err).to.not.exist() node = node.toJSON() - expect(node.size).to.eql(3183411) + expect(node.size).to.eql(3184696) expect(node.links).to.have.length(defaultFanout) expect(node.multihash).to.eql(expectedHash) @@ -166,7 +165,7 @@ describe('pinset', function () { it('visits all non-fanout links of a root node', function (done) { const seen = [] - const walk = (link, idx, data) => seen.push({ link, idx, data }) + const walker = (link, idx, data) => seen.push({ link, idx, data }) createNodes(defaultFanout, (err, nodes) => { expect(err).to.not.exist() @@ -174,7 +173,7 @@ describe('pinset', function () { pinset.storeSet(nodes, noop, (err, node) => { expect(err).to.not.exist() - pinset.walkItems(node, walk, noop, err => { + pinset.walkItems(node, walker, noop, err => { expect(err).to.not.exist() expect(seen).to.have.length(defaultFanout) expect(seen[0].idx).to.eql(defaultFanout) diff --git a/test/core/pin.js b/test/core/pin.js index 525e491883..79b842a642 100644 --- a/test/core/pin.js +++ b/test/core/pin.js @@ -304,7 +304,7 @@ describe('pin', 
function () { .then(ls => expect(ls.length).to.eql(0)) .then(() => pin.load()) .then(() => pin.ls()) - .then(ls => expect(ls.length).to.be.gt(0)) + .then(ls => expect(ls.length).to.eql(4)) }) }) @@ -315,7 +315,7 @@ describe('pin', function () { it('flushes', function () { return pin.ls() - .then(ls => expect(ls.length).to.be.gt(0)) + .then(ls => expect(ls.length).to.eql(4)) .then(() => { pin.clear() return pin.flush() From 753f6182cea9c975fd3fa74656dcc646101d6a06 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Mon, 2 Apr 2018 19:16:55 -0500 Subject: [PATCH 10/21] feat: parallelize pin.isPinnedWithType --- src/core/components/pin.js | 34 +++++++++++++++------------------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 828c3beff5..a27dcfe5a2 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -13,7 +13,7 @@ const each = require('async/each') const series = require('async/series') const waterfall = require('async/waterfall') const parallel = require('async/parallel') -const until = require('async/until') +const someLimit = require('async/someLimit') const once = require('once') function toB58String (hash) { @@ -292,26 +292,22 @@ module.exports = function pin (self) { // indirect (default) // check each recursive key to see if multihash is under it - const rKeys = pin.recursiveKeys() - let found = false - until( - // search until multihash was found or no more keys to check - () => (found || !rKeys.length), - (cb) => { - const key = rKeys.pop() - dag.get(new CID(key), (err, res) => { - if (err) { return cb(err) } - pin.set.hasChild(res.value, multihash, (err, has) => { - if (err) { return cb(err) } - found = has - // if found, return the hash of the parent recursive pin - cb(null, found ? toB58String(res.value.multihash) : null) - }) + // arbitrary limit, enables handling 1000s of pins. 
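+      // async/someLimit resolves as soon as one iteratee reports a match,
+      // so the scan short-circuits once a containing recursive pin is found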
+ let foundPin + someLimit(pin.recursiveKeys(), 100, (key, cb) => { + dag.get(new CID(key), (err, res) => { + if (err) { return cb(err) } + + pin.set.hasChild(res.value, multihash, (err, has) => { + if (has) { + foundPin = toB58String(res.value.multihash) + } + cb(err, has) }) - }, - (err, result) => { + }) + }, (err, found) => { if (err) { return callback(err) } - return callback(null, {pinned: found, reason: result}) + return callback(null, { pinned: found, reason: foundPin }) } ) }), From 92b3d2fe9212015c8b7a36569d5900c832b6bf02 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Tue, 3 Apr 2018 13:54:20 -0500 Subject: [PATCH 11/21] refactor: refactor pinset.storeItems --- src/core/components/pin-set.js | 159 +++++++++++++++++---------------- 1 file changed, 82 insertions(+), 77 deletions(-) diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js index ee171070f9..dfc5405864 100644 --- a/src/core/components/pin-set.js +++ b/src/core/components/pin-set.js @@ -67,6 +67,8 @@ exports = module.exports = function (dag) { childhash = toB58String(childhash) } + return searchChildren(root, callback, root.links.length, 0) + function searchChildren (root, cb, numToCheck, numChecked) { if (!root.links.length && numToCheck === numChecked) { // all nodes have been checked @@ -91,8 +93,6 @@ exports = module.exports = function (dag) { }) }, cb) } - - return searchChildren(root, callback, root.links.length, 0) }, storeSet: (keys, logInternalKey, callback) => { @@ -112,90 +112,95 @@ exports = module.exports = function (dag) { }) }, - storeItems: (pins, logInternalKey, callback, _depth = 0, _binsToFill = 0, _binsFilled = 0) => { - callback = once(callback) - const pbHeader = pb.Set.encode({ - version: 1, - fanout: defaultFanout, - seed: _depth - }) - const headerBuf = Buffer.concat([ - Buffer.from(varint.encode(pbHeader.length)), pbHeader - ]) - const fanoutLinks = [] - for (let i = 0; i < defaultFanout; i++) { - fanoutLinks.push(new DAGLink('', 1, emptyKey)) - } + storeItems: (items, logInternalKey, callback) => { logInternalKey(emptyKey) - if (pins.length <= maxItems) { - const nodes = pins - .map(item => ({ - link: new DAGLink('', 1, item.key), - data: item.data || Buffer.alloc(0) - })) - // sorting makes any ordering of `pins` produce the same DAGNode - .sort((a, b) => Buffer.compare(a.link.multihash, b.link.multihash)) - - const rootLinks = fanoutLinks.concat(nodes.map(item => item.link)) - const rootData = Buffer.concat( - [headerBuf].concat(nodes.map(item => item.data)) - ) - - DAGNode.create(rootData, rootLinks, (err, rootNode) => { - if (err) { return callback(err) } - return callback(null, rootNode) + return storePins(items, callback) + + function storePins(pins, cb, depth = 0, binsToFill = 0, binsFilled = 0) { + cb = once(cb) + const pbHeader = pb.Set.encode({ + version: 1, + fanout: defaultFanout, + seed: depth }) - } else { - // If the array of pins is > maxItems, we: - // - distribute the pins among `defaultFanout` bins - // - create a DAGNode for each bin - // - add each pin of that bin as a DAGLink - // - create a root DAGNode - // - store each bin as a DAGLink - // - send that root DAGNode via `callback` - // (using go-ipfs' "wasteful but simple" approach for consistency) - - const bins = pins.reduce((bins, pin) => { - const n = hash(_depth, pin.key) % defaultFanout - bins[n] = n in bins ? 
bins[n].concat([pin]) : [pin] - return bins - }, {}) - - const binKeys = Object.keys(bins) - _binsToFill += binKeys.length - binKeys.forEach(n => { - pinSet.storeItems( - bins[n], - logInternalKey, - (err, child) => storeItemsCb(err, child, n), - _depth + 1, - _binsToFill, - _binsFilled + const headerBuf = Buffer.concat([ + Buffer.from(varint.encode(pbHeader.length)), pbHeader + ]) + const fanoutLinks = [] + for (let i = 0; i < defaultFanout; i++) { + fanoutLinks.push(new DAGLink('', 1, emptyKey)) + } + + if (pins.length <= maxItems) { + const nodes = pins + .map(item => ({ + link: new DAGLink('', 1, item.key), + data: item.data || Buffer.alloc(0) + })) + // sorting makes any ordering of `pins` produce the same DAGNode + .sort((a, b) => Buffer.compare(a.link.multihash, b.link.multihash)) + + const rootLinks = fanoutLinks.concat(nodes.map(item => item.link)) + const rootData = Buffer.concat( + [headerBuf].concat(nodes.map(item => item.data)) ) - }) - } - function storeItemsCb (err, child, bin) { - if (err) { return callback(err) } - const cid = new CID(child._multihash) + DAGNode.create(rootData, rootLinks, (err, rootNode) => { + if (err) { return cb(err) } + return cb(null, rootNode) + }) + } else { + // If the array of pins is > maxItems, we: + // - distribute the pins among `defaultFanout` bins + // - create a DAGNode for each bin + // - add each pin of that bin as a DAGLink + // - create a root DAGNode + // - store each bin as a DAGLink + // - send that root DAGNode via `cb` + // (using go-ipfs' "wasteful but simple" approach for consistency) + + const bins = pins.reduce((bins, pin) => { + const n = hash(depth, pin.key) % defaultFanout + bins[n] = n in bins ? bins[n].concat([pin]) : [pin] + return bins + }, {}) + + const binKeys = Object.keys(bins) + binsToFill += binKeys.length + binKeys.forEach(n => { + storePins( + bins[n], + (err, child) => storeChild(err, child, n), + depth + 1, + binsToFill, + binsFilled + ) + }) + } - dag.put(child, { cid }, (err) => { - if (err) { return callback(err) } + function storeChild (err, child, bin) { + if (err) { return cb(err) } - logInternalKey(child.multihash) - fanoutLinks[bin] = new DAGLink('', child.size, child.multihash) - _binsFilled++ + const cid = new CID(child._multihash) + dag.put(child, { cid }, (err) => { + if (err) { return cb(err) } - if (_binsFilled === _binsToFill) { - // all finished - DAGNode.create(headerBuf, fanoutLinks, (err, rootNode) => { - if (err) { return callback(err) } - return callback(null, rootNode) - }) - } - }) + logInternalKey(child.multihash) + fanoutLinks[bin] = new DAGLink('', child.size, child.multihash) + binsFilled++ + + if (binsFilled === binsToFill) { + // all finished + DAGNode.create(headerBuf, fanoutLinks, (err, rootNode) => { + if (err) { return cb(err) } + return cb(null, rootNode) + }) + } + }) + } } + }, loadSet: (rootNode, name, logInternalKey, callback) => { From a5c556c819c3bbabed1f47a3556160df6044bb1f Mon Sep 17 00:00:00 2001 From: jonkrone Date: Tue, 3 Apr 2018 14:08:12 -0500 Subject: [PATCH 12/21] fix: re-add pin interface tests I must have missed a commit during a rebase. 
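For reference, the bin distribution that `storePins` above performs when a set outgrows `maxItems`, as a standalone sketch. This assumes the fnv1a-seeded `hash(seed, key)` helper already defined in pin-set.js; hashing each pin key with the current depth as the seed means every recursion level reshuffles its overflow into fresh bins:

    // toy sketch, not the module itself: spread pins over `defaultFanout`
    // bins by seeded hash; each non-empty bin is then stored recursively
    // with `depth + 1` as the next seed
    const defaultFanout = 256

    function binPins (pins, depth) {
      return pins.reduce((bins, pin) => {
        const n = hash(depth, pin.key) % defaultFanout
        bins[n] = n in bins ? bins[n].concat([pin]) : [pin]
        return bins
      }, {})
    }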
--- test/core/interface/pin.js | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 test/core/interface/pin.js diff --git a/test/core/interface/pin.js b/test/core/interface/pin.js new file mode 100644 index 0000000000..604a5e2440 --- /dev/null +++ b/test/core/interface/pin.js @@ -0,0 +1,35 @@ +/* eslint-env mocha */ +'use strict' + +const test = require('interface-ipfs-core') +const parallel = require('async/parallel') + +const IPFS = require('../../../src') + +const DaemonFactory = require('ipfsd-ctl') +const df = DaemonFactory.create({ type: 'proc', exec: IPFS }) + +const nodes = [] +const common = { + setup: function (callback) { + callback(null, { + spawnNode: (cb) => { + df.spawn({ + initOptions: { bits: 512 } + }, (err, _ipfsd) => { + if (err) { + return cb(err) + } + + nodes.push(_ipfsd) + cb(null, _ipfsd.api) + }) + } + }) + }, + teardown: function (callback) { + parallel(nodes.map((node) => (cb) => node.stop(cb)), callback) + } +} + +test.pin(common) From 8f7de0508543b1798eae8aa5384e1f2bab117d23 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Tue, 3 Apr 2018 14:51:04 -0500 Subject: [PATCH 13/21] fix: lint --- src/core/components/pin-set.js | 3 +-- src/core/components/pin.js | 7 +++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js index dfc5405864..73db4682e3 100644 --- a/src/core/components/pin-set.js +++ b/src/core/components/pin-set.js @@ -117,7 +117,7 @@ exports = module.exports = function (dag) { return storePins(items, callback) - function storePins(pins, cb, depth = 0, binsToFill = 0, binsFilled = 0) { + function storePins (pins, cb, depth = 0, binsToFill = 0, binsFilled = 0) { cb = once(cb) const pbHeader = pb.Set.encode({ version: 1, @@ -200,7 +200,6 @@ exports = module.exports = function (dag) { }) } } - }, loadSet: (rootNode, name, logInternalKey, callback) => { diff --git a/src/core/components/pin.js b/src/core/components/pin.js index a27dcfe5a2..661a148efe 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -306,10 +306,9 @@ module.exports = function pin (self) { }) }) }, (err, found) => { - if (err) { return callback(err) } - return callback(null, { pinned: found, reason: foundPin }) - } - ) + if (err) { return callback(err) } + return callback(null, { pinned: found, reason: foundPin }) + }) }), directKeyStrings: () => Array.from(directPins), From 408d2bcd1448526d169e789cd6d20acaa4db0486 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Thu, 5 Apr 2018 11:37:00 -0500 Subject: [PATCH 14/21] feat: docs, rename resolvePaths, pin.getIndirectKeys now uses eachLimit --- src/core/components/dag.js | 2 +- src/core/components/pin.js | 64 ++++++++++++++++++++++---------------- src/core/utils.js | 12 +++---- test/core/utils.js | 19 ++++++----- 4 files changed, 53 insertions(+), 44 deletions(-) diff --git a/src/core/components/dag.js b/src/core/components/dag.js index d67668083b..ec0966dd10 100644 --- a/src/core/components/dag.js +++ b/src/core/components/dag.js @@ -79,7 +79,7 @@ module.exports = function dag (self) { ) }), - // TODO - move to IPLD resolver and generalize to other IPLD formats + // TODO - use IPLD selectors once they are implemented _getRecursive: promisify((multihash, callback) => { // gets flat array of all DAGNodes in tree given by multihash callback = once(callback) diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 661a148efe..2a57ea3ce9 100644 --- a/src/core/components/pin.js +++ 
b/src/core/components/pin.js @@ -5,17 +5,20 @@ const DAGNode = dagPB.DAGNode const DAGLink = dagPB.DAGLink const CID = require('cids') const pinSet = require('./pin-set') -const resolvePaths = require('../utils').resolvePaths +const resolvePath = require('../utils').resolvePath const promisify = require('promisify-es6') const multihashes = require('multihashes') const Key = require('interface-datastore').Key -const each = require('async/each') +const eachLimit = require('async/eachLimit') const series = require('async/series') const waterfall = require('async/waterfall') const parallel = require('async/parallel') const someLimit = require('async/someLimit') const once = require('once') +// arbitrary limit to number of concurrent dag operations +const concurrencyLimit = 300 + function toB58String (hash) { return new CID(hash).toBaseEncodedString() } @@ -55,7 +58,7 @@ module.exports = function pin (self) { callback = once(callback) const recursive = options ? options.recursive : true - resolvePaths(self, paths, (err, mhs) => { + resolvePath(self.object, paths, (err, mhs) => { if (err) { return callback(err) } // verify that each hash can be pinned parallel(mhs.map(multihash => cb => { @@ -92,12 +95,11 @@ module.exports = function pin (self) { } }), (err, results) => { if (err) { return callback(err) } + // update the pin sets in memory - if (recursive) { - results.forEach(key => recursivePins.add(key)) - } else { - results.forEach(key => directPins.add(key)) - } + const pinset = recursive ? recursivePins : directPins + results.forEach(key => pinset.add(key)) + // persist updated pin sets to datastore pin.flush((err, root) => { if (err) { return callback(err) } @@ -115,8 +117,10 @@ module.exports = function pin (self) { recursive = false } callback = once(callback) - resolvePaths(self, paths, (err, mhs) => { + + resolvePath(self.object, paths, (err, mhs) => { if (err) { return callback(err) } + // verify that each hash can be unpinned series(mhs.map(multihash => cb => { pin.isPinnedWithType(multihash, pin.types.all, (err, res) => { @@ -187,20 +191,21 @@ module.exports = function pin (self) { `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` )) } + if (paths) { // check the pinned state of specific hashes - resolvePaths(self, paths, (err, mhs) => { + resolvePath(self.object, paths, (err, mhs) => { if (err) { return callback(err) } + series(mhs.map(multihash => cb => { pin.isPinnedWithType(multihash, pin.types.all, (err, res) => { if (err) { return cb(err) } const { pinned, reason } = res const key = toB58String(multihash) if (!pinned) { - return cb(new Error( - `Path ${key} is not pinned` - )) + return cb(new Error(`Path ${key} is not pinned`)) } + switch (reason) { case pin.types.direct: case pin.types.recursive: @@ -262,31 +267,28 @@ module.exports = function pin (self) { pin.isPinnedWithType(multihash, pin.types.all, callback) }), - isPinnedWithType: promisify((multihash, pinType, callback) => { + isPinnedWithType: promisify((multihash, type, callback) => { const key = toB58String(multihash) const { recursive, direct, internal, all } = pin.types // recursive - if ((pinType === recursive || pinType === all) && - recursivePins.has(key)) { + if ((type === recursive || type === all) && recursivePins.has(key)) { return callback(null, {pinned: true, reason: recursive}) } - if ((pinType === recursive)) { + if ((type === recursive)) { return callback(null, {pinned: false}) } // direct - if ((pinType === direct || pinType === all) && - directPins.has(key)) { + if ((type === 
direct || type === all) && directPins.has(key)) { return callback(null, {pinned: true, reason: direct}) } - if ((pinType === direct)) { + if ((type === direct)) { return callback(null, {pinned: false}) } // internal - if ((pinType === internal || pinType === all) && - internalPins.has(key)) { + if ((type === internal || type === all) && internalPins.has(key)) { return callback(null, {pinned: true, reason: internal}) } - if ((pinType === internal)) { + if ((type === internal)) { return callback(null, {pinned: false}) } @@ -294,7 +296,7 @@ module.exports = function pin (self) { // check each recursive key to see if multihash is under it // arbitrary limit, enables handling 1000s of pins. let foundPin - someLimit(pin.recursiveKeys(), 100, (key, cb) => { + someLimit(pin.recursiveKeys(), concurrencyLimit, (key, cb) => { dag.get(new CID(key), (err, res) => { if (err) { return cb(err) } @@ -326,14 +328,14 @@ module.exports = function pin (self) { getIndirectKeys: promisify(callback => { const indirectKeys = new Set() const rKeys = pin.recursiveKeys() - each(rKeys, (multihash, cb) => { + eachLimit(rKeys, concurrencyLimit, (multihash, cb) => { dag._getRecursive(multihash, (err, nodes) => { if (err) { return cb(err) } nodes.forEach(node => { const key = toB58String(node.multihash) if (!recursivePins.has(key)) { - // not already pinned recursively or directly + // not already pinned recursively indirectKeys.add(key) } }) @@ -357,25 +359,32 @@ module.exports = function pin (self) { cb() } } + waterfall([ // create link to direct keys node (cb) => pin.set.storeSet(pin.directKeys(), logInternalKey, cb), (dRoot, cb) => DAGLink.create(pin.types.direct, dRoot.size, dRoot.multihash, cb), (dLink, cb) => handle.put('dLink', dLink, cb), + // create link to recursive keys node (cb) => pin.set.storeSet(pin.recursiveKeys(), logInternalKey, cb), (rRoot, cb) => DAGLink.create(pin.types.recursive, rRoot.size, rRoot.multihash, cb), (rLink, cb) => handle.put('rLink', rLink, cb), + // the pin-set nodes link to an empty node, so make sure it's added to dag (cb) => DAGNode.create(Buffer.alloc(0), cb), (empty, cb) => dag.put(empty, {cid: new CID(empty.multihash)}, cb), + // create root node with links to direct and recursive nodes (cid, cb) => DAGNode.create(Buffer.alloc(0), [handle.dLink, handle.rLink], cb), (root, cb) => handle.put('root', root, cb), + // add the root node to dag (cb) => dag.put(handle.root, {cid: new CID(handle.root.multihash)}, cb), + // update the internal pin set (cid, cb) => cb(null, logInternalKey(handle.root.multihash)), + // save serialized root to datastore under a consistent key (_, cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests (_, cb) => repo.datastore.put(pinDataStoreKey, handle.root.multihash, cb) @@ -387,7 +396,7 @@ module.exports = function pin (self) { }) }), - load: promisify((callback) => { + load: promisify(callback => { const newInternalPins = new Set() const logInternalKey = (mh) => newInternalPins.add(toB58String(mh)) const handle = { @@ -396,6 +405,7 @@ module.exports = function pin (self) { cb() } } + waterfall([ (cb) => repo.closed ? 
repo.datastore.open(cb) : cb(null, null), // hack for CLI tests (_, cb) => repo.datastore.has(pinDataStoreKey, cb), diff --git a/src/core/utils.js b/src/core/utils.js index 4eaa83f746..f8e29e1471 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -49,13 +49,13 @@ function parseIpfsPath (ipfsPath) { * - multihash Buffer * - Arrays of the above * - * @param {IPFS} ipfs the IPFS node - * @param {Described above} ipfsPaths A single or collection of ipfs-paths + * @param {IPFS} objectAPI The IPFS object api + * @param {Described above} ipfsPaths A single or collection of ipfs-paths * @param {Function} callback res is Array * if no callback is passed, returns a Promise * @return {Promise|void} */ -const resolvePaths = promisify(function (ipfs, ipfsPaths, callback) { +const resolvePath = promisify(function (objectAPI, ipfsPaths, callback) { if (!Array.isArray(ipfsPaths)) { ipfsPaths = [ipfsPaths] } @@ -83,7 +83,7 @@ const resolvePaths = promisify(function (ipfs, ipfsPaths, callback) { return cb(null, rootHash) } - ipfs.object.get(rootHash, follow.bind(null, rootLinks)) + objectAPI.get(rootHash, follow.bind(null, rootLinks)) // recursively follow named links to the target node function follow (links, err, obj) { @@ -103,10 +103,10 @@ const resolvePaths = promisify(function (ipfs, ipfsPaths, callback) { )) } - ipfs.object.get(nextObj.multihash, follow.bind(null, links.slice(1))) + objectAPI.get(nextObj.multihash, follow.bind(null, links.slice(1))) } }, callback) }) exports.parseIpfsPath = parseIpfsPath -exports.resolvePaths = resolvePaths +exports.resolvePath = resolvePath diff --git a/test/core/utils.js b/test/core/utils.js index d706511f1f..13cffd264d 100644 --- a/test/core/utils.js +++ b/test/core/utils.js @@ -59,7 +59,7 @@ describe('utils', () => { }) }) - describe('resolvePaths', function () { + describe('resolvePath', function () { this.timeout(80 * 1000) const fixtures = [ 'test/fixtures/planets/mercury/wiki.md', @@ -80,12 +80,10 @@ describe('utils', () => { node.once('ready', () => node.files.add(fixtures, done)) }) - after(done => { - repo.teardown(done) - }) + after(done => node.stop(done)) it('handles base58 hash format', (done) => { - utils.resolvePaths(node, rootHash, (err, hashes) => { + utils.resolvePath(node.object, rootHash, (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(1) expect(hashes[0]).to.deep.equal(rootMultihash) @@ -94,7 +92,7 @@ describe('utils', () => { }) it('handles multihash format', (done) => { - utils.resolvePaths(node, aboutMultihash, (err, hashes) => { + utils.resolvePath(node.object, aboutMultihash, (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(1) expect(hashes[0]).to.deep.equal(aboutMultihash) @@ -104,7 +102,7 @@ describe('utils', () => { it('handles ipfs paths format', function (done) { this.timeout(200 * 1000) - utils.resolvePaths(node, aboutPath, (err, hashes) => { + utils.resolvePath(node.object, aboutPath, (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(1) expect(hashes[0]).to.deep.equal(aboutMultihash) @@ -113,7 +111,8 @@ describe('utils', () => { }) it('handles an array', (done) => { - utils.resolvePaths(node, [rootHash, rootPath, rootMultihash], (err, hashes) => { + const paths = [rootHash, rootPath, rootMultihash] + utils.resolvePath(node.object, paths, (err, hashes) => { expect(err).to.not.exist() expect(hashes.length).to.equal(3) expect(hashes[0]).to.deep.equal(rootMultihash) @@ -124,14 +123,14 @@ describe('utils', () => { }) it('should error on invalid 
hashes', function (done) {
-      utils.resolvePaths(node, '/ipfs/asdlkjahsdfkjahsdfd', err => {
+      utils.resolvePath(node.object, '/ipfs/asdlkjahsdfkjahsdfd', err => {
         expect(err).to.exist()
         done()
       })
     })

     it(`should error when a link doesn't exist`, function (done) {
-      utils.resolvePaths(node, `${aboutPath}/fusion`, err => {
+      utils.resolvePath(node.object, `${aboutPath}/fusion`, err => {
         expect(err.message).to.include(
           `no link named "fusion" under QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q`
         )

From f77a3796d9d8ff3dc3911b36aa4fb7b59ecbe986 Mon Sep 17 00:00:00 2001
From: jonkrone
Date: Mon, 14 May 2018 18:32:44 -0400
Subject: [PATCH 15/21] chore: rebase a month of changes, resolve minor issues
 from that

fix: yargs argument naming for pin cli commands
fix: convert file multihashes to a b58 string
fix: another way of checking for CID-ness
fix: lint
fix: toB58String handles non-buffers
fix: key-exchange core tests now shut down the daemon.
---
 src/cli/commands/pin/add.js  |  2 +-
 src/cli/commands/pin/ls.js   |  2 +-
 src/cli/commands/pin/rm.js   |  2 +-
 src/core/components/files.js |  4 +---
 src/core/components/pin.js   |  8 ++++++--
 src/core/utils.js            | 15 ++++++++-------
 test/core/key-exchange.js    |  2 +-
 7 files changed, 19 insertions(+), 16 deletions(-)

diff --git a/src/cli/commands/pin/add.js b/src/cli/commands/pin/add.js
index 3acf438022..bfb0a0b9a3 100644
--- a/src/cli/commands/pin/add.js
+++ b/src/cli/commands/pin/add.js
@@ -3,7 +3,7 @@
 const print = require('../../utils').print

 module.exports = {
-  command: 'add <ipfs-path>',
+  command: 'add <ipfsPath...>',

   describe: 'Pins object to local storage.',

diff --git a/src/cli/commands/pin/ls.js b/src/cli/commands/pin/ls.js
index 04b687ace5..e1a65cb746 100644
--- a/src/cli/commands/pin/ls.js
+++ b/src/cli/commands/pin/ls.js
@@ -4,7 +4,7 @@
 const print = require('../../utils').print

 module.exports = {
   // bracket syntax with '...' 
tells yargs to optionally accept a list
-  command: 'ls [ipfs-path...]',
+  command: 'ls [ipfsPath...]',

   describe: 'List objects pinned to local storage.',

diff --git a/src/cli/commands/pin/rm.js b/src/cli/commands/pin/rm.js
index b19454d268..acbadf8a96 100644
--- a/src/cli/commands/pin/rm.js
+++ b/src/cli/commands/pin/rm.js
@@ -3,7 +3,7 @@
 const print = require('../../utils').print

 module.exports = {
-  command: 'rm <ipfs-path>',
+  command: 'rm <ipfsPath...>',

   describe: 'Removes the pinned object from local storage.',

diff --git a/src/core/components/files.js b/src/core/components/files.js
index 5146118a56..9d5d44ca82 100644
--- a/src/core/components/files.js
+++ b/src/core/components/files.js
@@ -96,9 +96,7 @@ function pinFile (self, opts, file, cb) {
   const isRootDir = !file.path.includes('/')
   const shouldPin = pin && isRootDir && !opts.onlyHash
   if (shouldPin) {
-    return self.pin.add(file.hash, (err) => {
-      cb(err, file)
-    })
+    return self.pin.add(file.hash, err => cb(err, file))
   } else {
     cb(null, file)
   }
diff --git a/src/core/components/pin.js b/src/core/components/pin.js
index 2a57ea3ce9..8fb8325d54 100644
--- a/src/core/components/pin.js
+++ b/src/core/components/pin.js
@@ -19,7 +19,11 @@
 const once = require('once')

 // arbitrary limit to number of concurrent dag operations
 const concurrencyLimit = 300

-function toB58String (hash) {
+function toB58String (hash, tst) {
+  if (Buffer.isBuffer(hash)) {
+    hash = multihashes.toB58String(hash)
+  }
+
   return new CID(hash).toBaseEncodedString()
 }

@@ -71,7 +75,7 @@ module.exports = function pin (self) {
           // entire graph of nested links should be pinned,
           // so make sure we have all the objects
-          dag._getRecursive(multihash, (err) => {
+          dag._getRecursive(key, (err) => {
             if (err) { return cb(err) }
             // found all objects, we can add the pin
             return cb(null, key)
diff --git a/src/core/utils.js b/src/core/utils.js
index f8e29e1471..977377a628 100644
--- a/src/core/utils.js
+++ b/src/core/utils.js
@@ -1,9 +1,9 @@
 'use strict'

+const CID = require('cids')
 const multihashes = require('multihashes')
 const promisify = require('promisify-es6')
 const map = require('async/map')
-const isIPFS = require('is-ipfs')

 exports.OFFLINE_ERROR = 'This command must be run in online mode. 
Try running \' * @throws on an invalid @param ipfsPath */ function parseIpfsPath (ipfsPath) { - const matched = ipfsPath.match(/^(?:\/ipfs\/)?([^/]+(?:\/[^/]+)*)\/?$/) const invalidPathErr = new Error('invalid ipfs ref path') + + const matched = ipfsPath.match(/^(?:\/ipfs\/)?([^/]+(?:\/[^/]+)*)\/?$/) if (!matched) { throw invalidPathErr } const [hash, ...links] = matched[1].split('/') - if (isIPFS.multihash(hash)) { - return { - hash: hash, - links: links + try { + if (CID.isCID(new CID(hash))) { + return { hash, links } } - } else { + throw invalidPathErr + } catch (err) { throw invalidPathErr } } diff --git a/test/core/key-exchange.js b/test/core/key-exchange.js index 86a2781ad6..5628fc5b71 100644 --- a/test/core/key-exchange.js +++ b/test/core/key-exchange.js @@ -28,7 +28,7 @@ describe('key exchange', () => { ipfs.on('ready', () => done()) }) - after((done) => repo.teardown(done)) + after((done) => ipfs.stop(done)) it('exports', (done) => { ipfs.key.export('self', passwordPem, (err, pem) => { From e3013ee53f46e7397e23e75528ed723ea278a8c8 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Fri, 25 May 2018 11:53:37 -0400 Subject: [PATCH 16/21] chore: update big.js version --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 84d66557a2..e426b70d7e 100644 --- a/package.json +++ b/package.json @@ -88,7 +88,7 @@ }, "dependencies": { "async": "^2.6.0", - "big.js": "^5.0.3", + "big.js": "^5.1.2", "binary-querystring": "~0.1.2", "bl": "^1.2.2", "boom": "^7.2.0", From 6c29a3d828ecd69240fedd63cad5fed32f452b6b Mon Sep 17 00:00:00 2001 From: jonkrone Date: Tue, 29 May 2018 12:04:05 -0500 Subject: [PATCH 17/21] revert: do not pin content added with a non-default hash algorithm --- src/cli/commands/pin/ls.js | 2 +- src/core/components/dag.js | 3 +++ src/core/components/files.js | 2 +- src/core/components/pin.js | 11 ++++------- test/cli/commands.js | 2 +- test/cli/files.js | 1 + 6 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/cli/commands/pin/ls.js b/src/cli/commands/pin/ls.js index e1a65cb746..a59958942d 100644 --- a/src/cli/commands/pin/ls.js +++ b/src/cli/commands/pin/ls.js @@ -29,7 +29,7 @@ module.exports = { const type = argv.type const quiet = argv.quiet - argv.ipfs.pin.ls(paths, { type: type }, (err, results) => { + argv.ipfs.pin.ls(paths, { type }, (err, results) => { if (err) { throw err } results.forEach((res) => { let line = res.hash diff --git a/src/core/components/dag.js b/src/core/components/dag.js index ec0966dd10..c933c3505e 100644 --- a/src/core/components/dag.js +++ b/src/core/components/dag.js @@ -83,14 +83,17 @@ module.exports = function dag (self) { _getRecursive: promisify((multihash, callback) => { // gets flat array of all DAGNodes in tree given by multihash callback = once(callback) + self.dag.get(new CID(multihash), (err, res) => { if (err) { return callback(err) } const links = res.value.links const nodes = [res.value] + // leaf case if (!links.length) { return callback(null, nodes) } + // branch case links.forEach(link => { self.dag._getRecursive(link.multihash, (err, subNodes) => { diff --git a/src/core/components/files.js b/src/core/components/files.js index 9d5d44ca82..f69868bd77 100644 --- a/src/core/components/files.js +++ b/src/core/components/files.js @@ -94,7 +94,7 @@ function pinFile (self, opts, file, cb) { // of a direct add. const pin = 'pin' in opts ? 
opts.pin : true const isRootDir = !file.path.includes('/') - const shouldPin = pin && isRootDir && !opts.onlyHash + const shouldPin = pin && isRootDir && !opts.onlyHash && !opts.hashAlg if (shouldPin) { return self.pin.add(file.hash, err => cb(err, file)) } else { diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 8fb8325d54..bb31a73772 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -19,11 +19,7 @@ const once = require('once') // arbitrary limit to number of concurrent dag operations const concurrencyLimit = 300 -function toB58String (hash, tst) { - if (Buffer.isBuffer(hash)) { - hash = multihashes.toB58String(hash) - } - +function toB58String (hash) { return new CID(hash).toBaseEncodedString() } @@ -64,6 +60,7 @@ module.exports = function pin (self) { resolvePath(self.object, paths, (err, mhs) => { if (err) { return callback(err) } + // verify that each hash can be pinned parallel(mhs.map(multihash => cb => { const key = toB58String(multihash) @@ -425,8 +422,8 @@ module.exports = function pin (self) { return callback(err) } if (dKeys) { - directPins = new Set(dKeys.map(mh => toB58String(mh))) - recursivePins = new Set(handle.rKeys.map(mh => toB58String(mh))) + directPins = new Set(dKeys.map(toB58String)) + recursivePins = new Set(handle.rKeys.map(toB58String)) logInternalKey(handle.root.multihash) internalPins = newInternalPins } diff --git a/test/cli/commands.js b/test/cli/commands.js index 37ecc0ae74..7a8502bc4c 100644 --- a/test/cli/commands.js +++ b/test/cli/commands.js @@ -4,7 +4,7 @@ const expect = require('chai').expect const runOnAndOff = require('../utils/on-and-off') -const commandCount = 77 +const commandCount = 78 describe('commands', () => runOnAndOff((thing) => { let ipfs diff --git a/test/cli/files.js b/test/cli/files.js index 123fd62c03..dd7911307a 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -316,6 +316,7 @@ describe('files', () => runOnAndOff((thing) => { }) it('add pins by default', function () { + this.timeout(10 * 1000) const filePath = path.join(os.tmpdir(), hat()) const content = String(Math.random()) fs.writeFileSync(filePath, content) From a1b9b93c8eb9f0b4820dc6b19358668501110eb9 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Thu, 31 May 2018 13:24:04 -0500 Subject: [PATCH 18/21] revert: internalKey recording --- src/core/components/pin-set.js | 20 +++++++------------- src/core/components/pin.js | 34 +++++----------------------------- test/core/pin-set.js | 23 ++++++++++++----------- 3 files changed, 24 insertions(+), 53 deletions(-) diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js index 73db4682e3..ffe9e7e2c6 100644 --- a/src/core/components/pin-set.js +++ b/src/core/components/pin-set.js @@ -95,26 +95,23 @@ exports = module.exports = function (dag) { } }, - storeSet: (keys, logInternalKey, callback) => { + storeSet: (keys, callback) => { const pins = keys.map(key => ({ key: key, data: null })) - pinSet.storeItems(pins, logInternalKey, (err, rootNode) => { + pinSet.storeItems(pins, (err, rootNode) => { if (err) { return callback(err) } const opts = { cid: new CID(rootNode.multihash) } dag.put(rootNode, opts, (err, cid) => { if (err) { return callback(err) } - logInternalKey(rootNode.multihash) callback(null, rootNode) }) }) }, - storeItems: (items, logInternalKey, callback) => { - logInternalKey(emptyKey) - + storeItems: (items, callback) => { return storePins(items, callback) function storePins (pins, cb, depth = 0, binsToFill = 0, binsFilled = 0) { @@ -186,7 +183,6 @@ 
exports = module.exports = function (dag) { dag.put(child, { cid }, (err) => { if (err) { return cb(err) } - logInternalKey(child.multihash) fanoutLinks[bin] = new DAGLink('', child.size, child.multihash) binsFilled++ @@ -202,26 +198,25 @@ exports = module.exports = function (dag) { } }, - loadSet: (rootNode, name, logInternalKey, callback) => { + loadSet: (rootNode, name, callback) => { callback = once(callback) const link = rootNode.links.find(l => l.name === name) if (!link) { return callback(new Error('No link found with name ' + name)) } - logInternalKey(link.multihash) dag.get(link.multihash, (err, res) => { if (err) { return callback(err) } const keys = [] const step = link => keys.push(link.multihash) - pinSet.walkItems(res.value, step, logInternalKey, err => { + pinSet.walkItems(res.value, step, err => { if (err) { return callback(err) } return callback(null, keys) }) }) }, - walkItems: (node, step, logInternalKey, callback) => { + walkItems: (node, step, callback) => { callback = once(callback) let pbh try { @@ -237,7 +232,6 @@ exports = module.exports = function (dag) { // the first pbh.header.fanout links are fanout bins // if a link is not 'empty', dig into and walk its DAGLinks const linkHash = link.multihash - logInternalKey(linkHash) if (!emptyKey.equals(linkHash)) { subwalkCount++ @@ -245,7 +239,7 @@ exports = module.exports = function (dag) { // walk the links of this fanout bin dag.get(linkHash, (err, res) => { if (err) { return callback(err) } - pinSet.walkItems(res.value, step, logInternalKey, walkCb) + pinSet.walkItems(res.value, step, walkCb) }) } } else { diff --git a/src/core/components/pin.js b/src/core/components/pin.js index bb31a73772..7edbbe7872 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -26,7 +26,6 @@ function toB58String (hash) { module.exports = function pin (self) { let directPins = new Set() let recursivePins = new Set() - let internalPins = new Set() const pinDataStoreKey = new Key('/local/pins') @@ -38,14 +37,12 @@ module.exports = function pin (self) { direct: 'direct', recursive: 'recursive', indirect: 'indirect', - internal: 'internal', all: 'all' }, clear: () => { directPins.clear() recursivePins.clear() - internalPins.clear() }, set: pinSet(dag), @@ -270,7 +267,7 @@ module.exports = function pin (self) { isPinnedWithType: promisify((multihash, type, callback) => { const key = toB58String(multihash) - const { recursive, direct, internal, all } = pin.types + const { recursive, direct, all } = pin.types // recursive if ((type === recursive || type === all) && recursivePins.has(key)) { return callback(null, {pinned: true, reason: recursive}) @@ -285,13 +282,6 @@ module.exports = function pin (self) { if ((type === direct)) { return callback(null, {pinned: false}) } - // internal - if ((type === internal || type === all) && internalPins.has(key)) { - return callback(null, {pinned: true, reason: internal}) - } - if ((type === internal)) { - return callback(null, {pinned: false}) - } // indirect (default) // check each recursive key to see if multihash is under it @@ -318,14 +308,10 @@ module.exports = function pin (self) { recursiveKeyStrings: () => Array.from(recursivePins), - internalKeyStrings: () => Array.from(internalPins), - directKeys: () => pin.directKeyStrings().map(key => multihashes.fromB58String(key)), recursiveKeys: () => pin.recursiveKeyStrings().map(key => multihashes.fromB58String(key)), - internalKeys: () => pin.internalKeyStrings().map(key => multihashes.fromB58String(key)), - getIndirectKeys: 
promisify(callback => { const indirectKeys = new Set() const rKeys = pin.recursiveKeys() @@ -352,8 +338,6 @@ module.exports = function pin (self) { // encodes and writes pin key sets to the datastore // each key set will be stored as a DAG node, and a root node will link to both flush: promisify((callback) => { - const newInternalPins = new Set() - const logInternalKey = (mh) => newInternalPins.add(toB58String(mh)) const handle = { put: (k, v, cb) => { handle[k] = v @@ -363,12 +347,12 @@ module.exports = function pin (self) { waterfall([ // create link to direct keys node - (cb) => pin.set.storeSet(pin.directKeys(), logInternalKey, cb), + (cb) => pin.set.storeSet(pin.directKeys(), cb), (dRoot, cb) => DAGLink.create(pin.types.direct, dRoot.size, dRoot.multihash, cb), (dLink, cb) => handle.put('dLink', dLink, cb), // create link to recursive keys node - (cb) => pin.set.storeSet(pin.recursiveKeys(), logInternalKey, cb), + (cb) => pin.set.storeSet(pin.recursiveKeys(), cb), (rRoot, cb) => DAGLink.create(pin.types.recursive, rRoot.size, rRoot.multihash, cb), (rLink, cb) => handle.put('rLink', rLink, cb), @@ -383,23 +367,17 @@ module.exports = function pin (self) { // add the root node to dag (cb) => dag.put(handle.root, {cid: new CID(handle.root.multihash)}, cb), - // update the internal pin set - (cid, cb) => cb(null, logInternalKey(handle.root.multihash)), - // save serialized root to datastore under a consistent key (_, cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests (_, cb) => repo.datastore.put(pinDataStoreKey, handle.root.multihash, cb) ], (err, result) => { if (err) { return callback(err) } self.log(`Flushed pins with root: ${handle.root}.`) - internalPins = newInternalPins return callback(null, handle.root) }) }), load: promisify(callback => { - const newInternalPins = new Set() - const logInternalKey = (mh) => newInternalPins.add(toB58String(mh)) const handle = { put: (k, v, cb) => { handle[k] = v @@ -414,9 +392,9 @@ module.exports = function pin (self) { (cb) => repo.datastore.get(pinDataStoreKey, cb), (mh, cb) => dag.get(new CID(mh), cb), (root, cb) => handle.put('root', root.value, cb), - (cb) => pin.set.loadSet(handle.root, pin.types.recursive, logInternalKey, cb), + (cb) => pin.set.loadSet(handle.root, pin.types.recursive, cb), (rKeys, cb) => handle.put('rKeys', rKeys, cb), - (cb) => pin.set.loadSet(handle.root, pin.types.direct, logInternalKey, cb) + (cb) => pin.set.loadSet(handle.root, pin.types.direct, cb) ], (err, dKeys) => { if (err && err.message !== 'No pins to load') { return callback(err) @@ -424,8 +402,6 @@ module.exports = function pin (self) { if (dKeys) { directPins = new Set(dKeys.map(toB58String)) recursivePins = new Set(handle.rKeys.map(toB58String)) - logInternalKey(handle.root.multihash) - internalPins = newInternalPins } self.log('Loaded pins from the datastore') return callback() diff --git a/test/core/pin-set.js b/test/core/pin-set.js index 70a92c917d..c4ed110131 100644 --- a/test/core/pin-set.js +++ b/test/core/pin-set.js @@ -19,8 +19,6 @@ const createTempRepo = require('../utils/create-repo-nodejs') const defaultFanout = 256 const maxItems = 8192 -function noop () {} - /** * Creates @param num DAGNodes, limited to 500 at a time to save memory * @param {[type]} num the number of nodes to create @@ -63,7 +61,10 @@ describe('pinset', function () { }) }) - after(done => ipfs.stop(done)) + after(function (done) { + this.timeout(10 * 1000) + ipfs.stop(done) + }) describe('storeItems', function () { it('generates a root node with 
links and hash', function (done) {
@@ -72,7 +73,7 @@ describe('pinset', function () {
       createNode('data', (err, node) => {
         expect(err).to.not.exist()
         const nodeHash = node._multihash
-        pinset.storeSet([nodeHash], noop, (err, rootNode) => {
+        pinset.storeSet([nodeHash], (err, rootNode) => {
           expect(err).to.not.exist()
           const node = rootNode.toJSON()
           expect(node.multihash).to.eql(expectedRootHash)
@@ -94,7 +95,7 @@ describe('pinset', function () {
       const count = maxItems + 1
       createNodes(count, (err, nodes) => {
         expect(err).to.not.exist()
-        pinset.storeSet(nodes, noop, (err, node) => {
+        pinset.storeSet(nodes, (err, node) => {
           expect(err).to.not.exist()
           node = node.toJSON()
@@ -102,7 +103,7 @@ describe('pinset', function () {
           expect(node.links).to.have.length(defaultFanout)
           expect(node.multihash).to.eql(expectedHash)
-          pinset.loadSet(node, '', noop, (err, loaded) => {
+          pinset.loadSet(node, '', (err, loaded) => {
             expect(err).to.not.exist()
             expect(loaded).to.have.length(30)
             const hashes = loaded.map(l => new CID(l).toBaseEncodedString())
@@ -133,11 +134,11 @@ describe('pinset', function () {
       createNodes(limit, (err, nodes) => {
         expect(err).to.not.exist()
         series([
-          cb => pinset.storeSet(nodes.slice(0, -1), noop, (err, res) => {
+          cb => pinset.storeSet(nodes.slice(0, -1), (err, res) => {
             expect(err).to.not.exist()
             cb(null, res)
           }),
-          cb => pinset.storeSet(nodes, noop, (err, res) => {
+          cb => pinset.storeSet(nodes, (err, res) => {
             expect(err).to.not.exist()
             cb(null, res)
           })
@@ -155,7 +156,7 @@ describe('pinset', function () {
       createNode('datum', (err, node) => {
         expect(err).to.not.exist()
-        pinset.walkItems(node, noop, noop, (err, res) => {
+        pinset.walkItems(node, () => {}, (err, res) => {
           expect(err).to.exist()
           expect(res).to.not.exist()
           done()
@@ -170,10 +171,10 @@ describe('pinset', function () {
       createNodes(defaultFanout, (err, nodes) => {
         expect(err).to.not.exist()
-        pinset.storeSet(nodes, noop, (err, node) => {
+        pinset.storeSet(nodes, (err, node) => {
           expect(err).to.not.exist()
-          pinset.walkItems(node, walker, noop, err => {
+          pinset.walkItems(node, walker, err => {
             expect(err).to.not.exist()
             expect(seen).to.have.length(defaultFanout)
             expect(seen[0].idx).to.eql(defaultFanout)

From 10f40ca9a6237c4d4f5e2890d10affa634399709 Mon Sep 17 00:00:00 2001
From: jonkrone
Date: Fri, 8 Jun 2018 16:32:31 -0500
Subject: [PATCH 19/21] refactor: use lodash.flattenDeep

refactor: pinset.hasChild -> pinset.hasDescendant
fix: invoke someCb if we've seen the hash before
refactor: async patterns in dag._getRecursive
refactor: pinset.hasDescendant
refactor: pinset.storeItems async patterns
refactor: pinset.loadSet and pin.walkItem async patterns
docs: add link to go-ipfs' fanout bin implementation
refactor: async patterns of pin.load/flush
refactor: lint
refactor: privatize internal pin key storage
refactor: change encapsulation of ipfs.pin, fix resulting issues
fix: lint
fix: 'files add --pin=false' test was giving a false positive
refactor: use is-ipfs to check CID-ability of a string
refactor: remove last instance of 'once' in the pin code
---
 package.json                   |   3 +-
 src/core/boot.js               |   2 +-
 src/core/components/dag.js     |  33 ++--
 src/core/components/pin-set.js | 115 ++++--------
 src/core/components/pin.js     | 311 ++++++++++++++++-----------------
 src/core/utils.js              |  17 +-
 test/cli/files.js              |  19 +-
 test/core/pin-set.js           |  27 +--
 test/core/pin.js               | 104 ++++++-----
 test/core/utils.js             |  21 ++-
 10 files changed, 296 insertions(+), 356 deletions(-)

diff --git a/package.json b/package.json
index e426b70d7e..737988ff32 100644
--- 
a/package.json
+++ b/package.json
@@ -106,8 +106,8 @@
     "hapi-set-header": "^1.0.2",
     "hoek": "^5.0.3",
     "human-to-milliseconds": "^1.0.0",
-    "ipfs-api": "^22.0.0",
     "interface-datastore": "^0.4.1",
+    "ipfs-api": "^22.0.0",
     "ipfs-bitswap": "~0.20.0",
     "ipfs-block": "~0.7.1",
     "ipfs-block-service": "~0.14.0",
@@ -139,6 +139,7 @@
     "libp2p-websocket-star": "~0.8.0",
     "libp2p-websockets": "~0.12.0",
     "lodash.flatmap": "^4.5.0",
+    "lodash.flattendeep": "^4.4.0",
     "lodash.get": "^4.4.2",
     "lodash.set": "^4.3.2",
     "lodash.sortby": "^4.7.0",
diff --git a/src/core/boot.js b/src/core/boot.js
index 2d40ff5f7c..a8cb79d179 100644
--- a/src/core/boot.js
+++ b/src/core/boot.js
@@ -29,7 +29,7 @@ module.exports = (self) => {
   series([
     (cb) => self._repo.open(cb),
-    (cb) => self.pin.load(cb),
+    (cb) => self.pin._load(cb),
     (cb) => self.preStart(cb),
     (cb) => {
       self.log('initialized')
diff --git a/src/core/components/dag.js b/src/core/components/dag.js
index c933c3505e..2e8685305c 100644
--- a/src/core/components/dag.js
+++ b/src/core/components/dag.js
@@ -3,8 +3,8 @@
 const promisify = require('promisify-es6')
 const CID = require('cids')
 const pull = require('pull-stream')
-const _ = require('lodash')
-const once = require('once')
+const mapAsync = require('async/map')
+const flattenDeep = require('lodash.flattendeep')

 module.exports = function dag (self) {
   return {
@@ -36,7 +36,11 @@ module.exports = function dag (self) {
         path = '/'
       }
     } else if (Buffer.isBuffer(cid)) {
-      cid = new CID(cid)
+      try {
+        cid = new CID(cid)
+      } catch (err) {
+        return callback(err)
+      }
     }

     self._ipld.get(cid, path, options, callback)
@@ -82,27 +86,15 @@ module.exports = function dag (self) {
     // TODO - use IPLD selectors once they are implemented
     _getRecursive: promisify((multihash, callback) => {
       // gets flat array of all DAGNodes in tree given by multihash
-      callback = once(callback)
       self.dag.get(new CID(multihash), (err, res) => {
         if (err) { return callback(err) }
-        const links = res.value.links
-        const nodes = [res.value]
-
-        // leaf case
-        if (!links.length) {
-          return callback(null, nodes)
-        }
-
-        // branch case
-        links.forEach(link => {
-          self.dag._getRecursive(link.multihash, (err, subNodes) => {
-            if (err) { return callback(err) }
-            nodes.push(subNodes)
-            if (nodes.length === links.length + 1) {
-              return callback(null, _.flattenDeep(nodes))
-            }
-          })
+        mapAsync(res.value.links, (link, cb) => {
+          self.dag._getRecursive(link.multihash, cb)
+        }, (err, nodes) => {
+          if (err) return callback(err)
+          callback(null, flattenDeep([res.value, nodes]))
         })
       })
     })
diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js
index ffe9e7e2c6..541b3eed49 100644
--- a/src/core/components/pin-set.js
+++ b/src/core/components/pin-set.js
@@ -4,12 +4,9 @@
 const multihashes = require('multihashes')
 const CID = require('cids')
 const protobuf = require('protons')
 const fnv1a = require('fnv1a')
-const dagPB = require('ipld-dag-pb')
-const DAGNode = dagPB.DAGNode
-const DAGLink = dagPB.DAGLink
 const varint = require('varint')
-const once = require('once')
-const some = require('async/some')
+const { DAGNode, DAGLink } = require('ipld-dag-pb')
+const async = require('async')

 const pbSchema = require('./pin.proto')

@@ -61,35 +58,25 @@ function hash (seed, key) {
 exports = module.exports = function (dag) {
   const pinSet = {
     // should this be part of `object` API?
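+    // Walks the pin-set DAG rooted at `root` and calls back with `true` as
+    // soon as `childhash` turns up among its descendants; the `seen` map
+    // records visited links so shared subtrees are fetched only once and
+    // the search terminates even when many branches point at the same node.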
- hasChild: (root, childhash, callback) => { + hasDescendant: (root, childhash, callback) => { const seen = {} if (CID.isCID(childhash) || Buffer.isBuffer(childhash)) { childhash = toB58String(childhash) } - return searchChildren(root, callback, root.links.length, 0) + return searchChildren(root, callback) - function searchChildren (root, cb, numToCheck, numChecked) { - if (!root.links.length && numToCheck === numChecked) { - // all nodes have been checked - return cb(null, false) - } - - some(root.links, ({ multihash }, someCb) => { + function searchChildren (root, cb) { + async.some(root.links, ({ multihash }, someCb) => { const bs58Link = toB58String(multihash) - if (bs58Link in seen) return - if (bs58Link === childhash) { - return someCb(null, true) - } + if (bs58Link === childhash) { return someCb(null, true) } + if (bs58Link in seen) { return someCb(null, false) } seen[bs58Link] = true dag.get(multihash, (err, { value }) => { if (err) { return someCb(err) } - - numChecked++ - numToCheck += value.links.length - searchChildren(value, someCb, numToCheck, numChecked) + searchChildren(value, someCb) }) }, cb) } @@ -112,10 +99,9 @@ exports = module.exports = function (dag) { }, storeItems: (items, callback) => { - return storePins(items, callback) + return storePins(items, 0, callback) - function storePins (pins, cb, depth = 0, binsToFill = 0, binsFilled = 0) { - cb = once(cb) + function storePins (pins, depth, storePinsCb) { const pbHeader = pb.Set.encode({ version: 1, fanout: defaultFanout, @@ -144,18 +130,19 @@ exports = module.exports = function (dag) { ) DAGNode.create(rootData, rootLinks, (err, rootNode) => { - if (err) { return cb(err) } - return cb(null, rootNode) + if (err) { return storePinsCb(err) } + return storePinsCb(null, rootNode) }) } else { // If the array of pins is > maxItems, we: // - distribute the pins among `defaultFanout` bins // - create a DAGNode for each bin - // - add each pin of that bin as a DAGLink + // - add each pin as a DAGLink to that bin // - create a root DAGNode - // - store each bin as a DAGLink - // - send that root DAGNode via `cb` + // - add each bin as a DAGLink + // - send that root DAGNode via callback // (using go-ipfs' "wasteful but simple" approach for consistency) + // https://github.com/ipfs/go-ipfs/blob/master/pin/set.go#L57 const bins = pins.reduce((bins, pin) => { const n = hash(depth, pin.key) % defaultFanout @@ -163,43 +150,34 @@ exports = module.exports = function (dag) { return bins }, {}) - const binKeys = Object.keys(bins) - binsToFill += binKeys.length - binKeys.forEach(n => { + async.eachOf(bins, (bin, idx, eachCb) => { storePins( - bins[n], - (err, child) => storeChild(err, child, n), + bin, depth + 1, - binsToFill, - binsFilled + (err, child) => storeChild(err, child, idx, eachCb) ) + }, err => { + if (err) { return storePinsCb(err) } + DAGNode.create(headerBuf, fanoutLinks, (err, rootNode) => { + if (err) { return storePinsCb(err) } + return storePinsCb(null, rootNode) + }) }) } - function storeChild (err, child, bin) { + function storeChild (err, child, binIdx, cb) { if (err) { return cb(err) } - const cid = new CID(child._multihash) - dag.put(child, { cid }, (err) => { + dag.put(child, { cid: new CID(child._multihash) }, err => { if (err) { return cb(err) } - - fanoutLinks[bin] = new DAGLink('', child.size, child.multihash) - binsFilled++ - - if (binsFilled === binsToFill) { - // all finished - DAGNode.create(headerBuf, fanoutLinks, (err, rootNode) => { - if (err) { return cb(err) } - return cb(null, rootNode) - }) - } + 
fanoutLinks[binIdx] = new DAGLink('', child.size, child.multihash) + cb(null) }) } } }, loadSet: (rootNode, name, callback) => { - callback = once(callback) const link = rootNode.links.find(l => l.name === name) if (!link) { return callback(new Error('No link found with name ' + name)) @@ -217,50 +195,33 @@ exports = module.exports = function (dag) { }, walkItems: (node, step, callback) => { - callback = once(callback) let pbh try { pbh = readHeader(node) } catch (err) { return callback(err) } - let subwalkCount = 0 - let finishedCount = 0 - node.links.forEach((link, idx) => { + async.eachOf(node.links, (link, idx, eachCb) => { if (idx < pbh.header.fanout) { // the first pbh.header.fanout links are fanout bins - // if a link is not 'empty', dig into and walk its DAGLinks + // if a fanout bin is not 'empty', dig into and walk its DAGLinks const linkHash = link.multihash if (!emptyKey.equals(linkHash)) { - subwalkCount++ - // walk the links of this fanout bin - dag.get(linkHash, (err, res) => { - if (err) { return callback(err) } - pinSet.walkItems(res.value, step, walkCb) + return dag.get(linkHash, (err, res) => { + if (err) { return eachCb(err) } + pinSet.walkItems(res.value, step, eachCb) }) } } else { // otherwise, the link is a pin - return step(link, idx, pbh.data) + step(link, idx, pbh.data) } - }) - - if (!subwalkCount) { - // reached end of pins and found no non-empty fanout bins - return callback() - } - function walkCb (err) { - if (err) { return callback(err) } - finishedCount++ - if (subwalkCount === finishedCount) { - // done walking - return callback() - } - } + eachCb(null) + }, callback) } } return pinSet diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 7edbbe7872..3c9fd9fef9 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -1,36 +1,101 @@ +/* eslint max-nested-callbacks: ["error", 8] */ 'use strict' -const dagPB = require('ipld-dag-pb') -const DAGNode = dagPB.DAGNode -const DAGLink = dagPB.DAGLink -const CID = require('cids') -const pinSet = require('./pin-set') -const resolvePath = require('../utils').resolvePath const promisify = require('promisify-es6') +const { DAGNode, DAGLink } = require('ipld-dag-pb') +const CID = require('cids') const multihashes = require('multihashes') -const Key = require('interface-datastore').Key -const eachLimit = require('async/eachLimit') -const series = require('async/series') -const waterfall = require('async/waterfall') -const parallel = require('async/parallel') -const someLimit = require('async/someLimit') -const once = require('once') - -// arbitrary limit to number of concurrent dag operations +const async = require('async') +const { Key } = require('interface-datastore') + +const createPinSet = require('./pin-set') +const { resolvePath } = require('../utils') + +// arbitrary limit to the number of concurrent dag operations const concurrencyLimit = 300 +const pinDataStoreKey = new Key('/local/pins') function toB58String (hash) { return new CID(hash).toBaseEncodedString() } module.exports = function pin (self) { + const repo = self._repo + const dag = self.dag + const pinset = createPinSet(dag) + let directPins = new Set() let recursivePins = new Set() - const pinDataStoreKey = new Key('/local/pins') + const directKeys = () => + Array.from(directPins).map(key => multihashes.fromB58String(key)) + const recursiveKeys = () => + Array.from(recursivePins).map(key => multihashes.fromB58String(key)) - const repo = self._repo - const dag = self.dag + function getIndirectKeys (callback) { + 
const indirectKeys = new Set() + async.eachLimit(recursiveKeys(), concurrencyLimit, (multihash, cb) => { + dag._getRecursive(multihash, (err, nodes) => { + if (err) { return cb(err) } + + nodes + .map(({ multihash }) => toB58String(multihash)) + // recursive pins pre-empt indirect pins + .filter(key => !recursivePins.has(key)) + .forEach(key => indirectKeys.add(key)) + + cb() + }) + }, (err) => { + if (err) { return callback(err) } + callback(null, Array.from(indirectKeys)) + }) + } + + // Encode and write pin key sets to the datastore: + // a DAGLink for each of the recursive and direct pinsets + // a DAGNode holding those as DAGLinks, a kind of root pin + function flushPins (callback) { + let dLink, rLink, root + async.series([ + // create a DAGLink to the node with direct pins + cb => async.waterfall([ + cb => pinset.storeSet(directKeys(), cb), + (node, cb) => DAGLink.create(pin.types.direct, node.size, node.multihash, cb), + (link, cb) => { dLink = link; cb(null) } + ], cb), + + // create a DAGLink to the node with recursive pins + cb => async.waterfall([ + cb => pinset.storeSet(recursiveKeys(), cb), + (node, cb) => DAGLink.create(pin.types.recursive, node.size, node.multihash, cb), + (link, cb) => { rLink = link; cb(null) } + ], cb), + + // the pin-set nodes link to a special 'empty' node, so make sure it exists + cb => DAGNode.create(Buffer.alloc(0), (err, empty) => { + if (err) { return cb(err) } + dag.put(empty, { cid: new CID(empty.multihash) }, cb) + }), + + // create a root node with DAGLinks to the direct and recursive DAGs + cb => DAGNode.create(Buffer.alloc(0), [dLink, rLink], (err, node) => { + if (err) { return cb(err) } + root = node + dag.put(root, { cid: new CID(root.multihash) }, cb) + }), + + // hack for CLI tests + cb => repo.closed ? repo.datastore.open(cb) : cb(null, null), + + // save root to datastore under a consistent key + cb => repo.datastore.put(pinDataStoreKey, root.multihash, cb) + ], (err, res) => { + if (err) { return callback(err) } + self.log(`Flushed pins with root: ${root}`) + return callback(null, root) + }) + } const pin = { types: { @@ -40,26 +105,18 @@ module.exports = function pin (self) { all: 'all' }, - clear: () => { - directPins.clear() - recursivePins.clear() - }, - - set: pinSet(dag), - add: promisify((paths, options, callback) => { if (typeof options === 'function') { callback = options options = null } - callback = once(callback) const recursive = options ? 
options.recursive : true resolvePath(self.object, paths, (err, mhs) => { if (err) { return callback(err) } // verify that each hash can be pinned - parallel(mhs.map(multihash => cb => { + async.map(mhs, (multihash, cb) => { const key = toB58String(multihash) if (recursive) { if (recursivePins.has(key)) { @@ -91,7 +148,7 @@ module.exports = function pin (self) { return cb(null, key) }) } - }), (err, results) => { + }, (err, results) => { if (err) { return callback(err) } // update the pin sets in memory @@ -99,7 +156,7 @@ module.exports = function pin (self) { results.forEach(key => pinset.add(key)) // persist updated pin sets to datastore - pin.flush((err, root) => { + flushPins((err, root) => { if (err) { return callback(err) } return callback(null, results.map(key => ({hash: key}))) }) @@ -114,14 +171,13 @@ module.exports = function pin (self) { } else if (options && options.recursive === false) { recursive = false } - callback = once(callback) resolvePath(self.object, paths, (err, mhs) => { if (err) { return callback(err) } // verify that each hash can be unpinned - series(mhs.map(multihash => cb => { - pin.isPinnedWithType(multihash, pin.types.all, (err, res) => { + async.map(mhs, (multihash, cb) => { + pin._isPinnedWithType(multihash, pin.types.all, (err, res) => { if (err) { return cb(err) } const { pinned, reason } = res const key = toB58String(multihash) @@ -144,7 +200,7 @@ module.exports = function pin (self) { )) } }) - }), (err, results) => { + }, (err, results) => { if (err) { return callback(err) } // update the pin sets in memory @@ -157,7 +213,7 @@ module.exports = function pin (self) { }) // persist updated pin sets to datastore - pin.flush((err, root) => { + flushPins((err, root) => { if (err) { return callback(err) } self.log(`Removed pins: ${results}`) return callback(null, results.map(key => ({hash: key}))) @@ -183,7 +239,6 @@ module.exports = function pin (self) { if (options && options.type) { type = options.type.toLowerCase() } - callback = once(callback) if (!pin.types[type]) { return callback(new Error( `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` @@ -195,8 +250,8 @@ module.exports = function pin (self) { resolvePath(self.object, paths, (err, mhs) => { if (err) { return callback(err) } - series(mhs.map(multihash => cb => { - pin.isPinnedWithType(multihash, pin.types.all, (err, res) => { + async.mapSeries(mhs, (multihash, cb) => { + pin._isPinnedWithType(multihash, pin.types.all, (err, res) => { if (err) { return cb(err) } const { pinned, reason } = res const key = toB58String(multihash) @@ -218,54 +273,50 @@ module.exports = function pin (self) { }) } }) - }), callback) + }, callback) }) } else { // show all pinned items of type - let result = [] + let pins = [] if (type === pin.types.direct || type === pin.types.all) { - pin.directKeyStrings().forEach(hash => { - result.push({ + pins = pins.concat( + Array.from(directPins).map(hash => ({ type: pin.types.direct, - hash: hash - }) - }) + hash + })) + ) } if (type === pin.types.recursive || type === pin.types.all) { - pin.recursiveKeyStrings().forEach(hash => { - result.push({ + pins = pins.concat( + Array.from(recursivePins).map(hash => ({ type: pin.types.recursive, - hash: hash - }) - }) + hash + })) + ) } if (type === pin.types.indirect || type === pin.types.all) { - pin.getIndirectKeys((err, hashes) => { + getIndirectKeys((err, indirects) => { if (err) { return callback(err) } - hashes.forEach(hash => { - if (directPins.has(hash)) { - // if an indirect pin is also pinned directly, - 
// use only the indirect entry - result = result.filter(pin => pin.hash !== hash) - } - result.push({ + pins = pins + // if something is pinned both directly and indirectly, + // report the indirect entry + .filter(({ hash }) => + !indirects.includes(hash) || + (indirects.includes(hash) && !directPins.has(hash)) + ) + .concat(indirects.map(hash => ({ type: pin.types.indirect, - hash: hash - }) - }) - return callback(null, result) + hash + }))) + return callback(null, pins) }) } else { - return callback(null, result) + return callback(null, pins) } } }), - isPinned: promisify((multihash, callback) => { - pin.isPinnedWithType(multihash, pin.types.all, callback) - }), - - isPinnedWithType: promisify((multihash, type, callback) => { + _isPinnedWithType: promisify((multihash, type, callback) => { const key = toB58String(multihash) const { recursive, direct, all } = pin.types // recursive @@ -287,11 +338,11 @@ module.exports = function pin (self) { // check each recursive key to see if multihash is under it // arbitrary limit, enables handling 1000s of pins. let foundPin - someLimit(pin.recursiveKeys(), concurrencyLimit, (key, cb) => { + async.someLimit(recursiveKeys(), concurrencyLimit, (key, cb) => { dag.get(new CID(key), (err, res) => { if (err) { return cb(err) } - pin.set.hasChild(res.value, multihash, (err, has) => { + pinset.hasDescendant(res.value, multihash, (err, has) => { if (has) { foundPin = toB58String(res.value.multihash) } @@ -304,109 +355,39 @@ module.exports = function pin (self) { }) }), - directKeyStrings: () => Array.from(directPins), - - recursiveKeyStrings: () => Array.from(recursivePins), - - directKeys: () => pin.directKeyStrings().map(key => multihashes.fromB58String(key)), - - recursiveKeys: () => pin.recursiveKeyStrings().map(key => multihashes.fromB58String(key)), - - getIndirectKeys: promisify(callback => { - const indirectKeys = new Set() - const rKeys = pin.recursiveKeys() - eachLimit(rKeys, concurrencyLimit, (multihash, cb) => { - dag._getRecursive(multihash, (err, nodes) => { - if (err) { return cb(err) } - - nodes.forEach(node => { - const key = toB58String(node.multihash) - if (!recursivePins.has(key)) { - // not already pinned recursively - indirectKeys.add(key) - } - }) - - cb() - }) - }, (err) => { - if (err) { return callback(err) } - callback(null, Array.from(indirectKeys)) - }) - }), - - // encodes and writes pin key sets to the datastore - // each key set will be stored as a DAG node, and a root node will link to both - flush: promisify((callback) => { - const handle = { - put: (k, v, cb) => { - handle[k] = v - cb() - } - } - - waterfall([ - // create link to direct keys node - (cb) => pin.set.storeSet(pin.directKeys(), cb), - (dRoot, cb) => DAGLink.create(pin.types.direct, dRoot.size, dRoot.multihash, cb), - (dLink, cb) => handle.put('dLink', dLink, cb), - - // create link to recursive keys node - (cb) => pin.set.storeSet(pin.recursiveKeys(), cb), - (rRoot, cb) => DAGLink.create(pin.types.recursive, rRoot.size, rRoot.multihash, cb), - (rLink, cb) => handle.put('rLink', rLink, cb), - - // the pin-set nodes link to an empty node, so make sure it's added to dag - (cb) => DAGNode.create(Buffer.alloc(0), cb), - (empty, cb) => dag.put(empty, {cid: new CID(empty.multihash)}, cb), - - // create root node with links to direct and recursive nodes - (cid, cb) => DAGNode.create(Buffer.alloc(0), [handle.dLink, handle.rLink], cb), - (root, cb) => handle.put('root', root, cb), - - // add the root node to dag - (cb) => dag.put(handle.root, {cid: new 
CID(handle.root.multihash)}, cb),
-
-      // save serialized root to datastore under a consistent key
-      (_, cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests
-      (_, cb) => repo.datastore.put(pinDataStoreKey, handle.root.multihash, cb)
-    ], (err, result) => {
-      if (err) { return callback(err) }
-      self.log(`Flushed pins with root: ${handle.root}.`)
-      return callback(null, handle.root)
-    })
-  }),
-
-  load: promisify(callback => {
-    const handle = {
-      put: (k, v, cb) => {
-        handle[k] = v
-        cb()
-      }
-    }
-
-    waterfall([
-      (cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null), // hack for CLI tests
+  _load: promisify(callback => {
+    async.waterfall([
+      // hack for CLI tests
+      (cb) => repo.closed ? repo.datastore.open(cb) : cb(null, null),
       (_, cb) => repo.datastore.has(pinDataStoreKey, cb),
       (has, cb) => has ? cb() : cb(new Error('No pins to load')),
       (cb) => repo.datastore.get(pinDataStoreKey, cb),
-      (mh, cb) => dag.get(new CID(mh), cb),
-      (root, cb) => handle.put('root', root.value, cb),
-      (cb) => pin.set.loadSet(handle.root, pin.types.recursive, cb),
-      (rKeys, cb) => handle.put('rKeys', rKeys, cb),
-      (cb) => pin.set.loadSet(handle.root, pin.types.direct, cb)
-    ], (err, dKeys) => {
-      if (err && err.message !== 'No pins to load') {
-        return callback(err)
+      (mh, cb) => dag.get(new CID(mh), cb)
+    ], (err, pinRoot) => {
+      if (err) {
+        if (err.message === 'No pins to load') {
+          self.log('No pins to load')
+          return callback()
+        } else {
+          return callback(err)
+        }
       }
-      if (dKeys) {
+
+      async.parallel([
+        cb => pinset.loadSet(pinRoot.value, pin.types.recursive, cb),
+        cb => pinset.loadSet(pinRoot.value, pin.types.direct, cb)
+      ], (err, [rKeys, dKeys]) => {
+        if (err) { return callback(err) }
+
         directPins = new Set(dKeys.map(toB58String))
-        recursivePins = new Set(handle.rKeys.map(toB58String))
-      }
-      self.log('Loaded pins from the datastore')
-      return callback()
+        recursivePins = new Set(rKeys.map(toB58String))
+
+        self.log('Loaded pins from the datastore')
+        return callback(null)
+      })
     })
   })
 }
+
 return pin
}
diff --git a/src/core/utils.js b/src/core/utils.js
index 977377a628..b7e1adf7ad 100644
--- a/src/core/utils.js
+++ b/src/core/utils.js
@@ -4,11 +4,12 @@ const CID = require('cids')
 const multihashes = require('multihashes')
 const promisify = require('promisify-es6')
 const map = require('async/map')
+const isIpfs = require('is-ipfs')
 
 exports.OFFLINE_ERROR = 'This command must be run in online mode. Try running \'ipfs daemon\' first.'
 
 /**
- * Break an ipfs-path down into it's hash hash and an array of links.
+ * Break an ipfs-path down into its hash and an array of links.
  *
  * examples:
  *   b58Hash -> { hash: 'b58Hash', links: [] }
@@ -21,20 +22,18 @@ exports.OFFLINE_ERROR = 'This command must be run in online mode. 
Try running \' */ function parseIpfsPath (ipfsPath) { const invalidPathErr = new Error('invalid ipfs ref path') - - const matched = ipfsPath.match(/^(?:\/ipfs\/)?([^/]+(?:\/[^/]+)*)\/?$/) + ipfsPath = ipfsPath.replace(/^\/ipfs\//, '') + const matched = ipfsPath.match(/([^/]+(?:\/[^/]+)*)\/?$/) if (!matched) { throw invalidPathErr } const [hash, ...links] = matched[1].split('/') - try { - if (CID.isCID(new CID(hash))) { - return { hash, links } - } - throw invalidPathErr - } catch (err) { + // check that a CID can be constructed with the hash + if (isIpfs.cid(hash)) { + return { hash, links } + } else { throw invalidPathErr } } diff --git a/test/cli/files.js b/test/cli/files.js index dd7911307a..32fe28fc00 100644 --- a/test/cli/files.js +++ b/test/cli/files.js @@ -11,6 +11,7 @@ const rimraf = require('rimraf').sync const CID = require('cids') const mh = require('multihashes') const runOnAndOff = require('../utils/on-and-off') +const clean = require('../utils/clean') // TODO: Test against all algorithms Object.keys(mh.names) // This subset is known to work with both go-ipfs and js-ipfs as of 2017-09-05 @@ -311,7 +312,7 @@ describe('files', () => runOnAndOff((thing) => { ipfs.fail(`object get ${hash}`), new Promise((resolve, reject) => setTimeout(resolve, 4000)) ]) - .then(() => fs.unlinkSync(filepath)) + .then(() => clean(filepath)) }) }) @@ -327,7 +328,7 @@ describe('files', () => runOnAndOff((thing) => { return ipfs(`pin ls ${hash}`) .then(ls => expect(ls).to.include(hash)) }) - .then(() => fs.unlinkSync(filePath)) + .then(() => clean(filePath)) }) it('add does not pin with --pin=false', function () { @@ -337,18 +338,8 @@ describe('files', () => runOnAndOff((thing) => { fs.writeFileSync(filePath, content) return ipfs(`files add -Q --pin=false ${filePath}`) - .then(out => { - const lsAttempt = ipfs(`pin ls ${out.trim()}`) - .then(ls => { - expect(ls.trim()).to.eql('') - }) - - return Promise.race([ - lsAttempt, - new Promise((resolve, reject) => setTimeout(resolve, 4000)) - ]) - }) - .then(() => fs.unlinkSync(filePath)) + .then(out => ipfs.fail(`pin ls ${out.trim()}`)) + .then(() => clean(filePath)) }) HASH_ALGS.forEach((name) => { diff --git a/test/core/pin-set.js b/test/core/pin-set.js index c4ed110131..243c81d25c 100644 --- a/test/core/pin-set.js +++ b/test/core/pin-set.js @@ -14,6 +14,7 @@ const { DAGNode } = require('ipld-dag-pb') const CID = require('CIDs') const IPFS = require('../../src/core') +const createPinSet = require('../../src/core/components/pin-set') const createTempRepo = require('../utils/create-repo-nodejs') const defaultFanout = 256 @@ -46,9 +47,9 @@ function createNode (data, links = [], callback) { DAGNode.create(data, links, callback) } -describe('pinset', function () { +describe('pinSet', function () { let ipfs - let pinset + let pinSet let repo before(function (done) { @@ -56,7 +57,7 @@ describe('pinset', function () { repo = createTempRepo() ipfs = new IPFS({ repo }) ipfs.on('ready', () => { - pinset = ipfs.pin.set + pinSet = createPinSet(ipfs.dag) done() }) }) @@ -73,7 +74,7 @@ describe('pinset', function () { createNode('data', (err, node) => { expect(err).to.not.exist() const nodeHash = node._multihash - pinset.storeSet([nodeHash], (err, rootNode) => { + pinSet.storeSet([nodeHash], (err, rootNode) => { expect(err).to.not.exist() const node = rootNode.toJSON() expect(node.multihash).to.eql(expectedRootHash) @@ -95,7 +96,7 @@ describe('pinset', function () { const count = maxItems + 1 createNodes(count, (err, nodes) => { expect(err).to.not.exist() - 
pinset.storeSet(nodes, (err, node) => { + pinSet.storeSet(nodes, (err, node) => { expect(err).to.not.exist() node = node.toJSON() @@ -103,13 +104,13 @@ describe('pinset', function () { expect(node.links).to.have.length(defaultFanout) expect(node.multihash).to.eql(expectedHash) - pinset.loadSet(node, '', (err, loaded) => { + pinSet.loadSet(node, '', (err, loaded) => { expect(err).to.not.exist() expect(loaded).to.have.length(30) const hashes = loaded.map(l => new CID(l).toBaseEncodedString()) // just check the first node, assume all are children if successful - pinset.hasChild(node, hashes[0], (err, has) => { + pinSet.hasDescendant(node, hashes[0], (err, has) => { expect(err).to.not.exist() expect(has).to.eql(true) done() @@ -121,7 +122,7 @@ describe('pinset', function () { // This test is largely taken from go-ipfs/pin/set_test.go // It fails after reaching maximum call stack depth but I don't believe it's - // infinite. We need to reference go's pinset impl to make sure + // infinite. We need to reference go's pinSet impl to make sure // our sharding behaves correctly, or perhaps this test is misguided it.skip('stress test: stores items > (maxItems * defaultFanout) + 1', function (done) { this.timeout(180 * 1000) @@ -134,11 +135,11 @@ describe('pinset', function () { createNodes(limit, (err, nodes) => { expect(err).to.not.exist() series([ - cb => pinset.storeSet(nodes.slice(0, -1), (err, res) => { + cb => pinSet.storeSet(nodes.slice(0, -1), (err, res) => { expect(err).to.not.exist() cb(null, res) }), - cb => pinset.storeSet(nodes, (err, res) => { + cb => pinSet.storeSet(nodes, (err, res) => { expect(err).to.not.exist() cb(null, res) }) @@ -156,7 +157,7 @@ describe('pinset', function () { createNode('datum', (err, node) => { expect(err).to.not.exist() - pinset.walkItems(node, () => {}, (err, res) => { + pinSet.walkItems(node, () => {}, (err, res) => { expect(err).to.exist() expect(res).to.not.exist() done() @@ -171,10 +172,10 @@ describe('pinset', function () { createNodes(defaultFanout, (err, nodes) => { expect(err).to.not.exist() - pinset.storeSet(nodes, (err, node) => { + pinSet.storeSet(nodes, (err, node) => { expect(err).to.not.exist() - pinset.walkItems(node, walker, err => { + pinSet.walkItems(node, walker, err => { expect(err).to.not.exist() expect(seen).to.have.length(defaultFanout) expect(seen[0].idx).to.eql(defaultFanout) diff --git a/test/core/pin.js b/test/core/pin.js index 79b842a642..8d6c48712d 100644 --- a/test/core/pin.js +++ b/test/core/pin.js @@ -44,10 +44,27 @@ describe('pin', function () { type = undefined } - return pin.isPinnedWithType(hash, type || pin.types.all) + return pin._isPinnedWithType(hash, type || pin.types.all) .then(result => expect(result.pinned).to.eql(pinned)) } + function clearPins () { + return pin.ls() + .then(ls => { + const pinsToRemove = ls + .filter(out => out.type === pin.types.recursive) + .map(out => pin.rm(out.hash)) + return Promise.all(pinsToRemove) + }) + .then(() => pin.ls()) + .then(ls => { + const pinsToRemove = ls + .filter(out => out.type === pin.types.direct) + .map(out => pin.rm(out.hash)) + return Promise.all(pinsToRemove) + }) + } + before(function (done) { this.timeout(20 * 1000) repo = createTempRepo() @@ -60,24 +77,21 @@ describe('pin', function () { after(done => ipfs.stop(done)) - /** - getIndirectKeys, - */ - - describe('isPinned', function () { + describe('isPinnedWithType', function () { beforeEach(function () { - pin.clear() + return clearPins() + .then(() => pin.add(pins.root)) }) it('when node is pinned', function () { 
return pin.add(pins.solarWiki) - .then(() => pin.isPinned(pins.solarWiki)) + .then(() => pin._isPinnedWithType(pins.solarWiki, pin.types.all)) .then(pinned => expect(pinned.pinned).to.eql(true)) }) it('when node is not in datastore', function () { const falseHash = `${pins.root.slice(0, -2)}ss` - return pin.isPinned(falseHash) + return pin._isPinnedWithType(falseHash, pin.types.all) .then(pinned => { expect(pinned.pinned).to.eql(false) expect(pinned.reason).to.eql(undefined) @@ -85,18 +99,12 @@ describe('pin', function () { }) it('when node is in datastore but not pinned', function () { - return expectPinned(pins.root, false) - }) - }) - - describe('isPinnedWithType', function () { - beforeEach(function () { - pin.clear() - return pin.add(pins.root) + return pin.rm(pins.root) + .then(() => expectPinned(pins.root, false)) }) it('when pinned recursively', function () { - return pin.isPinnedWithType(pins.root, pin.types.recursive) + return pin._isPinnedWithType(pins.root, pin.types.recursive) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason).to.eql(pin.types.recursive) @@ -104,7 +112,7 @@ describe('pin', function () { }) it('when pinned indirectly', function () { - return pin.isPinnedWithType(pins.mercuryWiki, pin.types.indirect) + return pin._isPinnedWithType(pins.mercuryWiki, pin.types.indirect) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason).to.eql(pins.root) @@ -114,7 +122,7 @@ describe('pin', function () { it('when pinned directly', function () { return pin.add(pins.mercuryDir, { recursive: false }) .then(() => { - return pin.isPinnedWithType(pins.mercuryDir, pin.types.direct) + return pin._isPinnedWithType(pins.mercuryDir, pin.types.direct) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason).to.eql(pin.types.direct) @@ -123,15 +131,15 @@ describe('pin', function () { }) it('when not pinned', function () { - pin.clear() - return pin.isPinnedWithType(pins.mercuryDir, pin.types.direct) + return clearPins() + .then(() => pin._isPinnedWithType(pins.mercuryDir, pin.types.direct)) .then(pin => expect(pin.pinned).to.eql(false)) }) }) describe('add', function () { beforeEach(function () { - pin.clear() + return clearPins() }) it('recursive', function () { @@ -184,11 +192,11 @@ describe('pin', function () { describe('ls', function () { before(function () { - pin.clear() - return Promise.all([ - pin.add(pins.root), - pin.add(pins.mercuryDir, { recursive: false }) - ]) + return clearPins() + .then(() => Promise.all([ + pin.add(pins.root), + pin.add(pins.mercuryDir, { recursive: false }) + ])) }) it('lists pins of a particular hash', function () { @@ -208,7 +216,7 @@ describe('pin', function () { it('all', function () { return pin.ls() .then(out => - expect(out).to.deep.eql([ + expect(out).to.deep.include.members([ { type: 'recursive', hash: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys' }, { type: 'indirect', @@ -224,7 +232,7 @@ describe('pin', function () { it('direct', function () { return pin.ls({ type: 'direct' }) .then(out => - expect(out).to.deep.eql([ + expect(out).to.deep.include.members([ { type: 'direct', hash: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q' } ]) @@ -234,7 +242,7 @@ describe('pin', function () { it('recursive', function () { return pin.ls({ type: 'recursive' }) .then(out => - expect(out).to.deep.eql([ + expect(out).to.deep.include.members([ { type: 'recursive', hash: 'QmTAMavb995EHErSrKo7mB8dYkpaSJxu6ys1a6XJyB2sys' } ]) @@ -244,7 +252,7 @@ describe('pin', function () { it('indirect', function () 
{ return pin.ls({ type: 'indirect' }) .then(out => - expect(out).to.deep.eql([ + expect(out).to.deep.include.members([ { type: 'indirect', hash: 'QmTMbkDfvHwq3Aup6Nxqn3KKw9YnoKzcZvuArAfQ9GF3QG' }, { type: 'indirect', @@ -259,8 +267,8 @@ describe('pin', function () { describe('rm', function () { beforeEach(function () { - pin.clear() - return pin.add(pins.root) + return clearPins() + .then(() => pin.add(pins.root)) }) it('a recursive pin', function () { @@ -274,8 +282,8 @@ describe('pin', function () { }) it('a direct pin', function () { - pin.clear() - return pin.add(pins.mercuryDir, { recursive: false }) + return clearPins() + .then(() => pin.add(pins.mercuryDir, { recursive: false })) .then(() => pin.rm(pins.mercuryDir)) .then(() => expectPinned(pins.mercuryDir, false)) }) @@ -293,21 +301,6 @@ describe('pin', function () { }) }) - describe('load', function () { - before(function () { - return pin.add(pins.root) - }) - - it('loads', function () { - pin.clear() - return pin.ls() - .then(ls => expect(ls.length).to.eql(0)) - .then(() => pin.load()) - .then(() => pin.ls()) - .then(ls => expect(ls.length).to.eql(4)) - }) - }) - describe('flush', function () { beforeEach(function () { return pin.add(pins.root) @@ -317,12 +310,13 @@ describe('pin', function () { return pin.ls() .then(ls => expect(ls.length).to.eql(4)) .then(() => { - pin.clear() - return pin.flush() + // indirectly trigger a datastore flush by adding something + return clearPins() + .then(() => pin.add(pins.mercuryWiki)) }) - .then(() => pin.load()) + .then(() => pin._load()) .then(() => pin.ls()) - .then(ls => expect(ls.length).to.eql(0)) + .then(ls => expect(ls.length).to.eql(1)) }) }) }) diff --git a/test/core/utils.js b/test/core/utils.js index 13cffd264d..8df7f2253d 100644 --- a/test/core/utils.js +++ b/test/core/utils.js @@ -48,12 +48,31 @@ describe('utils', () => { }) }) + it('parses path with leading and trailing slashes', function () { + expect(utils.parseIpfsPath(`/${rootHash}/`)) + .to.deep.equal({ + hash: rootHash, + links: [] + }) + }) + + it('parses non sha2-256 paths', function () { + // There are many, many hashing algorithms. Just one should be a sufficient + // indicator. Used go-ipfs@0.4.13 `add --hash=keccak-512` to generate + const keccak512 = 'zB7S6ZdcqsTqvNhBpx3SbFTocRpAUHj1w9WQXQGyWBVEsLStNfaaNtsdFUQbRk4tYPZvnpGbtDN5gEH4uVzUwsFyJh9Ei' + expect(utils.parseIpfsPath(keccak512)) + .to.deep.equal({ + hash: keccak512, + links: [] + }) + }) + it('returns error for malformed path', function () { const fn = () => utils.parseIpfsPath(`${rootHash}//about`) expect(fn).to.throw('invalid ipfs ref path') }) - it('returns error if root is not a valid multihash', function () { + it('returns error if root is not a valid sha2-256 multihash', function () { const fn = () => utils.parseIpfsPath('invalid/ipfs/path') expect(fn).to.throw('invalid ipfs ref path') }) From a7a55b970c24dd5e1b2a5f38adc70b4138848ea2 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Thu, 14 Jun 2018 13:28:31 -0500 Subject: [PATCH 20/21] refactor: do not expose pinTypes They're simple enough, documented elsewhere, and not used by any exposed functionality. 
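
A minimal before/after sketch of the caller-facing change (hypothetical
consumer code, not part of this patch): `type` is now passed as a plain
string, one of 'direct' | 'indirect' | 'recursive' | 'all'.

    // before: callers could reach into the exposed constants
    ipfs.pin.ls({ type: ipfs.pin.types.recursive }, (err, pins) => {
      if (err) { throw err }
      pins.forEach(({ hash, type }) => console.log(hash, type))
    })

    // after: pass the string directly; an unrecognized string
    // still fails with "Invalid type ..."
    ipfs.pin.ls({ type: 'recursive' }, (err, pins) => {
      if (err) { throw err }
      pins.forEach(({ hash, type }) => console.log(hash, type))
    })
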
--- src/core/components/pin.js | 53 +++++++++++++++++------------------ src/core/utils.js | 1 - src/http/api/resources/pin.js | 3 +- test/core/pin.js | 34 +++++++++++++--------- test/core/utils.js | 8 +++--- 5 files changed, 51 insertions(+), 48 deletions(-) diff --git a/src/core/components/pin.js b/src/core/components/pin.js index 3c9fd9fef9..f276a87395 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -23,6 +23,12 @@ module.exports = function pin (self) { const repo = self._repo const dag = self.dag const pinset = createPinSet(dag) + const types = { + direct: 'direct', + recursive: 'recursive', + indirect: 'indirect', + all: 'all' + } let directPins = new Set() let recursivePins = new Set() @@ -61,14 +67,14 @@ module.exports = function pin (self) { // create a DAGLink to the node with direct pins cb => async.waterfall([ cb => pinset.storeSet(directKeys(), cb), - (node, cb) => DAGLink.create(pin.types.direct, node.size, node.multihash, cb), + (node, cb) => DAGLink.create(types.direct, node.size, node.multihash, cb), (link, cb) => { dLink = link; cb(null) } ], cb), // create a DAGLink to the node with recursive pins cb => async.waterfall([ cb => pinset.storeSet(recursiveKeys(), cb), - (node, cb) => DAGLink.create(pin.types.recursive, node.size, node.multihash, cb), + (node, cb) => DAGLink.create(types.recursive, node.size, node.multihash, cb), (link, cb) => { rLink = link; cb(null) } ], cb), @@ -98,13 +104,6 @@ module.exports = function pin (self) { } const pin = { - types: { - direct: 'direct', - recursive: 'recursive', - indirect: 'indirect', - all: 'all' - }, - add: promisify((paths, options, callback) => { if (typeof options === 'function') { callback = options @@ -177,7 +176,7 @@ module.exports = function pin (self) { // verify that each hash can be unpinned async.map(mhs, (multihash, cb) => { - pin._isPinnedWithType(multihash, pin.types.all, (err, res) => { + pin._isPinnedWithType(multihash, types.all, (err, res) => { if (err) { return cb(err) } const { pinned, reason } = res const key = toB58String(multihash) @@ -186,13 +185,13 @@ module.exports = function pin (self) { } switch (reason) { - case (pin.types.recursive): + case (types.recursive): if (recursive) { return cb(null, key) } else { return cb(new Error(`${key} is pinned recursively`)) } - case (pin.types.direct): + case (types.direct): return cb(null, key) default: return cb(new Error( @@ -223,7 +222,7 @@ module.exports = function pin (self) { }), ls: promisify((paths, options, callback) => { - let type = pin.types.all + let type = types.all if (typeof paths === 'function') { callback = paths options = null @@ -239,7 +238,7 @@ module.exports = function pin (self) { if (options && options.type) { type = options.type.toLowerCase() } - if (!pin.types[type]) { + if (!types[type]) { return callback(new Error( `Invalid type '${type}', must be one of {direct, indirect, recursive, all}` )) @@ -251,7 +250,7 @@ module.exports = function pin (self) { if (err) { return callback(err) } async.mapSeries(mhs, (multihash, cb) => { - pin._isPinnedWithType(multihash, pin.types.all, (err, res) => { + pin._isPinnedWithType(multihash, types.all, (err, res) => { if (err) { return cb(err) } const { pinned, reason } = res const key = toB58String(multihash) @@ -260,8 +259,8 @@ module.exports = function pin (self) { } switch (reason) { - case pin.types.direct: - case pin.types.recursive: + case types.direct: + case types.recursive: return cb(null, { hash: key, type: reason @@ -269,7 +268,7 @@ module.exports = function pin 
(self) { default: return cb(null, { hash: key, - type: `${pin.types.indirect} through ${reason}` + type: `${types.indirect} through ${reason}` }) } }) @@ -278,23 +277,23 @@ module.exports = function pin (self) { } else { // show all pinned items of type let pins = [] - if (type === pin.types.direct || type === pin.types.all) { + if (type === types.direct || type === types.all) { pins = pins.concat( Array.from(directPins).map(hash => ({ - type: pin.types.direct, + type: types.direct, hash })) ) } - if (type === pin.types.recursive || type === pin.types.all) { + if (type === types.recursive || type === types.all) { pins = pins.concat( Array.from(recursivePins).map(hash => ({ - type: pin.types.recursive, + type: types.recursive, hash })) ) } - if (type === pin.types.indirect || type === pin.types.all) { + if (type === types.indirect || type === types.all) { getIndirectKeys((err, indirects) => { if (err) { return callback(err) } pins = pins @@ -305,7 +304,7 @@ module.exports = function pin (self) { (indirects.includes(hash) && !directPins.has(hash)) ) .concat(indirects.map(hash => ({ - type: pin.types.indirect, + type: types.indirect, hash }))) return callback(null, pins) @@ -318,7 +317,7 @@ module.exports = function pin (self) { _isPinnedWithType: promisify((multihash, type, callback) => { const key = toB58String(multihash) - const { recursive, direct, all } = pin.types + const { recursive, direct, all } = types // recursive if ((type === recursive || type === all) && recursivePins.has(key)) { return callback(null, {pinned: true, reason: recursive}) @@ -374,8 +373,8 @@ module.exports = function pin (self) { } async.parallel([ - cb => pinset.loadSet(pinRoot.value, pin.types.recursive, cb), - cb => pinset.loadSet(pinRoot.value, pin.types.direct, cb) + cb => pinset.loadSet(pinRoot.value, types.recursive, cb), + cb => pinset.loadSet(pinRoot.value, types.direct, cb) ], (err, [rKeys, dKeys]) => { if (err) { return callback(err) } diff --git a/src/core/utils.js b/src/core/utils.js index b7e1adf7ad..a0d67e449a 100644 --- a/src/core/utils.js +++ b/src/core/utils.js @@ -1,6 +1,5 @@ 'use strict' -const CID = require('cids') const multihashes = require('multihashes') const promisify = require('promisify-es6') const map = require('async/map') diff --git a/src/http/api/resources/pin.js b/src/http/api/resources/pin.js index 73c97ac2c3..e42dd1f8c4 100644 --- a/src/http/api/resources/pin.js +++ b/src/http/api/resources/pin.js @@ -25,8 +25,7 @@ function parseArgs (request, reply) { exports.ls = { parseArgs: (request, reply) => { - const ipfs = request.server.app.ipfs - const type = request.query.type || ipfs.pin.types.all + const type = request.query.type || 'all' return reply({ path: request.query.arg, diff --git a/test/core/pin.js b/test/core/pin.js index 8d6c48712d..32618422a0 100644 --- a/test/core/pin.js +++ b/test/core/pin.js @@ -24,6 +24,12 @@ const pins = { mercuryDir: 'QmbJCNKXJqVK8CzbjpNFz2YekHwh3CSHpBA86uqYg3sJ8q', mercuryWiki: 'QmVgSHAdMxFAuMP2JiMAYkB8pCWP1tcB9djqvq8GKAFiHi' } +const pinTypes = { + direct: 'direct', + recursive: 'recursive', + indirect: 'indirect', + all: 'all' +} describe('pin', function () { const fixtures = [ @@ -44,7 +50,7 @@ describe('pin', function () { type = undefined } - return pin._isPinnedWithType(hash, type || pin.types.all) + return pin._isPinnedWithType(hash, type || pinTypes.all) .then(result => expect(result.pinned).to.eql(pinned)) } @@ -52,14 +58,14 @@ describe('pin', function () { return pin.ls() .then(ls => { const pinsToRemove = ls - .filter(out => out.type === 
pin.types.recursive) + .filter(out => out.type === pinTypes.recursive) .map(out => pin.rm(out.hash)) return Promise.all(pinsToRemove) }) .then(() => pin.ls()) .then(ls => { const pinsToRemove = ls - .filter(out => out.type === pin.types.direct) + .filter(out => out.type === pinTypes.direct) .map(out => pin.rm(out.hash)) return Promise.all(pinsToRemove) }) @@ -85,13 +91,13 @@ describe('pin', function () { it('when node is pinned', function () { return pin.add(pins.solarWiki) - .then(() => pin._isPinnedWithType(pins.solarWiki, pin.types.all)) + .then(() => pin._isPinnedWithType(pins.solarWiki, pinTypes.all)) .then(pinned => expect(pinned.pinned).to.eql(true)) }) it('when node is not in datastore', function () { const falseHash = `${pins.root.slice(0, -2)}ss` - return pin._isPinnedWithType(falseHash, pin.types.all) + return pin._isPinnedWithType(falseHash, pinTypes.all) .then(pinned => { expect(pinned.pinned).to.eql(false) expect(pinned.reason).to.eql(undefined) @@ -104,15 +110,15 @@ describe('pin', function () { }) it('when pinned recursively', function () { - return pin._isPinnedWithType(pins.root, pin.types.recursive) + return pin._isPinnedWithType(pins.root, pinTypes.recursive) .then(result => { expect(result.pinned).to.eql(true) - expect(result.reason).to.eql(pin.types.recursive) + expect(result.reason).to.eql(pinTypes.recursive) }) }) it('when pinned indirectly', function () { - return pin._isPinnedWithType(pins.mercuryWiki, pin.types.indirect) + return pin._isPinnedWithType(pins.mercuryWiki, pinTypes.indirect) .then(result => { expect(result.pinned).to.eql(true) expect(result.reason).to.eql(pins.root) @@ -122,17 +128,17 @@ describe('pin', function () { it('when pinned directly', function () { return pin.add(pins.mercuryDir, { recursive: false }) .then(() => { - return pin._isPinnedWithType(pins.mercuryDir, pin.types.direct) + return pin._isPinnedWithType(pins.mercuryDir, pinTypes.direct) .then(result => { expect(result.pinned).to.eql(true) - expect(result.reason).to.eql(pin.types.direct) + expect(result.reason).to.eql(pinTypes.direct) }) }) }) it('when not pinned', function () { return clearPins() - .then(() => pin._isPinnedWithType(pins.mercuryDir, pin.types.direct)) + .then(() => pin._isPinnedWithType(pins.mercuryDir, pinTypes.direct)) .then(pin => expect(pin.pinned).to.eql(false)) }) }) @@ -165,8 +171,8 @@ describe('pin', function () { .then(() => pin.add(pins.root)) .then(() => Promise.all([ // solarWiki is pinned both directly and indirectly o.O - expectPinned(pins.solarWiki, pin.types.direct), - expectPinned(pins.solarWiki, pin.types.indirect) + expectPinned(pins.solarWiki, pinTypes.direct), + expectPinned(pins.solarWiki, pinTypes.indirect) ])) }) @@ -208,7 +214,7 @@ describe('pin', function () { return pin.ls() .then(ls => { const pinType = ls.find(out => out.hash === pins.mercuryDir).type - expect(pinType).to.eql(pin.types.indirect) + expect(pinType).to.eql(pinTypes.indirect) }) }) diff --git a/test/core/utils.js b/test/core/utils.js index 8df7f2253d..b5c84b15c1 100644 --- a/test/core/utils.js +++ b/test/core/utils.js @@ -61,10 +61,10 @@ describe('utils', () => { // indicator. 
Used go-ipfs@0.4.13 `add --hash=keccak-512` to generate const keccak512 = 'zB7S6ZdcqsTqvNhBpx3SbFTocRpAUHj1w9WQXQGyWBVEsLStNfaaNtsdFUQbRk4tYPZvnpGbtDN5gEH4uVzUwsFyJh9Ei' expect(utils.parseIpfsPath(keccak512)) - .to.deep.equal({ - hash: keccak512, - links: [] - }) + .to.deep.equal({ + hash: keccak512, + links: [] + }) }) it('returns error for malformed path', function () { From d0b4a0cefdddae94721a41e0664a0fbd02e147d1 Mon Sep 17 00:00:00 2001 From: jonkrone Date: Mon, 18 Jun 2018 16:24:47 -0500 Subject: [PATCH 21/21] fix: do not destructure node callback results --- src/core/components/dag.js | 2 +- src/core/components/pin-set.js | 4 ++-- src/core/components/pin.js | 7 ++++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/core/components/dag.js b/src/core/components/dag.js index 2e8685305c..e43ea241f8 100644 --- a/src/core/components/dag.js +++ b/src/core/components/dag.js @@ -39,7 +39,7 @@ module.exports = function dag (self) { try { cid = new CID(cid) } catch (err) { - callback(err) + return callback(err) } } diff --git a/src/core/components/pin-set.js b/src/core/components/pin-set.js index 541b3eed49..f18a248604 100644 --- a/src/core/components/pin-set.js +++ b/src/core/components/pin-set.js @@ -74,9 +74,9 @@ exports = module.exports = function (dag) { seen[bs58Link] = true - dag.get(multihash, (err, { value }) => { + dag.get(multihash, (err, res) => { if (err) { return someCb(err) } - searchChildren(value, someCb) + searchChildren(res.value, someCb) }) }, cb) } diff --git a/src/core/components/pin.js b/src/core/components/pin.js index f276a87395..93f79a8e74 100644 --- a/src/core/components/pin.js +++ b/src/core/components/pin.js @@ -157,7 +157,7 @@ module.exports = function pin (self) { // persist updated pin sets to datastore flushPins((err, root) => { if (err) { return callback(err) } - return callback(null, results.map(key => ({hash: key}))) + return callback(null, results.map(hash => ({ hash }))) }) }) }) @@ -215,7 +215,7 @@ module.exports = function pin (self) { flushPins((err, root) => { if (err) { return callback(err) } self.log(`Removed pins: ${results}`) - return callback(null, results.map(key => ({hash: key}))) + return callback(null, results.map(hash => ({ hash }))) }) }) }) @@ -375,8 +375,9 @@ module.exports = function pin (self) { async.parallel([ cb => pinset.loadSet(pinRoot.value, types.recursive, cb), cb => pinset.loadSet(pinRoot.value, types.direct, cb) - ], (err, [rKeys, dKeys]) => { + ], (err, keys) => { if (err) { return callback(err) } + const [ rKeys, dKeys ] = keys directPins = new Set(dKeys.map(toB58String)) recursivePins = new Set(rKeys.map(toB58String))
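
The motivation for this final fix, as a standalone sketch (generic names,
not code from this patch): when a Node-style callback is invoked with an
error, its result argument is undefined, so destructuring the result in the
parameter list throws a TypeError before the error guard can ever run.

    // anti-pattern: crashes on the error path, because `undefined`
    // cannot be destructured -- the TypeError fires before `if (err)`
    dag.get(cid, (err, { value }) => {
      if (err) { return callback(err) }
      doSomethingWith(value)
    })

    // fixed: accept the result whole, check err first, then use it
    dag.get(cid, (err, res) => {
      if (err) { return callback(err) }
      doSomethingWith(res.value)
    })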