This repository has been archived by the owner on Aug 12, 2020. It is now read-only.

Merge pull request #227 from tableflip/fix/cid-version-1
fix: import with CID version 1
achingbrain committed Aug 21, 2018
2 parents c5cb38b + 6ef929d commit c54cdf4
Showing 2 changed files with 41 additions and 15 deletions.
3 changes: 2 additions & 1 deletion src/builder/reduce.js
@@ -27,7 +27,8 @@ module.exports = function reduce (file, ipld, options) {
     return waterfall([
       (cb) => ipld.get(leaf.cid, cb),
       (result, cb) => {
-        const data = result.value.data
+        // If result.value is a buffer, this is a raw leaf otherwise it's a dag-pb node
+        const data = Buffer.isBuffer(result.value) ? result.value : result.value.data
         const fileNode = new UnixFS('file', data)
 
         DAGNode.create(fileNode.marshal(), [], options.hashAlg, (error, node) => {
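For context on the one-line fix above: when content is imported with CID version 1, its leaves can be stored as raw blocks, so ipld.get resolves to the block's Buffer itself rather than a dag-pb node whose bytes live on .data. Below is a minimal sketch of that branch, reusing the names from reduce.js above (ipld, leaf, UnixFS, cb); the waterfall wiring and the DAGNode.create step that follows are elided here.

// Sketch only, mirroring the pattern in reduce.js above
ipld.get(leaf.cid, (err, result) => {
  if (err) {
    return cb(err)
  }

  // Raw leaf (the CID version 1 case this PR fixes): result.value is the Buffer itself
  // dag-pb leaf: result.value is a node object carrying its bytes on .data
  const data = Buffer.isBuffer(result.value) ? result.value : result.value.data
  const fileNode = new UnixFS('file', data)

  // ...continue as reduce.js does, wrapping fileNode in a new DAGNode
})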
53 changes: 39 additions & 14 deletions test/importer.js
@@ -2,6 +2,7 @@
 'use strict'
 
 const importer = require('./../src').importer
+const exporter = require('./../src').exporter
 
 const extend = require('deep-extend')
 const chai = require('chai')
@@ -542,10 +543,23 @@ module.exports = (repo) => {
       path = path[path.length - 1] === '/' ? path : path + '/'
       return {
         path: path + name + '.txt',
-        content: Buffer.alloc(262144 + 5).fill(1)
+        content: Buffer.alloc(size).fill(1)
       }
     }
 
+    const inputFiles = [
+      createInputFile('/foo', 10),
+      createInputFile('/foo', 60),
+      createInputFile('/foo/bar', 78),
+      createInputFile('/foo/baz', 200),
+      // Bigger than maxChunkSize
+      createInputFile('/foo', 262144 + 45),
+      createInputFile('/foo/bar', 262144 + 134),
+      createInputFile('/foo/bar', 262144 + 79),
+      createInputFile('/foo/bar', 262144 + 876),
+      createInputFile('/foo/bar', 262144 + 21)
+    ]
+
     const options = {
       cidVersion: 1,
       // Ensures we use DirSharded for the data below
@@ -560,23 +574,34 @@ module.exports = (repo) => {
 
       each(files, (file, cb) => {
         const cid = new CID(file.multihash).toV1()
-        ipld.get(cid, cb)
+        const inputFile = inputFiles.find(f => f.path === file.path)
+
+        // Just check the intermediate directory can be retrieved
+        if (!inputFile) {
+          return ipld.get(cid, cb)
+        }
+
+        // Check the imported content is correct
+        pull(
+          exporter(cid, ipld),
+          pull.collect((err, nodes) => {
+            expect(err).to.not.exist()
+            pull(
+              nodes[0].content,
+              pull.collect((err, chunks) => {
+                expect(err).to.not.exist()
+                expect(Buffer.concat(chunks)).to.deep.equal(inputFile.content)
+                cb()
+              })
+            )
+          })
+        )
       }, done)
     }
 
     pull(
-      pull.values([
-        createInputFile('/foo', 10),
-        createInputFile('/foo', 60),
-        createInputFile('/foo/bar', 78),
-        createInputFile('/foo/baz', 200),
-        // Bigger than maxChunkSize
-        createInputFile('/foo', 262144 + 45),
-        createInputFile('/foo/bar', 262144 + 134),
-        createInputFile('/foo/bar', 262144 + 79),
-        createInputFile('/foo/bar', 262144 + 876),
-        createInputFile('/foo/bar', 262144 + 21)
-      ]),
+      // Pass a copy of inputFiles, since the importer mutates them
+      pull.values(inputFiles.map(f => Object.assign({}, f))),
       importer(ipld, options),
       pull.collect(onCollected)
     )
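The test above drives the importer through the same pull-stream pipeline a caller would use. For reference, here is a minimal sketch of that flow with cidVersion: 1, following the API shown in the test; the file path and content are illustrative, the require paths assume the repo root (the test uses './../src'), ipld is assumed to be an initialised IPLD instance set up elsewhere as in the test, and all other importer options are left at their defaults.

const pull = require('pull-stream')
const CID = require('cids')
const importer = require('./src').importer
const exporter = require('./src').exporter

// ipld: an initialised IPLD instance, created during setup as in the test
pull(
  pull.values([{ path: '/foo/hello.txt', content: Buffer.from('hello world') }]),
  importer(ipld, { cidVersion: 1 }),
  pull.collect((err, files) => {
    if (err) throw err

    // The importer emits an entry per file and directory; find the file by path
    const imported = files.find(f => f.path === '/foo/hello.txt')
    const cid = new CID(imported.multihash).toV1()

    // Round-trip through the exporter and check the bytes, as the test does
    pull(
      exporter(cid, ipld),
      pull.collect((err, nodes) => {
        if (err) throw err

        pull(
          nodes[0].content,
          pull.collect((err, chunks) => {
            if (err) throw err
            console.log(Buffer.concat(chunks).toString()) // 'hello world'
          })
        )
      })
    )
  })
)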