From b39351d9058d9efa7c67d73d94beb9449e0b64fa Mon Sep 17 00:00:00 2001 From: nginnever Date: Sat, 20 Feb 2016 19:25:22 -0800 Subject: [PATCH] added buffer importer --- .travis.yml | 19 ++++ karma.conf.js | 54 +++++++++++ package.json | 31 ++++++- src/chunker-fixed-size.js | 2 +- src/index.js | 107 ++++++++++++++++++--- tests/browser.js | 60 ++++++++++++ tests/buffer-test.js | 155 +++++++++++++++++++++++++++++++ tests/index.js | 13 +-- tests/test-data/empty.txt | 0 tests/test-fixed-size-chunker.js | 79 ++++++++-------- tests/test-import.js | 137 +++++++++++++++++++++++---- 11 files changed, 577 insertions(+), 80 deletions(-) create mode 100644 .travis.yml create mode 100644 karma.conf.js create mode 100644 tests/browser.js create mode 100644 tests/buffer-test.js create mode 100644 tests/test-data/empty.txt diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..525ffed1 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,19 @@ + +language: node_js +node_js: + - '4' + - '5' + +before_install: + - npm i -g npm + # Workaround for a permissions issue with Travis virtual machine images + +addons: + firefox: 'latest' + +before_script: + - export DISPLAY=:99.0 + - sh -e /etc/init.d/xvfb start + +script: + - npm test \ No newline at end of file diff --git a/karma.conf.js b/karma.conf.js new file mode 100644 index 00000000..d9d691f1 --- /dev/null +++ b/karma.conf.js @@ -0,0 +1,54 @@ +const path = require('path') + +module.exports = function (config) { + config.set({ + basePath: '', + frameworks: ['mocha'], + + files: [ + 'tests/browser.js' + ], + + preprocessors: { + 'tests/*': ['webpack', 'sourcemap'] + }, + + webpack: { + devtool: 'eval', + resolve: { + extensions: ['', '.js', '.json'] + }, + externals: { + fs: '{}' + }, + node: { + Buffer: true + }, + module: { + loaders: [ + { test: /\.json$/, loader: 'json' } + ], + postLoaders: [ + { + include: path.resolve(__dirname, 'node_modules/ipfs-unixfs'), + loader: 'transform?brfs' + } + ] + } + }, + + webpackMiddleware: { + noInfo: true, + stats: { + colors: true + } + }, + reporters: ['spec'], + port: 9876, + colors: true, + logLevel: config.LOG_INFO, + autoWatch: false, + browsers: process.env.TRAVIS ? 
['Firefox'] : ['Chrome'], + singleRun: true + }) +} diff --git a/package.json b/package.json index d010c178..26b2b9b8 100644 --- a/package.json +++ b/package.json @@ -5,9 +5,14 @@ "main": "src/index.js", "scripts": { "lint": "standard", - "coverage": "istanbul cover --print both -- _mocha tests/index.js", - "test": "mocha tests/index.js" + "test": "npm run test:node && npm run test:browser", + "test:node": "mocha tests/index.js", + "test:browser": "karma start karma.conf.js" }, + "pre-commit": [ + "lint", + "test" + ], "repository": { "type": "git", "url": "git+https://github.com/diasdavid/js-ipfs-data-importing.git" @@ -22,16 +27,34 @@ }, "homepage": "https://github.com/diasdavid/js-ipfs-data-importing#readme", "devDependencies": { + "2": "0.0.1", + "brfs": "^1.4.3", "bs58": "^3.0.0", + "buffer-loader": "0.0.1", "chai": "^3.4.1", "fs-blob-store": "^5.2.1", - "ipfs-repo": "^0.5.0", + "highland": "^2.7.1", + "idb-plus-blob-store": "^1.0.0", + "ipfs-repo": "^0.5.1", "istanbul": "^0.4.1", + "json-loader": "^0.5.4", + "karma": "^0.13.19", + "karma-chrome-launcher": "^0.2.2", + "karma-cli": "^0.1.2", + "karma-firefox-launcher": "^0.1.7", + "karma-mocha": "^0.2.1", + "karma-sourcemap-loader": "^0.3.7", + "karma-spec-reporter": "0.0.24", + "karma-webpack": "^1.7.0", "mocha": "^2.3.4", "ncp": "^2.0.0", "pre-commit": "^1.1.2", + "raw-loader": "^0.5.1", "rimraf": "^2.5.1", - "standard": "^5.4.1" + "standard": "^6.0.8", + "string-to-stream": "^1.0.1", + "transform-loader": "^0.2.3", + "webpack": "^2.0.7-beta" }, "dependencies": { "async": "^1.5.2", diff --git a/src/chunker-fixed-size.js b/src/chunker-fixed-size.js index 630e67ca..98be812e 100644 --- a/src/chunker-fixed-size.js +++ b/src/chunker-fixed-size.js @@ -23,7 +23,7 @@ function FixedSizeChunker (size) { var chunk = new Buffer(size, 'binary') var newBuf = new Buffer(buf.length - size, 'binary') buf.copy(chunk, 0, 0, size) - buf.copy(newBuf, 0, size - 1, buf.length - size) + buf.copy(newBuf, 0, size, buf.length) buf = newBuf that.push(chunk) diff --git a/src/index.js b/src/index.js index 1b967fd2..6d837ac4 100644 --- a/src/index.js +++ b/src/index.js @@ -7,33 +7,48 @@ const FixedSizeChunker = require('./chunker-fixed-size') const through2 = require('through2') const UnixFS = require('ipfs-unixfs') const async = require('async') - exports = module.exports const CHUNK_SIZE = 262144 // Use a layout + chunkers to convert a directory (or file) to the layout format -exports.import = (options, callback) => { +exports.import = function (options, callback) { // options.path : what to import + // options.buffer : import a buffer + // options.filename : optional file name for buffer + // options.stream : import a stream // options.recursive : follow dirs // options.chunkers : obj with chunkers to each type of data, { default: dumb-chunker } // options.dag-service : instance of block service const dagService = options.dagService - const stats = fs.statSync(options.path) - if (stats.isFile()) { - fileImporter(options.path, callback) - } else if (stats.isDirectory() && options.recursive) { - dirImporter(options.path, callback) - } else { - return callback(new Error('recursive must be true to add a directory')) + if (options.buffer) { + if (!Buffer.isBuffer(options.buffer)) { + return callback(new Error('buffer importer must take a buffer')) + } + bufferImporter(options.buffer, callback) + } else if (options.stream) { + if (!(typeof options.stream.on === 'function')) { + return callback(new Error('stream importer must take a readable stream')) + } + // TODO 
Create Stream Importer + // streamImporter(options.stream, callback) + return callback(new Error('stream importer has not been built yet')) + } else if (options.path) { + const stats = fs.statSync(options.path) + if (stats.isFile()) { + fileImporter(options.path, callback) + } else if (stats.isDirectory() && options.recursive) { + dirImporter(options.path, callback) + } else { + return callback(new Error('recursive must be true to add a directory')) + } } function fileImporter (path, callback) { const stats = fs.statSync(path) if (stats.size > CHUNK_SIZE) { const links = [] // { Hash: , Size: , Name: } - fs.createReadStream(path) .pipe(new FixedSizeChunker(CHUNK_SIZE)) .pipe(through2((chunk, enc, cb) => { @@ -53,7 +68,6 @@ exports.import = (options, callback) => { leafSize: raw.fileSize(), Name: '' }) - cb() }) }, (cb) => { @@ -83,7 +97,8 @@ exports.import = (options, callback) => { })) } else { // create just one file node with the data directly - const fileUnixFS = new UnixFS('file', fs.readFileSync(path)) + var buf = fs.readFileSync(path) + const fileUnixFS = new UnixFS('file', buf) const fileNode = new mDAG.DAGNode(fileUnixFS.marshal()) dagService.add(fileNode, (err) => { @@ -166,9 +181,73 @@ exports.import = (options, callback) => { }) }) } + function bufferImporter (buffer, callback) { + const links = [] // { Hash: , Size: , Name: } + if (buffer.length > CHUNK_SIZE) { + var fsc = new FixedSizeChunker(CHUNK_SIZE) + fsc.write(buffer) + fsc.end() + fsc.pipe(through2((chunk, enc, cb) => { + // TODO: check if this is right (I believe it should be type 'raw' + // https://github.com/ipfs/go-ipfs/issues/2331 + const raw = new UnixFS('file', chunk) + const node = new mDAG.DAGNode(raw.marshal()) + + dagService.add(node, function (err) { + if (err) { + return log.err(err) + } + links.push({ + Hash: node.multihash(), + Size: node.size(), + leafSize: raw.fileSize(), + Name: '' + }) + + cb() + }) + }, (cb) => { + const file = new UnixFS('file') + const parentNode = new mDAG.DAGNode() + links.forEach((l) => { + file.addBlockSize(l.leafSize) + const link = new mDAG.DAGLink(l.Name, l.Size, l.Hash) + parentNode.addRawLink(link) + }) + parentNode.data = file.marshal() + dagService.add(parentNode, (err) => { + if (err) { + return log.err(err) + } + // an optional file name provided + const fileName = options.filename - // function bufferImporter (path) {} - // function streamImporter (path) {} + callback(null, { + Hash: parentNode.multihash(), + Size: parentNode.size(), + Name: fileName + }) && cb() + }) + })) + } else { + // create just one file node with the data directly + const fileUnixFS = new UnixFS('file', buffer) + const fileNode = new mDAG.DAGNode(fileUnixFS.marshal()) + + dagService.add(fileNode, (err) => { + if (err) { + return log.err(err) + } + + callback(null, { + Hash: fileNode.multihash(), + Size: fileNode.size(), + Name: options.filename + }) + }) + } + } + // function streamImporter (stream, callback) {} } exports.export = function () { diff --git a/tests/browser.js b/tests/browser.js new file mode 100644 index 00000000..2d812dfb --- /dev/null +++ b/tests/browser.js @@ -0,0 +1,60 @@ +/* eslint-env mocha */ +const tests = require('./buffer-test') +const async = require('async') +const store = require('idb-plus-blob-store') +const _ = require('lodash') +const IPFSRepo = require('ipfs-repo') +const repoContext = require.context('buffer!./repo-example', true) + +const idb = window.indexedDB || + window.mozIndexedDB || + window.webkitIndexedDB || + window.msIndexedDB + 
+idb.deleteDatabase('ipfs') +idb.deleteDatabase('ipfs/blocks') + +describe('IPFS data importing tests on the Browser', function () { + before(function (done) { + this.timeout(23000) + var repoData = [] + repoContext.keys().forEach(function (key) { + repoData.push({ + key: key.replace('./', ''), + value: repoContext(key) + }) + }) + + const mainBlob = store('ipfs') + const blocksBlob = store('ipfs/blocks') + + async.eachSeries(repoData, (file, cb) => { + if (_.startsWith(file.key, 'datastore/')) { + return cb() + } + + const blocks = _.startsWith(file.key, 'blocks/') + const blob = blocks ? blocksBlob : mainBlob + const key = blocks ? file.key.replace(/^blocks\//, '') : file.key + + blob.createWriteStream({ + key: key + }).end(file.value, cb) + }, done) + }) + + // create the repo constant to be used in the import a small buffer test + const options = { + stores: { + keys: store, + config: store, + datastore: store, + // datastoreLegacy: needs https://github.com/ipfs/js-ipfs-repo/issues/6#issuecomment-164650642 + logs: store, + locks: store, + version: store + } + } + const repo = new IPFSRepo('ipfs', options) + tests(repo) +}) diff --git a/tests/buffer-test.js b/tests/buffer-test.js new file mode 100644 index 00000000..61098f35 --- /dev/null +++ b/tests/buffer-test.js @@ -0,0 +1,155 @@ +/* eslint-env mocha */ +const importer = require('./../src') +const BlockService = require('ipfs-blocks').BlockService +const DAGService = require('ipfs-merkle-dag').DAGService +const DAGNode = require('ipfs-merkle-dag').DAGNode +const UnixFS = require('ipfs-unixfs') + +const FixedSizeChunker = require('./../src/chunker-fixed-size') +const expect = require('chai').expect +const stringToStream = require('string-to-stream') +const through = require('through2') + +const myFile = require('buffer!./test-data/1.2MiB.txt') +const fileStream = function () { + return stringToStream(myFile) +} + +const smallBuf = require('buffer!./test-data/200Bytes.txt') +const bigBuf = require('buffer!./test-data/1.2MiB.txt') +const bigBlock = require('buffer!./test-data/1.2MiB.txt.block') +const bigLink = require('buffer!./test-data/1.2MiB.txt.link-block0') +const marbuf = require('buffer!./test-data/200Bytes.txt.block') + +module.exports = function (repo) { + describe('chunker: fixed size', function () { + this.timeout(10000) + it('256 Bytes chunks', function (done) { + var counter = 0 + fileStream() + .pipe(FixedSizeChunker(256)) + .pipe(through(function (chunk, enc, cb) { + if (chunk.length < 256) { + expect(counter).to.be.below(1) + counter += 1 + return cb() + } + expect(chunk.length).to.equal(256) + cb() + }, () => { + done() + })) + }) + + it('256 KiB chunks', function (done) { + var counter = 0 + var KiB256 = 262144 + fileStream() + .pipe(FixedSizeChunker(KiB256)) + .pipe(through((chunk, enc, cb) => { + if (chunk.length < 262144) { + expect(counter).to.be.below(1) + counter += 1 + return cb() + } + expect(chunk.length).to.equal(262144) + cb() + }, () => { + done() + })) + }) + + it('256 KiB chunks of non scalar filesize', function (done) { + var counter = 0 + var KiB256 = 262144 + fileStream() + .pipe(FixedSizeChunker(KiB256)) + .pipe(through((chunk, enc, cb) => { + if (chunk.length < KiB256) { + expect(counter).to.be.below(2) + counter += 1 + return cb() + } + expect(chunk.length).to.equal(KiB256) + cb() + }, () => { + done() + })) + }) + }) + + describe('layout: importer', function () { + it('import a small buffer', function (done) { + // this is just like "import a small file" + var bs = new BlockService(repo) + var ds 
= new DAGService(bs) + var buf = smallBuf + importer.import({ + buffer: buf, + dagService: ds + }, function (err, stat) { + expect(err).to.not.exist + ds.get(stat.Hash, function (err, node) { + expect(err).to.not.exist + const smallDAGNode = new DAGNode() + smallDAGNode.unMarshal(marbuf) + expect(node.size()).to.equal(smallDAGNode.size()) + expect(node.multihash()).to.deep.equal(smallDAGNode.multihash()) + done() + }) + }) + }) + + it('import a big buffer', function (done) { + // this is just like "import a big file" + var buf = bigBuf + var bs = new BlockService(repo) + var ds = new DAGService(bs) + importer.import({ + buffer: buf, + dagService: ds, + filename: 'Test.txt' + }, function (err, stat) { + expect(err).to.not.exist + ds.get(stat.Hash, function (err, node) { + expect(err).to.not.exist + const bigDAGNode = new DAGNode() + bigDAGNode.unMarshal(bigBlock) + expect(node.size()).to.equal(bigDAGNode.size()) + expect(node.links).to.deep.equal(bigDAGNode.links) + + const nodeUnixFS = UnixFS.unmarshal(node.data) + const bigDAGNodeUnixFS = UnixFS.unmarshal(bigDAGNode.data) + expect(nodeUnixFS.type).to.equal(bigDAGNodeUnixFS.type) + expect(nodeUnixFS.data).to.deep.equal(bigDAGNodeUnixFS.data) + expect(nodeUnixFS.blockSizes).to.deep.equal(bigDAGNodeUnixFS.blockSizes) + expect(nodeUnixFS.fileSize()).to.equal(bigDAGNodeUnixFS.fileSize()) + + expect(node.data).to.deep.equal(bigDAGNode.data) + expect(node.multihash()).to.deep.equal(bigDAGNode.multihash()) + + ds.get(node.links[0].hash, function (err, node) { + expect(err).to.not.exist + const leaf = new DAGNode() + + var marbuf2 = bigLink + leaf.unMarshal(marbuf2) + expect(node.links).to.deep.equal(leaf.links) + expect(node.links.length).to.equal(0) + expect(leaf.links.length).to.equal(0) + expect(leaf.marshal()).to.deep.equal(marbuf2) + const nodeUnixFS = UnixFS.unmarshal(node.data) + const leafUnixFS = UnixFS.unmarshal(leaf.data) + expect(nodeUnixFS.type).to.equal(leafUnixFS.type) + expect(nodeUnixFS.fileSize()).to.equal(leafUnixFS.fileSize()) + expect(nodeUnixFS.data).to.deep.equal(leafUnixFS.data) + expect(nodeUnixFS.blockSizes).to.deep.equal(leafUnixFS.blockSizes) + expect(node.data).to.deep.equal(leaf.data) + expect(node.marshal()).to.deep.equal(leaf.marshal()) + done() + }) + }) + }) + }) + }) +} diff --git a/tests/index.js b/tests/index.js index a9237262..640b4108 100644 --- a/tests/index.js +++ b/tests/index.js @@ -11,33 +11,34 @@ describe('core', () => { const repoExample = process.cwd() + '/tests/repo-example' const repoTests = process.cwd() + '/tests/repo-tests' + Date.now() - before(done => { - ncp(repoExample, repoTests, err => { + before((done) => { + ncp(repoExample, repoTests, (err) => { process.env.IPFS_PATH = repoTests expect(err).to.equal(null) done() }) }) - after(done => { - rimraf(repoTests, err => { + after((done) => { + rimraf(repoTests, (err) => { expect(err).to.equal(null) done() }) }) const tests = fs.readdirSync(__dirname) - tests.filter(file => { + tests.filter((file) => { if (file === 'index.js' || file === 'browser.js' || file === 'test-data' || file === 'repo-example' || + file === 'buffer-test.js' || file.indexOf('repo-tests') > -1) { return false } else { return true } - }).forEach(file => { + }).forEach((file) => { require('./' + file) }) }) diff --git a/tests/test-data/empty.txt b/tests/test-data/empty.txt new file mode 100644 index 00000000..e69de29b diff --git a/tests/test-fixed-size-chunker.js b/tests/test-fixed-size-chunker.js index 6a14508e..c713c14f 100644 --- a/tests/test-fixed-size-chunker.js +++ 
b/tests/test-fixed-size-chunker.js @@ -1,61 +1,64 @@ /* globals describe, it */ -var FixedSizeChunker = require('./../src/chunker-fixed-size') -var fs = require('fs') -var stream = require('stream') -var expect = require('chai').expect +const FixedSizeChunker = require('./../src/chunker-fixed-size') +const fs = require('fs') +const expect = require('chai').expect +const stringToStream = require('string-to-stream') +const through = require('through2') +const path = require('path') +var isNode = !global.window + +var fileStream +if (isNode) { + fileStream = function () { + return fs.createReadStream(path.join(__dirname, '/test-data/1MiB.txt')) + } +} else { + var myFile = require('buffer!./test-data/1MiB.txt') + fileStream = function () { + return stringToStream(myFile) + } +} describe('chunker: fixed size', function () { it('256 Bytes chunks', function (done) { - var writable = new stream.Writable({ - write: function (chunk, encoding, next) { - expect(chunk.length).to.equal(256) - next() - } - }) - - fs.createReadStream(__dirname + '/test-data/1MiB.txt') + fileStream() .pipe(FixedSizeChunker(256)) - .pipe(writable) - - writable.on('finish', done) + .pipe(through((chunk, enc, cb) => { + expect(chunk.length).to.equal(256) + cb() + }, () => { + done() + })) }) it('256 KiB chunks', function (done) { var KiB256 = 262144 - var writable = new stream.Writable({ - write: function (chunk, encoding, next) { - expect(chunk.length).to.equal(KiB256) - next() - } - }) - - fs.createReadStream(__dirname + '/test-data/1MiB.txt') + fileStream() .pipe(FixedSizeChunker(KiB256)) - .pipe(writable) - - writable.on('finish', done) + .pipe(through((chunk, enc, cb) => { + expect(chunk.length).to.equal(KiB256) + cb() + }, () => { + done() + })) }) it('256 KiB chunks of non scalar filesize', function (done) { var counter = 0 var KiB256 = 262144 - var writable = new stream.Writable({ - write: function (chunk, encoding, next) { + fileStream() + .pipe(FixedSizeChunker(KiB256)) + .pipe(through((chunk, enc, cb) => { if (chunk.length < KiB256) { expect(counter).to.be.below(2) counter += 1 - return next() + return cb() } expect(chunk.length).to.equal(KiB256) - next() - } - }) - - fs.createReadStream(__dirname + '/test-data/1.2MiB.txt') - .pipe(FixedSizeChunker(KiB256)) - .pipe(writable) - - writable.on('finish', done) + cb() + }, () => { + done() + })) }) }) diff --git a/tests/test-import.js b/tests/test-import.js index 9e2e3084..5c2b01e1 100644 --- a/tests/test-import.js +++ b/tests/test-import.js @@ -1,5 +1,4 @@ -/* globals describe, it */ - +/* eslint-env mocha */ const importer = require('./../src') const expect = require('chai').expect const IPFSRepo = require('ipfs-repo') @@ -10,13 +9,27 @@ const fsBlobStore = require('fs-blob-store') const bs58 = require('bs58') const fs = require('fs') const UnixFS = require('ipfs-unixfs') +const path = require('path') describe('layout: importer', function () { - const big = __dirname + '/test-data/1.2MiB.txt' - const small = __dirname + '/test-data/200Bytes.txt' - const dirSmall = __dirname + '/test-data/dir-small' - const dirBig = __dirname + '/test-data/dir-big' - const dirNested = __dirname + '/test-data/dir-nested' + const big = path.join(__dirname, '/test-data/1.2MiB.txt') + const small = path.join(__dirname, '/test-data/200Bytes.txt') + const dirSmall = path.join(__dirname, '/test-data/dir-small') + const dirBig = path.join(__dirname, '/test-data/dir-big') + const dirNested = path.join(__dirname, '/test-data/dir-nested') + + // check to see if missing empty dirs need to be 
created + + fs.stat(path.join(__dirname, '/test-data/dir-nested/dir-another'), function (err, exists) { + if (err) { + fs.mkdir(path.join(__dirname, '/test-data/dir-nested/dir-another')) + } + }) + fs.stat(path.join(__dirname, '/test-data/dir-nested/level-1/level-2'), function (err, exists) { + if (err) { + fs.mkdir(path.join(__dirname, '/test-data/dir-nested/level-1/level-2')) + } + }) var ds @@ -32,7 +45,6 @@ describe('layout: importer', function () { version: fsBlobStore } } - var repo = new IPFSRepo(process.env.IPFS_PATH, options) var bs = new BlockService(repo) ds = new DAGService(bs) @@ -50,7 +62,8 @@ describe('layout: importer', function () { ds.get(stat.Hash, (err, node) => { expect(err).to.not.exist const smallDAGNode = new DAGNode() - smallDAGNode.unMarshal(fs.readFileSync(small + '.block')) + var buf = fs.readFileSync(small + '.block') + smallDAGNode.unMarshal(buf) expect(node.size()).to.equal(smallDAGNode.size()) expect(node.multihash()).to.deep.equal(smallDAGNode.multihash()) done() @@ -68,7 +81,8 @@ describe('layout: importer', function () { expect(err).to.not.exist const bigDAGNode = new DAGNode() - bigDAGNode.unMarshal(fs.readFileSync(big + '.block')) + var buf = fs.readFileSync(big + '.block') + bigDAGNode.unMarshal(buf) expect(node.size()).to.equal(bigDAGNode.size()) expect(node.links).to.deep.equal(bigDAGNode.links) @@ -85,11 +99,12 @@ describe('layout: importer', function () { ds.get(node.links[0].hash, (err, node) => { expect(err).to.not.exist const leaf = new DAGNode() - leaf.unMarshal(fs.readFileSync(big + '.link-block0')) + var buf2 = fs.readFileSync(big + '.link-block0') + leaf.unMarshal(buf2) expect(node.links).to.deep.equal(leaf.links) expect(node.links.length).to.equal(0) expect(leaf.links.length).to.equal(0) - expect(leaf.marshal()).to.deep.equal(fs.readFileSync(big + '.link-block0')) + expect(leaf.marshal()).to.deep.equal(buf2) const nodeUnixFS = UnixFS.unmarshal(node.data) const leafUnixFS = UnixFS.unmarshal(leaf.data) expect(nodeUnixFS.type).to.equal(leafUnixFS.type) @@ -115,7 +130,8 @@ describe('layout: importer', function () { ds.get(stats.Hash, (err, node) => { expect(err).to.not.exist const dirSmallNode = new DAGNode() - dirSmallNode.unMarshal(fs.readFileSync(dirSmall + '.block')) + var buf = fs.readFileSync(dirSmall + '.block') + dirSmallNode.unMarshal(buf) expect(node.links).to.deep.equal(dirSmallNode.links) const nodeUnixFS = UnixFS.unmarshal(node.data) @@ -143,7 +159,8 @@ describe('layout: importer', function () { ds.get(stats.Hash, (err, node) => { expect(err).to.not.exist const dirNode = new DAGNode() - dirNode.unMarshal(fs.readFileSync(dirBig + '.block')) + var buf = fs.readFileSync(dirBig + '.block') + dirNode.unMarshal(buf) expect(node.links).to.deep.equal(dirNode.links) const nodeUnixFS = UnixFS.unmarshal(node.data) @@ -174,7 +191,8 @@ describe('layout: importer', function () { expect(node.links.length).to.equal(3) const dirNode = new DAGNode() - dirNode.unMarshal(fs.readFileSync(dirNested + '.block')) + var buf = fs.readFileSync(dirNested + '.block') + dirNode.unMarshal(buf) expect(node.links).to.deep.equal(dirNode.links) expect(node.data).to.deep.equal(dirNode.data) done() @@ -182,7 +200,92 @@ describe('layout: importer', function () { }) }) - it.skip('import a buffer', (done) => {}) - it.skip('import from a stream', (done) => {}) + it('import a small buffer', (done) => { + // this is just like "import a small file" + var buf = fs.readFileSync(path.join(__dirname, '/test-data/200Bytes.txt')) + importer.import({ + buffer: buf, + dagService: ds 
+ }, function (err, stat) { + expect(err).to.not.exist + ds.get(stat.Hash, (err, node) => { + expect(err).to.not.exist + const smallDAGNode = new DAGNode() + var marbuf = fs.readFileSync(small + '.block') + smallDAGNode.unMarshal(marbuf) + expect(node.size()).to.equal(smallDAGNode.size()) + expect(node.multihash()).to.deep.equal(smallDAGNode.multihash()) + done() + }) + }) + }) + + it('import a big buffer', (done) => { + // this is just like "import a big file" + var buf = fs.readFileSync(path.join(__dirname, '/test-data/1.2MiB.txt')) + importer.import({ + buffer: buf, + dagService: ds, + filename: 'Test.txt' + }, function (err, stat) { + expect(err).to.not.exist + ds.get(stat.Hash, (err, node) => { + expect(err).to.not.exist + + const bigDAGNode = new DAGNode() + var marbuf = fs.readFileSync(big + '.block') + bigDAGNode.unMarshal(marbuf) + expect(node.size()).to.equal(bigDAGNode.size()) + expect(node.links).to.deep.equal(bigDAGNode.links) + + const nodeUnixFS = UnixFS.unmarshal(node.data) + const bigDAGNodeUnixFS = UnixFS.unmarshal(bigDAGNode.data) + expect(nodeUnixFS.type).to.equal(bigDAGNodeUnixFS.type) + expect(nodeUnixFS.data).to.deep.equal(bigDAGNodeUnixFS.data) + expect(nodeUnixFS.blockSizes).to.deep.equal(bigDAGNodeUnixFS.blockSizes) + expect(nodeUnixFS.fileSize()).to.equal(bigDAGNodeUnixFS.fileSize()) + + expect(node.data).to.deep.equal(bigDAGNode.data) + expect(node.multihash()).to.deep.equal(bigDAGNode.multihash()) + + ds.get(node.links[0].hash, (err, node) => { + expect(err).to.not.exist + const leaf = new DAGNode() + + var marbuf2 = fs.readFileSync(big + '.link-block0') + leaf.unMarshal(marbuf2) + expect(node.links).to.deep.equal(leaf.links) + expect(node.links.length).to.equal(0) + expect(leaf.links.length).to.equal(0) + expect(leaf.marshal()).to.deep.equal(marbuf2) + const nodeUnixFS = UnixFS.unmarshal(node.data) + const leafUnixFS = UnixFS.unmarshal(leaf.data) + expect(nodeUnixFS.type).to.equal(leafUnixFS.type) + expect(nodeUnixFS.fileSize()).to.equal(leafUnixFS.fileSize()) + expect(nodeUnixFS.data).to.deep.equal(leafUnixFS.data) + expect(nodeUnixFS.blockSizes).to.deep.equal(leafUnixFS.blockSizes) + expect(node.data).to.deep.equal(leaf.data) + expect(node.marshal()).to.deep.equal(leaf.marshal()) + done() + }) + }) + }) + }) + + // TODO, make this work with small files + it.skip('import from a readable stream', (done) => { + }) + + it.skip('export a file by hash', (done) => { + // TODO Create tests and function for exporting data + var hash = 'QmW7BDxEbGqxxSYVtn3peNPQgdDXbWkoQ6J1EFYAEuQV3Q' + importer.export({ + hash: hash, + dagService: ds + }, function (err, file) { + console.log(err) + done() + }) + }) })
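
For reference, a minimal sketch of how the buffer importer added in this patch is driven, mirroring the node test in tests/test-import.js. The repo path and file name below are illustrative, not part of the patch; the sketch assumes an already-initialized IPFS repo on disk (the tests copy tests/repo-example into place before running).

const importer = require('./src')
const IPFSRepo = require('ipfs-repo')
const fsBlobStore = require('fs-blob-store')
const BlockService = require('ipfs-blocks').BlockService
const DAGService = require('ipfs-merkle-dag').DAGService
const fs = require('fs')

// stores backed by fs-blob-store, as in tests/test-import.js
const repo = new IPFSRepo('/path/to/initialized/ipfs/repo', {
  stores: {
    keys: fsBlobStore,
    config: fsBlobStore,
    datastore: fsBlobStore,
    logs: fsBlobStore,
    locks: fsBlobStore,
    version: fsBlobStore
  }
})
const dagService = new DAGService(new BlockService(repo))

importer.import({
  buffer: fs.readFileSync('./tests/test-data/200Bytes.txt'), // must be a Buffer
  filename: '200Bytes.txt', // optional; returned as Name
  dagService: dagService
}, function (err, stat) {
  if (err) throw err
  // stat is { Hash: <multihash>, Size: <node size>, Name: <filename> }
  console.log(stat)
})

Buffers larger than CHUNK_SIZE (256 KiB) are split by the FixedSizeChunker and linked under a parent UnixFS 'file' node, exactly as the existing file importer does; smaller buffers become a single DAG node.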