Merge pull request #6 from nginnever/master
added buffer importer
daviddias committed Mar 21, 2016
2 parents e80e895 + b39351d · commit d1a9ddd
Showing 11 changed files with 577 additions and 80 deletions.
19 changes: 19 additions & 0 deletions .travis.yml
@@ -0,0 +1,19 @@

language: node_js
node_js:
- '4'
- '5'

before_install:
- npm i -g npm
# Workaround for a permissions issue with Travis virtual machine images

addons:
firefox: 'latest'

before_script:
- export DISPLAY=:99.0
- sh -e /etc/init.d/xvfb start

script:
- npm test
54 changes: 54 additions & 0 deletions karma.conf.js
@@ -0,0 +1,54 @@
const path = require('path')

module.exports = function (config) {
config.set({
basePath: '',
frameworks: ['mocha'],

files: [
'tests/browser.js'
],

preprocessors: {
'tests/*': ['webpack', 'sourcemap']
},

webpack: {
devtool: 'eval',
resolve: {
extensions: ['', '.js', '.json']
},
externals: {
fs: '{}'
},
node: {
Buffer: true
},
module: {
loaders: [
{ test: /\.json$/, loader: 'json' }
],
postLoaders: [
{
include: path.resolve(__dirname, 'node_modules/ipfs-unixfs'),
loader: 'transform?brfs'
}
]
}
},

webpackMiddleware: {
noInfo: true,
stats: {
colors: true
}
},
reporters: ['spec'],
port: 9876,
colors: true,
logLevel: config.LOG_INFO,
autoWatch: false,
browsers: process.env.TRAVIS ? ['Firefox'] : ['Chrome'],
singleRun: true
})
}
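
The postLoaders entry above is what lets ipfs-unixfs run in the browser: that module reads its protobuf schema from disk with fs.readFileSync, and transform-loader applies brfs so the file contents are inlined into the bundle at build time. A minimal sketch of the transformation, assuming the module loads a schema file from a statically resolvable path (the unixfs.proto name here is illustrative):

'use strict'

const fs = require('fs')
const path = require('path')

// brfs only rewrites fs.readFileSync calls whose arguments resolve
// statically, like this one:
const schema = fs.readFileSync(path.join(__dirname, 'unixfs.proto'), 'utf8')

// After the transform, the call above ships as a plain string literal,
// roughly: const schema = 'message Data { ... }'
// so no fs implementation is needed at runtime; the fs: '{}' external
// above stubs out any stray references that remain.
console.log(schema.length)
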
31 changes: 27 additions & 4 deletions package.json
@@ -5,9 +5,14 @@
"main": "src/index.js",
"scripts": {
"lint": "standard",
"coverage": "istanbul cover --print both -- _mocha tests/index.js",
"test": "mocha tests/index.js"
"test": "npm run test:node && npm run test:browser",
"test:node": "mocha tests/index.js",
"test:browser": "karma start karma.conf.js"
},
"pre-commit": [
"lint",
"test"
],
"repository": {
"type": "git",
"url": "git+https://github.com/diasdavid/js-ipfs-data-importing.git"
@@ -22,16 +27,34 @@
},
"homepage": "https://github.com/diasdavid/js-ipfs-data-importing#readme",
"devDependencies": {
"2": "0.0.1",
"brfs": "^1.4.3",
"bs58": "^3.0.0",
"buffer-loader": "0.0.1",
"chai": "^3.4.1",
"fs-blob-store": "^5.2.1",
"ipfs-repo": "^0.5.0",
"highland": "^2.7.1",
"idb-plus-blob-store": "^1.0.0",
"ipfs-repo": "^0.5.1",
"istanbul": "^0.4.1",
"json-loader": "^0.5.4",
"karma": "^0.13.19",
"karma-chrome-launcher": "^0.2.2",
"karma-cli": "^0.1.2",
"karma-firefox-launcher": "^0.1.7",
"karma-mocha": "^0.2.1",
"karma-sourcemap-loader": "^0.3.7",
"karma-spec-reporter": "0.0.24",
"karma-webpack": "^1.7.0",
"mocha": "^2.3.4",
"ncp": "^2.0.0",
"pre-commit": "^1.1.2",
"raw-loader": "^0.5.1",
"rimraf": "^2.5.1",
"standard": "^5.4.1"
"standard": "^6.0.8",
"string-to-stream": "^1.0.1",
"transform-loader": "^0.2.3",
"webpack": "^2.0.7-beta"
},
"dependencies": {
"async": "^1.5.2",
2 changes: 1 addition & 1 deletion src/chunker-fixed-size.js
@@ -23,7 +23,7 @@ function FixedSizeChunker (size) {
var chunk = new Buffer(size, 'binary')
var newBuf = new Buffer(buf.length - size, 'binary')
buf.copy(chunk, 0, 0, size)
buf.copy(newBuf, 0, size - 1, buf.length - size)
buf.copy(newBuf, 0, size, buf.length)
buf = newBuf
that.push(chunk)

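The one-line change above fixes an off-by-one in how the remainder of the input buffer is carried over between chunks: Buffer#copy takes an end offset into the source, not a length, so the old call both re-copied the last byte of the current chunk and dropped the tail of the input. A standalone check of the old and new calls — a sketch using the modern Buffer.from/Buffer.alloc APIs rather than the deprecated new Buffer of the original:

'use strict'

// Buffer#copy(target, targetStart, sourceStart, sourceEnd): sourceEnd is an
// end offset into the source, not a byte count.
const buf = Buffer.from('abcdefghij') // first chunk: 'abcd'; remainder should be 'efghij'
const size = 4

const oldRest = Buffer.alloc(buf.length - size)
buf.copy(oldRest, 0, size - 1, buf.length - size) // copies buf[3..6) = 'def'
console.log(oldRest.toString()) // 'def' plus three zero bytes: 'd' duplicated, 'ghij' lost

const newRest = Buffer.alloc(buf.length - size)
buf.copy(newRest, 0, size, buf.length) // copies buf[4..10) = 'efghij'
console.log(newRest.toString()) // 'efghij'
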
107 changes: 93 additions & 14 deletions src/index.js
@@ -7,33 +7,48 @@ const FixedSizeChunker = require('./chunker-fixed-size')
const through2 = require('through2')
const UnixFS = require('ipfs-unixfs')
const async = require('async')

exports = module.exports

const CHUNK_SIZE = 262144

// Use a layout + chunkers to convert a directory (or file) to the layout format
exports.import = (options, callback) => {
exports.import = function (options, callback) {
// options.path : what to import
// options.buffer : import a buffer
// options.filename : optional file name for buffer
// options.stream : import a stream
// options.recursive : follow dirs
// options.chunkers : obj with chunkers to each type of data, { default: dumb-chunker }
// options.dag-service : instance of block service
const dagService = options.dagService

const stats = fs.statSync(options.path)
if (stats.isFile()) {
fileImporter(options.path, callback)
} else if (stats.isDirectory() && options.recursive) {
dirImporter(options.path, callback)
} else {
return callback(new Error('recursive must be true to add a directory'))
if (options.buffer) {
if (!Buffer.isBuffer(options.buffer)) {
return callback(new Error('buffer importer must take a buffer'))
}
bufferImporter(options.buffer, callback)
} else if (options.stream) {
if (!(typeof options.stream.on === 'function')) {
return callback(new Error('stream importer must take a readable stream'))
}
// TODO Create Stream Importer
// streamImporter(options.stream, callback)
return callback(new Error('stream importer has not been built yet'))
} else if (options.path) {
const stats = fs.statSync(options.path)
if (stats.isFile()) {
fileImporter(options.path, callback)
} else if (stats.isDirectory() && options.recursive) {
dirImporter(options.path, callback)
} else {
return callback(new Error('recursive must be true to add a directory'))
}
}

function fileImporter (path, callback) {
const stats = fs.statSync(path)
if (stats.size > CHUNK_SIZE) {
const links = [] // { Hash: , Size: , Name: }

fs.createReadStream(path)
.pipe(new FixedSizeChunker(CHUNK_SIZE))
.pipe(through2((chunk, enc, cb) => {
@@ -53,7 +68,6 @@ exports.import = (options, callback) => {
leafSize: raw.fileSize(),
Name: ''
})

cb()
})
}, (cb) => {
@@ -83,7 +97,8 @@
}))
} else {
// create just one file node with the data directly
const fileUnixFS = new UnixFS('file', fs.readFileSync(path))
var buf = fs.readFileSync(path)
const fileUnixFS = new UnixFS('file', buf)
const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())

dagService.add(fileNode, (err) => {
@@ -166,9 +181,73 @@
})
})
}
function bufferImporter (buffer, callback) {
const links = [] // { Hash: , Size: , Name: }
if (buffer.length > CHUNK_SIZE) {
var fsc = new FixedSizeChunker(CHUNK_SIZE)
fsc.write(buffer)
fsc.end()
fsc.pipe(through2((chunk, enc, cb) => {
// TODO: check if this is right (I believe it should be type 'raw'
// https://github.com/ipfs/go-ipfs/issues/2331
const raw = new UnixFS('file', chunk)
const node = new mDAG.DAGNode(raw.marshal())

dagService.add(node, function (err) {
if (err) {
return log.err(err)
}
links.push({
Hash: node.multihash(),
Size: node.size(),
leafSize: raw.fileSize(),
Name: ''
})

cb()
})
}, (cb) => {
const file = new UnixFS('file')
const parentNode = new mDAG.DAGNode()
links.forEach((l) => {
file.addBlockSize(l.leafSize)
const link = new mDAG.DAGLink(l.Name, l.Size, l.Hash)
parentNode.addRawLink(link)
})
parentNode.data = file.marshal()
dagService.add(parentNode, (err) => {
if (err) {
return log.err(err)
}
// an optional file name provided
const fileName = options.filename

// function bufferImporter (path) {}
// function streamImporter (path) {}
callback(null, {
Hash: parentNode.multihash(),
Size: parentNode.size(),
Name: fileName
}) && cb()
})
}))
} else {
// create just one file node with the data directly
const fileUnixFS = new UnixFS('file', buffer)
const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())

dagService.add(fileNode, (err) => {
if (err) {
return log.err(err)
}

callback(null, {
Hash: fileNode.multihash(),
Size: fileNode.size(),
Name: options.filename
})
})
}
}
// function streamImporter (stream, callback) {}
}

exports.export = function () {
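With this change the importer accepts an in-memory buffer alongside the existing path-based flow: buffers larger than CHUNK_SIZE (256 KiB) are split through FixedSizeChunker and linked under a parent node, smaller ones become a single node. A minimal usage sketch; getDagService is a hypothetical stand-in for however the caller constructs the DAG service the importer calls .add() on:

'use strict'

const importer = require('./src/index.js')

// getDagService is a hypothetical helper: the importer only requires that
// options.dagService be a DAG service instance exposing .add(node, cb).
const dagService = getDagService()

importer.import({
  buffer: Buffer.from('hello world'),
  filename: 'hello.txt', // optional; echoed back as Name in the result
  dagService: dagService
}, (err, res) => {
  if (err) throw err
  // Small buffers (<= 256 KiB) produce a single node:
  // { Hash: <multihash>, Size: <number>, Name: 'hello.txt' }
  console.log(res)
})
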
60 changes: 60 additions & 0 deletions tests/browser.js
@@ -0,0 +1,60 @@
/* eslint-env mocha */
const tests = require('./buffer-test')
const async = require('async')
const store = require('idb-plus-blob-store')
const _ = require('lodash')
const IPFSRepo = require('ipfs-repo')
const repoContext = require.context('buffer!./repo-example', true)

const idb = window.indexedDB ||
window.mozIndexedDB ||
window.webkitIndexedDB ||
window.msIndexedDB

idb.deleteDatabase('ipfs')
idb.deleteDatabase('ipfs/blocks')

describe('IPFS data importing tests on the Browser', function () {
before(function (done) {
this.timeout(23000)
var repoData = []
repoContext.keys().forEach(function (key) {
repoData.push({
key: key.replace('./', ''),
value: repoContext(key)
})
})

const mainBlob = store('ipfs')
const blocksBlob = store('ipfs/blocks')

async.eachSeries(repoData, (file, cb) => {
if (_.startsWith(file.key, 'datastore/')) {
return cb()
}

const blocks = _.startsWith(file.key, 'blocks/')
const blob = blocks ? blocksBlob : mainBlob
const key = blocks ? file.key.replace(/^blocks\//, '') : file.key

blob.createWriteStream({
key: key
}).end(file.value, cb)
}, done)
})

// create the repo constant to be used in the import a small buffer test
const options = {
stores: {
keys: store,
config: store,
datastore: store,
// datastoreLegacy: needs https://github.com/ipfs/js-ipfs-repo/issues/6#issuecomment-164650642
logs: store,
locks: store,
version: store
}
}
const repo = new IPFSRepo('ipfs', options)
tests(repo)
})
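
The before() hook above seeds the browser's IndexedDB by replaying a fixture repo (bundled via require.context and buffer-loader) through idb-plus-blob-store write streams. The same write pattern in isolation — the key and contents here are illustrative, not taken from the fixture:

'use strict'

const store = require('idb-plus-blob-store')

// Any abstract-blob-store exposes createWriteStream({ key }), and the
// write is finished with .end(contents, callback) — exactly how the
// before() hook copies each fixture file into the browser store.
const blob = store('ipfs')
blob.createWriteStream({ key: 'version' })
  .end(Buffer.from('2'), (err) => {
    if (err) throw err
    console.log('seeded one key into the ipfs store')
  })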