Skip to content
This repository has been archived by the owner on Feb 12, 2024. It is now read-only.

Files Quest #156

Closed
wants to merge 25 commits
Closed
Show file tree
Hide file tree
Changes from 9 commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 5 additions & 4 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -63,11 +63,11 @@
"fs-blob-store": "^5.2.1",
"hapi": "^13.3.0",
"ipfs-api": "^3.0.1",
"ipfs-blocks": "^0.2.0",
"ipfs-data-importing": "^0.3.3",
"ipfs-blocks": "^0.2.3",
"ipfs-merkle-dag": "^0.4.0",
"ipfs-multipart": "^0.1.0",
"ipfs-repo": "^0.6.1",
"ipfs-repo": "^0.6.0",
"ipfs-unixfs-engine": "^0.5.0",
"joi": "^8.0.2",
"libp2p-ipfs": "^0.3.3",
"lodash.get": "^4.2.1",
Expand All @@ -76,6 +76,7 @@
"peer-id": "^0.6.6",
"peer-info": "^0.6.2",
"ronin": "^0.3.11",
"streamifier": "^0.1.1",
"temp": "^0.8.3"
},
"aegir": {
Expand Down Expand Up @@ -110,4 +111,4 @@
"kumavis <kumavis@users.noreply.github.com>",
"nginnever <ginneversource@gmail.com>"
]
}
}
76 changes: 68 additions & 8 deletions src/cli/commands/files/add.js
Original file line number Diff line number Diff line change
@@ -1,11 +1,16 @@
'use strict'

const Command = require('ronin').Command
const IPFS = require('../../../core')
const utils = require('../../utils')
const debug = require('debug')
const log = debug('cli:version')
log.error = debug('cli:version:error')
const bs58 = require('bs58')
const streamifier = require('streamifier')
const fs = require('fs')
const async = require('async')
const pathj = require('path')
const glob = require('glob')

module.exports = Command.extend({
desc: 'Add a file to IPFS using the UnixFS data format',
Expand All @@ -19,15 +24,70 @@ module.exports = Command.extend({
},

// Add a file or directory tree to IPFS.
// recursive: boolean, required for directory adds (-r flag).
// path: file or directory to add; '.' is expanded to the cwd.
run: (recursive, path) => {
  if (!path) {
    throw new Error(`Argument 'path' is required`)
  }

  let stats = fs.statSync(path)

  if (stats.isDirectory() && recursive === false) {
    throw new Error(`${process.cwd()} is a directory, use the '-r' flag to specify directories`)
  }
  if (path === '.' && recursive === true) {
    // Expand '.' to an absolute path so the glob pattern below matches.
    path = process.cwd()
    stats = fs.statSync(path)
  }

  glob(pathj.join(path, '/**/*'), (err, res) => {
    if (err) {
      throw err
    }
    utils.getIPFS((err, ipfs) => {
      if (err) {
        throw err
      }
      if (utils.isDaemonOn()) {
        throw new Error('daemon running is not supported yet')
      }

      // Importer stream: every {path, stream} pair written to it is
      // added to the repo; each 'data' event reports one added file.
      const i = ipfs.files.add()
      i.on('data', (file) => {
        console.log('added', bs58.encode(file.multihash).toString(), file.path)
      })

      if (res.length !== 0) {
        // Directory add: glob returned the tree's entries. Strip the
        // leading directory components so stored paths are relative
        // to the added directory's parent.
        const index = path.lastIndexOf('/')
        async.eachLimit(res, 10, (element, callback) => {
          if (fs.statSync(element).isDirectory()) {
            // Directories are created implicitly by the importer.
            return callback()
          }
          const addPath = element.substring(index + 1, element.length)
          const stream = streamifier.createReadStream(fs.readFileSync(element))
          i.write({path: addPath, stream: stream})
          callback()
        }, (err) => {
          if (err) {
            throw err
          }
          i.end()
        })
      } else {
        // Single-file add.
        const stream = streamifier.createReadStream(fs.readFileSync(path))
        i.write({path: path, stream: stream})
        i.end()
      }
    })
  })
}
})
49 changes: 49 additions & 0 deletions src/cli/commands/files/cat.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const utils = require('../../utils')
const log = debug('cli:files')
log.error = debug('cli:files:error')

module.exports = Command.extend({
desc: 'Download IPFS objects',

options: {},

run: (path, options) => {
if (!path) {
throw new Error("Argument 'path' is required")
}
if (!options) {
options = {}
}
utils.getIPFS((err, ipfs) => {
if (err) {
throw err
}
if (utils.isDaemonOn()) {
ipfs.cat(path, (err, res) => {
if (err) {
throw new Error(err)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No need for wrapping the err

}
res.on('data', (data) => {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

you can just res.pipe(process.stdout)

process.stdout.write(data)
})
})
return
}

ipfs.files.cat(path, (err, res) => {
if (err) {
throw new Error(err)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No need for wrapping the err

}
if (res) {
res.on('file', (data) => {
data.stream.pipe(process.stdout)
})
}
})
})
}
})
70 changes: 70 additions & 0 deletions src/cli/commands/files/get.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
'use strict'

const Command = require('ronin').Command
const debug = require('debug')
const utils = require('../../utils')
const log = debug('cli:files')
log.error = debug('cli:files:error')
var fs = require('fs')

module.exports = Command.extend({
desc: 'Download IPFS objects',

options: {},

run: (path, options) => {
let dir
let filepath
let ws

if (!path) {
throw new Error("Argument 'path' is required")
}
if (!options) {
options = {}
dir = process.cwd()
} else {
if (options.slice(-1) !== '/') {
options += '/'
}
dir = options
}

utils.getIPFS((err, ipfs) => {
if (err) {
throw err
}
ipfs.files.get(path, (err, data) => {
if (err) {
throw new Error(err)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No need for wrapping the err

}
data.on('file', (data) => {
if (data.path.lastIndexOf('/') === -1) {
filepath = data.path
if (data.dir === false) {
ws = fs.createWriteStream(dir + data.path)
data.stream.pipe(ws)
} else {
try {
fs.mkdirSync(dir + filepath)
} catch (err) {
console.log(err)
}
}
} else {
filepath = data.path.substring(0, data.path.lastIndexOf('/') + 1)
try {
fs.mkdirSync(dir + filepath)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

maybe path.join is safer?

} catch (err) {
}
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why can this err be ignored?

ws = fs.createWriteStream(dir + data.path)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

maybe path.join is safer?

// data.stream.on('end', () => {
// console.log('finished writing file to disk')
// })
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why is this commented out?

data.stream.pipe(ws)
}
})
})
})
}
})
54 changes: 49 additions & 5 deletions src/core/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,12 @@ const DAGService = mDAG.DAGService
const peerId = require('peer-id')
const PeerInfo = require('peer-info')
const multiaddr = require('multiaddr')
const importer = require('ipfs-data-importing').import
const Importer = require('ipfs-unixfs-engine').importer
const Exporter = require('ipfs-unixfs-engine').exporter
const libp2p = require('libp2p-ipfs')
const init = require('./init')
const IPFSRepo = require('ipfs-repo')
const UnixFS = require('ipfs-unixfs')

exports = module.exports = IPFS

Expand Down Expand Up @@ -393,10 +395,52 @@ function IPFS (repo) {
}

this.files = {
  // Add files to IPFS.
  // - add() / add(cb): returns a raw Importer stream; the caller
  //   writes {path, stream} pairs to it directly.
  // - add(arr, cb): writes every tuple in arr and calls
  //   cb(null, results) once the importer finishes.
  add: (arr, callback) => {
    if (typeof arr === 'function') {
      callback = arr
      arr = undefined
    }
    if (callback === undefined) {
      callback = function noop () {}
    }
    if (arr === undefined) {
      return new Importer(dagS)
    }

    const i = new Importer(dagS)
    const res = []

    i.on('data', (info) => {
      res.push(info)
    })

    // 'end' fires exactly once; use once() so the callback can never
    // be invoked a second time.
    i.once('end', () => {
      callback(null, res)
    })

    arr.forEach((tuple) => {
      i.write(tuple)
    })

    i.end()
  },
  // Stream the contents of a single file identified by its multihash.
  // Fails when the hash refers to a directory node.
  cat: (hash, callback) => {
    dagS.get(hash, (err, fetchedNode) => {
      if (err) {
        return callback(err, null)
      }
      const data = UnixFS.unmarshal(fetchedNode.data)
      if (data.type === 'directory') {
        // Pass a real Error, not a bare string.
        callback(new Error('This dag node is a directory'), null)
      } else {
        callback(null, Exporter(hash, dagS))
      }
    })
  },
  // Export the object(s) under hash as an event stream that emits
  // 'file' events.
  get: (hash, callback) => {
    callback(null, Exporter(hash, dagS))
  }
}
}
Expand Down
43 changes: 38 additions & 5 deletions src/core/init.js
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,10 @@ const peerId = require('peer-id')
const IpfsBlocks = require('ipfs-blocks').BlockService
const IpfsDagService = require('ipfs-merkle-dag').DAGService
const path = require('path')
const glob = require('glob')
const async = require('async')
const streamifier = require('streamifier')
const fs = require('fs')

module.exports = (repo, opts, callback) => {
opts = opts || {}
Expand Down Expand Up @@ -63,17 +67,46 @@ module.exports = (repo, opts, callback) => {
return doneImport(null)
}

const importer = require('ipfs-data-importing')
const Importer = require('ipfs-unixfs-engine').importer
const blocks = new IpfsBlocks(repo)
const dag = new IpfsDagService(blocks)

const initDocsPath = path.join(__dirname, '../init-files/init-docs')

importer.import(initDocsPath, dag, {
recursive: true
}, doneImport)
const i = new Importer(dag)
i.on('data', (file) => {
})
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

why an empty listener?


glob(path.join(initDocsPath, '/**/*'), (err, res) => {
if (err) {
throw err
}
const index = __dirname.lastIndexOf('/')
async.eachLimit(res, 10, (element, callback) => {
const addPath = element.substring(index + 1, element.length)
if (fs.statSync(element).isDirectory()) {
callback()
} else {
const buffered = fs.readFileSync(element)
const r = streamifier.createReadStream(buffered)
const filePair = {path: addPath, stream: r}
i.write(filePair)
callback()
}
}, (err) => {
if (err) {
throw err
}
i.end()
return
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

no need for return

})
})

i.on('end', () => {
doneImport(null)
})

function doneImport (err, stat) {
function doneImport (err) {
if (err) { return callback(err) }

// All finished!
Expand Down
2 changes: 1 addition & 1 deletion test/cli-tests/test-commands.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ describe('commands', () => {
.run((err, stdout, exitcode) => {
expect(err).to.not.exist
expect(exitcode).to.equal(0)
expect(stdout.length).to.equal(45)
expect(stdout.length).to.equal(47)
done()
})
})
Expand Down
Loading