Skip to content
This repository has been archived by the owner on Apr 29, 2020. It is now read-only.

Commit

Permalink
chore: refactor to async/await (#17)
Browse files Browse the repository at this point in the history
BREAKING CHANGE: This module used to export a class that extended EventEmitter,
now it exports a function that returns an async iterable.

I also updated the deps to use the latest http api, though this removed
the ability to add whole paths at once, along with some special logic
to handle symlinks.  The `Dicer` module that this module depends on
will still emit events when it encounters symlinks, so I left the
handlers in, though I am unsure whether we actually use them.
  • Loading branch information
achingbrain authored and hugomrdias committed Aug 27, 2019
1 parent edc2f72 commit 55d926e
Show file tree
Hide file tree
Showing 8 changed files with 305 additions and 335 deletions.
35 changes: 17 additions & 18 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -27,31 +27,30 @@ npm install ipfs-multipart
## Usage
```javascript
const http = require('http')
const IPFSMultipart = require('ipfs-multipart')
const parser = require('ipfs-multipart')

http.createServer((req, res) => {
http.createServer(async (req, res) => {
if (req.method === 'POST' && req.headers['content-type']) {
const parser = IPFSMultipart.reqParser(req)

parser.on('file', (fileName, fileStream) => {
console.log(`file ${fileName} start`)
for await (const entry of parser(req)) {
if (entry.type === 'directory') {
console.log(`dir ${entry.name} start`)
}

fileStream.on('data', (data) => {
console.log(`file ${fileName} contents:`, data.toString())
})
if (entry.type === 'file') {
console.log(`file ${entry.name} start`)

fileStream.on('end', (data) => {
console.log(`file ${fileName} end`)
})
})
for await (const data of entry.content) {
console.log(`file ${entry.name} contents:`, data.toString())
}

parser.on('end', () => {
console.log('finished parsing')
res.writeHead(200)
res.end()
})
console.log(`file ${entry.name} end`)
}
}

return
console.log('finished parsing')
res.writeHead(200)
res.end()
}

res.writeHead(404)
Expand Down
38 changes: 15 additions & 23 deletions example.js
Original file line number Diff line number Diff line change
Expand Up @@ -3,31 +3,23 @@
/* eslint-disable no-console */

const http = require('http')
const IPFSMultipart = require('.')
const multipart = require('ipfs-multipart')

http.createServer((req, res) => {
http.createServer(async (req, res) => {
if (req.method === 'POST' && req.headers['content-type']) {
const parser = IPFSMultipart.reqParser(req)

parser.on('file', (fileName, fileStream) => {
console.log(`file ${fileName} start`)

fileStream.on('data', (data) => {
console.log(`file ${fileName} contents:`, data.toString())
})

fileStream.on('end', (data) => {
console.log(`file ${fileName} end`)
})
})

parser.on('end', () => {
console.log('finished parsing')
res.writeHead(200)
res.end()
})

return
for await (const part of multipart(req)) {
console.log(`file ${part.name} start`)

if (part.type === 'file') {
for await (const chunk of part.content) {
console.log(`file ${part.name} contents:`, chunk.toString())
}
}
}

console.log('finished parsing')
res.writeHead(200)
res.end()
}

res.writeHead(404)
Expand Down
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -27,12 +27,12 @@
},
"dependencies": {
"@hapi/content": "^4.1.0",
"dicer": "~0.3.0"
"it-multipart": "~0.0.2"
},
"devDependencies": {
"aegir": "^20.0.0",
"chai": "^4.2.0",
"ipfs-api": "github:ipfs/js-ipfs-api#1fd9749",
"ipfs-http-client": "^33.1.1",
"request": "^2.88.0"
},
"engines": {
Expand Down
27 changes: 12 additions & 15 deletions src/index.js
Original file line number Diff line number Diff line change
@@ -1,20 +1,17 @@
'use strict'

const content = require('@hapi/content')
const Parser = require('./parser')
const parser = require('./parser')

module.exports = {
Parser,
/**
* Request Parser
*
* @param {Object} req - Request
* @returns {Parser}
*/
reqParser: (req) => {
const boundary = content.type(req.headers['content-type']).boundary
const parser = new Parser({ boundary: boundary })
req.pipe(parser)
return parser
}
/**
* Request Parser
*
* @param {Object} req - Request
* @param {Object} options - Options passed to stream constructors
* @returns {Object} an async iterable
*/
module.exports = (req, options = {}) => {
options.boundary = content.type(req.headers['content-type']).boundary

return parser(req.payload || req, options)
}
103 changes: 50 additions & 53 deletions src/parser.js
Original file line number Diff line number Diff line change
@@ -1,10 +1,7 @@
'use strict'

const Dicer = require('dicer')
const Content = require('@hapi/content')
const stream = require('stream')
const util = require('util')
const Transform = stream.Transform
const multipart = require('it-multipart')

const multipartFormdataType = 'multipart/form-data'
const applicationDirectory = 'application/x-directory'
Expand All @@ -25,79 +22,79 @@ const parseDisposition = (disposition) => {
}

/**
 * Build part metadata from a multipart part's headers.
 *
 * Combines the parsed content-type (mime details) with the parsed
 * content-disposition (entry name and type).
 *
 * @param {Object} header - map of header name to raw header value
 * @returns {Object} content-type details augmented with `name` and `type`
 */
const parseHeader = (header) => {
  const type = Content.type(header['content-type'])
  const disposition = parseDisposition(header['content-disposition'])

  const details = type
  // Names arrive percent-encoded in the disposition header
  details.name = decodeURIComponent(disposition.name)
  details.type = disposition.type

  return details
}

/**
 * Drain an async iterable of Buffers into a single Buffer.
 *
 * @param {AsyncIterable<Buffer>} stream - source of Buffer chunks
 * @returns {Promise<Buffer>} all chunks concatenated in order
 */
const collect = async (stream) => {
  const buffers = []
  let size = 0

  for await (const buf of stream) {
    size += buf.length
    buffers.push(buf)
  }

  // Passing the total size avoids a second pass inside Buffer.concat
  return Buffer.concat(buffers, size)
}
/**
 * Fully consume an async iterable, discarding every value.
 *
 * Used to advance past part bodies whose content we do not need,
 * so the underlying multipart stream keeps flowing.
 *
 * @param {AsyncIterable<*>} stream - source to drain
 * @returns {Promise<void>}
 */
const ignore = async (stream) => {
  for await (const _ of stream) { // eslint-disable-line no-unused-vars

  }
}

/**
 * Parse a multipart stream into an async iterable of entries.
 *
 * Yields objects of the form:
 *  - `{ type: 'directory', name }`
 *  - `{ type: 'symlink', name, target }`
 *  - `{ type: 'file', name, content }` where `content` is an async
 *    iterable of Buffers that MUST be consumed before advancing
 *
 * Nested multiparts (a part carrying its own boundary) are parsed
 * recursively and their entries yielded inline.
 *
 * @param {AsyncIterable<Buffer>} stream - raw multipart body
 * @param {Object} options - must include `boundary`
 * @returns {AsyncIterable<Object>} parsed entries
 */
async function * parser (stream, options) {
  for await (const part of multipart(stream, options.boundary)) {
    const partHeader = parseHeader(part.headers)

    if (isDirectory(partHeader.mime)) {
      yield {
        type: 'directory',
        name: partHeader.name
      }

      // Directories carry no useful body but it must still be drained
      await ignore(part.body)

      continue
    }

    if (partHeader.mime === applicationSymlink) {
      // The symlink target is the entire part body
      const target = await collect(part.body)

      yield {
        type: 'symlink',
        name: partHeader.name,
        target: target.toString('utf8')
      }

      continue
    }

    if (partHeader.boundary) {
      // Recursively parse nested multiparts, overriding only the boundary
      for await (const entry of parser(part, {
        ...options,
        boundary: partHeader.boundary
      })) {
        yield entry
      }

      continue
    }

    yield {
      type: 'file',
      name: partHeader.name,
      content: part.body
    }
  }
}

module.exports = parser
14 changes: 0 additions & 14 deletions test/node.js

This file was deleted.

Loading

0 comments on commit 55d926e

Please sign in to comment.