This repository was archived by the owner on Apr 29, 2020. It is now read-only.

Commit 55d926e

achingbrain authored and hugomrdias committed on Aug 27, 2019
chore: refactor to async/await (#17)
BREAKING CHANGE: This module used to export a class that extended EventEmitter; it now exports a function that returns an async iterable. I also updated the deps to use the latest http api, though this removed the ability to add whole paths at once, along with some special logic to handle symlinks. The `Dicer` module that this module depends on will still emit events when it encounters symlinks, so I left the handlers in, though I am unsure whether we actually use them.
1 parent edc2f72 commit 55d926e
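
To make the breaking change concrete, here is a minimal before/after sketch of consuming the module, distilled from the README and example.js diffs below; the server setup, port, and log messages are illustrative only, not taken from the repo.

```javascript
const http = require('http')
const multipart = require('ipfs-multipart')

// Before this commit (sketch): an EventEmitter-style parser
//   const parser = IPFSMultipart.reqParser(req)
//   parser.on('file', (fileName, fileStream) => { /* consume stream */ })
//   parser.on('end', () => { /* done */ })

// After this commit: a function returning an async iterable of entries
http.createServer(async (req, res) => {
  if (req.method === 'POST' && req.headers['content-type']) {
    for await (const entry of multipart(req)) {
      if (entry.type === 'file') {
        // entry.content is itself an async iterable of Buffers
        for await (const chunk of entry.content) {
          console.log(`${entry.name}: ${chunk.length} bytes`)
        }
      }
    }

    res.writeHead(200)
    return res.end()
  }

  res.writeHead(404)
  res.end()
}).listen(5005) // port chosen arbitrarily for the sketch
```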

File tree: 8 files changed, +305 −335 lines

 

README.md

+17 −18

@@ -27,31 +27,30 @@ npm install ipfs-multipart
 ## Usage
 ```javascript
 const http = require('http')
-const IPFSMultipart = require('ipfs-multipart')
+const parser = require('ipfs-multipart')
 
-http.createServer((req, res) => {
+http.createServer(async (req, res) => {
   if (req.method === 'POST' && req.headers['content-type']) {
-    const parser = IPFSMultipart.reqParser(req)
 
-    parser.on('file', (fileName, fileStream) => {
-      console.log(`file ${fileName} start`)
+    for await (const entry of parser(req)) {
+      if (entry.type === 'directory') {
+        console.log(`dir ${entry.name} start`)
+      }
 
-      fileStream.on('data', (data) => {
-        console.log(`file ${fileName} contents:`, data.toString())
-      })
+      if (entry.type === 'file') {
+        console.log(`file ${entry.name} start`)
 
-      fileStream.on('end', (data) => {
-        console.log(`file ${fileName} end`)
-      })
-    })
+        for await (const data of entry.content) {
+          console.log(`file ${entry.name} contents:`, data.toString())
+        }
 
-    parser.on('end', () => {
-      console.log('finished parsing')
-      res.writeHead(200)
-      res.end()
-    })
+        console.log(`file ${entry.name} end`)
+      }
+    }
 
-    return
+    console.log('finished parsing')
+    res.writeHead(200)
+    res.end()
   }
 
   res.writeHead(404)

example.js

+15 −23

@@ -3,31 +3,23 @@
 /* eslint-disable no-console */
 
 const http = require('http')
-const IPFSMultipart = require('.')
+const multipart = require('ipfs-multipart')
 
-http.createServer((req, res) => {
+http.createServer(async (req, res) => {
   if (req.method === 'POST' && req.headers['content-type']) {
-    const parser = IPFSMultipart.reqParser(req)
-
-    parser.on('file', (fileName, fileStream) => {
-      console.log(`file ${fileName} start`)
-
-      fileStream.on('data', (data) => {
-        console.log(`file ${fileName} contents:`, data.toString())
-      })
-
-      fileStream.on('end', (data) => {
-        console.log(`file ${fileName} end`)
-      })
-    })
-
-    parser.on('end', () => {
-      console.log('finished parsing')
-      res.writeHead(200)
-      res.end()
-    })
-
-    return
+    for await (const part of multipart(req)) {
+      console.log(`file ${part.name} start`)
+
+      if (part.type === 'file') {
+        for await (const chunk of part.content) {
+          console.log(`file ${part.name} contents:`, chunk.toString())
+        }
+      }
+    }
+
+    console.log('finished parsing')
+    res.writeHead(200)
+    res.end()
   }
 
   res.writeHead(404)

package.json

+2 −2

@@ -27,12 +27,12 @@
   },
   "dependencies": {
     "@hapi/content": "^4.1.0",
-    "dicer": "~0.3.0"
+    "it-multipart": "~0.0.2"
   },
   "devDependencies": {
     "aegir": "^20.0.0",
     "chai": "^4.2.0",
-    "ipfs-api": "github:ipfs/js-ipfs-api#1fd9749",
+    "ipfs-http-client": "^33.1.1",
     "request": "^2.88.0"
   },
   "engines": {

src/index.js

+12 −15

@@ -1,20 +1,17 @@
 'use strict'
 
 const content = require('@hapi/content')
-const Parser = require('./parser')
+const parser = require('./parser')
 
-module.exports = {
-  Parser,
-  /**
-   * Request Parser
-   *
-   * @param {Object} req - Request
-   * @returns {Parser}
-   */
-  reqParser: (req) => {
-    const boundary = content.type(req.headers['content-type']).boundary
-    const parser = new Parser({ boundary: boundary })
-    req.pipe(parser)
-    return parser
-  }
+/**
+ * Request Parser
+ *
+ * @param {Object} req - Request
+ * @param {Object} options - Options passed to stream constructors
+ * @returns {Object} an async iterable
+ */
+module.exports = (req, options = {}) => {
+  options.boundary = content.type(req.headers['content-type']).boundary
+
+  return parser(req.payload || req, options)
 }
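
The new entry point above only derives the multipart boundary from the request's Content-Type header and passes the underlying stream through. A small sketch of that step, assuming @hapi/content parses boundaries as used in the diff; the header value here is made up for illustration.

```javascript
const content = require('@hapi/content')

// A made-up multipart Content-Type header, as a browser might send it
const header = 'multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxk'

const parsed = content.type(header)
console.log(parsed.mime)     // 'multipart/form-data'
console.log(parsed.boundary) // '----WebKitFormBoundary7MA4YWxk'

// The exported function stores parsed.boundary on the options object and then
// calls parser(req.payload || req, options), so a hapi request (which exposes
// its raw stream as `payload`) and a plain Node.js request both work.
```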

src/parser.js

+50 −53

@@ -1,10 +1,7 @@
 'use strict'
 
-const Dicer = require('dicer')
 const Content = require('@hapi/content')
-const stream = require('stream')
-const util = require('util')
-const Transform = stream.Transform
+const multipart = require('it-multipart')
 
 const multipartFormdataType = 'multipart/form-data'
 const applicationDirectory = 'application/x-directory'
@@ -25,79 +22,79 @@ const parseDisposition = (disposition) => {
 }
 
 const parseHeader = (header) => {
-  const type = Content.type(header['content-type'][0])
-  const disposition = parseDisposition(header['content-disposition'][0])
+  const type = Content.type(header['content-type'])
+  const disposition = parseDisposition(header['content-disposition'])
 
   const details = type
-  details.name = disposition.name
+  details.name = decodeURIComponent(disposition.name)
   details.type = disposition.type
 
   return details
 }
 
-/**
- * Parser
- *
- * @constructor
- * @param {Object} options
- * @returns {Parser}
- */
-function Parser (options) {
-  // allow use without new
-  if (!(this instanceof Parser)) {
-    return new Parser(options)
-  }
-
-  this.dicer = new Dicer({ boundary: options.boundary })
-
-  this.dicer.on('part', (part) => this.handlePart(part))
-
-  this.dicer.on('error', (err) => this.emit('err', err))
+const collect = async (stream) => {
+  const buffers = []
+  let size = 0
 
-  this.dicer.on('finish', () => {
-    this.emit('finish')
-    this.emit('end')
-  })
+  for await (const buf of stream) {
+    size += buf.length
+    buffers.push(buf)
+  }
 
-  Transform.call(this, options)
+  return Buffer.concat(buffers, size)
 }
-util.inherits(Parser, Transform)
 
-Parser.prototype._transform = function (chunk, enc, cb) {
-  this.dicer.write(chunk, enc)
-  cb()
-}
+const ignore = async (stream) => {
+  for await (const _ of stream) { // eslint-disable-line no-unused-vars
 
-Parser.prototype._flush = function (cb) {
-  this.dicer.end()
-  cb()
+  }
 }
 
-Parser.prototype.handlePart = function (part) {
-  part.on('header', (header) => {
-    const partHeader = parseHeader(header)
+async function * parser (stream, options) {
+  for await (const part of multipart(stream, options.boundary)) {
+    const partHeader = parseHeader(part.headers)
 
     if (isDirectory(partHeader.mime)) {
-      part.on('data', () => false)
-      this.emit('directory', partHeader.name)
-      return
+      yield {
+        type: 'directory',
+        name: partHeader.name
+      }
+
+      await ignore(part.body)
+
+      continue
    }
 
    if (partHeader.mime === applicationSymlink) {
-      part.on('data', (target) => this.emit('symlink', partHeader.name, target.toString()))
-      return
+      const target = await collect(part.body)
+
+      yield {
+        type: 'symlink',
+        name: partHeader.name,
+        target: target.toString('utf8')
+      }
+
+      continue
    }
 
    if (partHeader.boundary) {
      // recursively parse nested multiparts
-      const parser = new Parser({ boundary: partHeader.boundary })
-      parser.on('file', (file) => this.emit('file', file))
-      part.pipe(parser)
-      return
+      for await (const entry of parser(part, {
+        ...options,
+        boundary: partHeader.boundary
+      })) {
+        yield entry
+      }
+
+      continue
    }
 
-    this.emit('file', partHeader.name, part)
-  })
+    yield {
+      type: 'file',
+      name: partHeader.name,
+      content: part.body
+    }
+  }
 }
 
-module.exports = Parser
+module.exports = parser
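
Taken together, the rewritten parser yields three kinds of entries: directories (name only), symlinks (whose target is collected into a UTF-8 string via `collect`), and files (whose `content` stays a lazily consumed async iterable). A hedged consumer sketch, assuming only the entry shapes visible in the diff above; the function name and logging are illustrative.

```javascript
const multipart = require('ipfs-multipart')

// `req` is assumed to be an incoming multipart/form-data POST request.
async function logEntries (req) {
  for await (const entry of multipart(req)) {
    if (entry.type === 'directory') {
      console.log('dir', entry.name)
    } else if (entry.type === 'symlink') {
      console.log('symlink', entry.name, '->', entry.target)
    } else if (entry.type === 'file') {
      let size = 0

      for await (const chunk of entry.content) {
        size += chunk.length // chunks are Buffers
      }

      console.log('file', entry.name, size, 'bytes')
    }
  }
}

module.exports = logEntries
```

Note that the parser drains unused directory bodies with `ignore(...)`: every part comes off a single underlying stream, so a part's body presumably needs to be consumed before the next entry can be yielded.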

test/node.js

-14
This file was deleted.

test/parser.js

-210
This file was deleted.

test/parser.spec.js

+209

New file:

'use strict'

/* eslint-env mocha */
/* eslint-disable no-unused-expressions */

const expect = require('chai').expect
const APIctl = require('ipfs-http-client')
const http = require('http')
const path = require('path')
const fs = require('fs')
const request = require('request')
const parser = require('../src')
const os = require('os')

const isWindows = os.platform() === 'win32'

const readDir = (path, prefix, output = []) => {
  const entries = fs.readdirSync(path)

  entries.forEach(entry => {
    // resolves symlinks
    const entryPath = fs.realpathSync(`${path}/${entry}`)
    const type = fs.statSync(entryPath)

    if (type.isDirectory()) {
      readDir(entryPath, `${prefix}/${entry}`, output)
    }

    if (type.isFile()) {
      output.push({
        path: `${prefix}/${entry}`,
        content: fs.createReadStream(entryPath)
      })
    }
  })

  output.push({
    path: prefix
  })

  return output
}

describe('parser', () => {
  const PORT = 6001

  let ctl
  let handler = () => {}

  before((done) => {
    http.createServer((req, res) => {
      if (req.method === 'POST' && req.headers['content-type']) {
        handler(req)
          .then(() => {
            res.writeHead(200)
          })
          .catch(() => {
            res.writeHead(500)
          })
          .then(() => {
            res.end()
          })

        return
      }

      res.writeHead(404)
      res.end()
    }).listen(PORT, () => {
      ctl = APIctl(`/ip4/127.0.0.1/tcp/${PORT}`)
      done()
    })
  })

  describe('single file', () => {
    const filePath = path.resolve(__dirname, 'fixtures/config')
    const fileContent = fs.readFileSync(filePath, 'utf8')

    before(() => {
      handler = async (req) => {
        expect(req.headers['content-type']).to.be.a('string')

        const files = []

        for await (const entry of parser(req)) {
          if (entry.type === 'file') {
            const file = { name: entry.name, content: '' }

            for await (const data of entry.content) {
              file.content += data.toString()
            }

            files.push(file)
          }
        }

        expect(files.length).to.equal(1)
        expect(files[0].name).to.equal('config')
        expect(files[0].content).to.equal(fileContent)
      }
    })

    it('parses ctl.config.replace correctly', async () => {
      await ctl.config.replace(filePath)
    })

    it('parses regular multipart requests correctly', (done) => {
      const formData = {
        file: fs.createReadStream(filePath)
      }

      request.post({ url: `http://localhost:${PORT}`, formData: formData }, (err) => done(err))
    })
  })

  describe('directory', () => {
    const dirPath = path.resolve(__dirname, 'fixtures')

    let files = []

    before(() => {
      handler = async (req) => {
        expect(req.headers['content-type']).to.be.a('string')

        for await (const entry of parser(req)) {
          if (entry.type === 'file') {
            const file = { name: entry.name, content: '' }

            for await (const data of entry.content) {
              file.content += data.toString()
            }

            files.push(file)
          }
        }
      }
    })

    beforeEach(() => {
      files = []
    })

    it('parses ctl.add correctly', async () => {
      const contents = readDir(dirPath, 'fixtures')

      await ctl.add(contents, { recursive: true, followSymlinks: false })

      if (isWindows) {
        return
      }

      expect(files.length).to.equal(5)
      expect(files[0].name).to.equal('fixtures/config')
      expect(files[1].name).to.equal('fixtures/folderlink/deepfile')
      expect(files[2].name).to.equal('fixtures/link')
      expect(files[3].name).to.equal('fixtures/otherfile')
      expect(files[4].name).to.equal('fixtures/subfolder/deepfile')
    })
  })

  describe('empty', () => {
    before(() => {
      handler = async (req) => {
        expect(req.headers['content-type']).to.be.a('string')

        for await (const _ of parser(req)) { // eslint-disable-line no-unused-vars

        }
      }
    })

    it('does not block', (done) => {
      request.post({ url: `http://localhost:${PORT}` }, (err, httpResponse, body) => {
        expect(err).not.to.exist
        done()
      })
    })
  })

  describe('buffer', () => {
    const files = []

    before(() => {
      handler = async (req) => {
        expect(req.headers['content-type']).to.be.a('string')

        for await (const entry of parser(req)) {
          if (entry.type === 'file') {
            const file = { name: entry.name, content: '' }

            for await (const data of entry.content) {
              file.content += data.toString()
            }

            files.push(file)
          }
        }
      }
    })

    it('parses ctl.add buffer correctly', async () => {
      await ctl.add(Buffer.from('hello world'))

      expect(files.length).to.equal(1)
      expect(files[0].name).to.equal('')
      expect(files[0].content).to.equal('hello world')
    })
  })
})

Comments (0)