Skip to content

Commit 18e799d

Browse files
pdugas and amcgoogan authored Apr 14, 2024
Removing binary dependency (#283)
* Removing binary dependency * Adding unit test for parseBuffer --------- Co-authored-by: amcgoogan <amcgoogan@gmail.com>
1 parent b94faa8 commit 18e799d

File tree

7 files changed

+252
-129
lines changed

7 files changed

+252
-129
lines changed
 

‎lib/Open/directory.js

+53-53
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
var binary = require('binary');
21
var PullStream = require('../PullStream');
32
var unzip = require('./unzip');
43
var Promise = require('bluebird');
@@ -8,6 +7,7 @@ var Buffer = require('../Buffer');
87
var path = require('path');
98
var Writer = require('fstream').Writer;
109
var parseDateTime = require('../parseDateTime');
10+
var parseBuffer = require('../parseBuffer');
1111

1212
var signature = Buffer.alloc(4);
1313
signature.writeUInt32LE(0x06054b50,0);
@@ -20,11 +20,11 @@ function getCrxHeader(source) {
2020
if (signature === 0x34327243) {
2121
var crxHeader;
2222
return sourceStream.pull(12).then(function(data) {
23-
crxHeader = binary.parse(data)
24-
.word32lu('version')
25-
.word32lu('pubKeyLength')
26-
.word32lu('signatureLength')
27-
.vars;
23+
crxHeader = parseBuffer.parse(data, [
24+
['version', 4],
25+
['pubKeyLength', 4],
26+
['signatureLength', 4],
27+
]);
2828
}).then(function() {
2929
return sourceStream.pull(crxHeader.pubKeyLength +crxHeader.signatureLength);
3030
}).then(function(data) {
@@ -39,12 +39,12 @@ function getCrxHeader(source) {
3939

4040
// Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
4141
function getZip64CentralDirectory(source, zip64CDL) {
42-
var d64loc = binary.parse(zip64CDL)
43-
.word32lu('signature')
44-
.word32lu('diskNumber')
45-
.word64lu('offsetToStartOfCentralDirectory')
46-
.word32lu('numberOfDisks')
47-
.vars;
42+
var d64loc = parseBuffer.parse(zip64CDL, [
43+
['signature', 4],
44+
['diskNumber', 4],
45+
['offsetToStartOfCentralDirectory', 8],
46+
['numberOfDisks', 4],
47+
]);
4848

4949
if (d64loc.signature != 0x07064b50) {
5050
throw new Error('invalid zip64 end of central dir locator signature (0x07064b50): 0x' + d64loc.signature.toString(16));
@@ -58,18 +58,18 @@ function getZip64CentralDirectory(source, zip64CDL) {
5858

5959
// Zip64 File Format Notes: https://pkware.cachefly.net/webdocs/casestudies/APPNOTE.TXT
6060
function parseZip64DirRecord (dir64record) {
61-
var vars = binary.parse(dir64record)
62-
.word32lu('signature')
63-
.word64lu('sizeOfCentralDirectory')
64-
.word16lu('version')
65-
.word16lu('versionsNeededToExtract')
66-
.word32lu('diskNumber')
67-
.word32lu('diskStart')
68-
.word64lu('numberOfRecordsOnDisk')
69-
.word64lu('numberOfRecords')
70-
.word64lu('sizeOfCentralDirectory')
71-
.word64lu('offsetToStartOfCentralDirectory')
72-
.vars;
61+
var vars = parseBuffer.parse(dir64record, [
62+
['signature', 4],
63+
['sizeOfCentralDirectory', 8],
64+
['version', 2],
65+
['versionsNeededToExtract', 2],
66+
['diskNumber', 4],
67+
['diskStart', 4],
68+
['numberOfRecordsOnDisk', 8],
69+
['numberOfRecords', 8],
70+
['sizeOfCentralDirectory', 8],
71+
['offsetToStartOfCentralDirectory', 8],
72+
]);
7373

7474
if (vars.signature != 0x06064b50) {
7575
throw new Error('invalid zip64 end of central dir locator signature (0x06064b50): 0x0' + vars.signature.toString(16));
@@ -107,16 +107,16 @@ module.exports = function centralDirectory(source, options) {
107107
var data = d.directory;
108108
startOffset = d.crxHeader && d.crxHeader.size || 0;
109109

110-
vars = binary.parse(data)
111-
.word32lu('signature')
112-
.word16lu('diskNumber')
113-
.word16lu('diskStart')
114-
.word16lu('numberOfRecordsOnDisk')
115-
.word16lu('numberOfRecords')
116-
.word32lu('sizeOfCentralDirectory')
117-
.word32lu('offsetToStartOfCentralDirectory')
118-
.word16lu('commentLength')
119-
.vars;
110+
vars = parseBuffer.parse(data, [
111+
['signature', 4],
112+
['diskNumber', 2],
113+
['diskStart', 2],
114+
['numberOfRecordsOnDisk', 2],
115+
['numberOfRecords', 2],
116+
['sizeOfCentralDirectory', 4],
117+
['offsetToStartOfCentralDirectory', 4],
118+
['commentLength', 2],
119+
]);
120120

121121
// Is this zip file using zip64 format? Use same check as Go:
122122
// https://github.com/golang/go/blob/master/src/archive/zip/reader.go#L503
@@ -179,25 +179,25 @@ module.exports = function centralDirectory(source, options) {
179179

180180
vars.files = Promise.mapSeries(Array(vars.numberOfRecords),function() {
181181
return records.pull(46).then(function(data) {
182-
var vars = binary.parse(data)
183-
.word32lu('signature')
184-
.word16lu('versionMadeBy')
185-
.word16lu('versionsNeededToExtract')
186-
.word16lu('flags')
187-
.word16lu('compressionMethod')
188-
.word16lu('lastModifiedTime')
189-
.word16lu('lastModifiedDate')
190-
.word32lu('crc32')
191-
.word32lu('compressedSize')
192-
.word32lu('uncompressedSize')
193-
.word16lu('fileNameLength')
194-
.word16lu('extraFieldLength')
195-
.word16lu('fileCommentLength')
196-
.word16lu('diskNumber')
197-
.word16lu('internalFileAttributes')
198-
.word32lu('externalFileAttributes')
199-
.word32lu('offsetToLocalFileHeader')
200-
.vars;
182+
var vars = parseBuffer.parse(data, [
183+
['signature', 4],
184+
['versionMadeBy', 2],
185+
['versionsNeededToExtract', 2],
186+
['flags', 2],
187+
['compressionMethod', 2],
188+
['lastModifiedTime', 2],
189+
['lastModifiedDate', 2],
190+
['crc32', 4],
191+
['compressedSize', 4],
192+
['uncompressedSize', 4],
193+
['fileNameLength', 2],
194+
['extraFieldLength', 2],
195+
['fileCommentLength', 2],
196+
['diskNumber', 2],
197+
['internalFileAttributes', 2],
198+
['externalFileAttributes', 4],
199+
['offsetToLocalFileHeader', 4],
200+
]);
201201

202202
vars.offsetToLocalFileHeader += startOffset;
203203
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);

‎lib/Open/unzip.js

+14-14
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,11 @@ var Promise = require('bluebird');
22
var Decrypt = require('../Decrypt');
33
var PullStream = require('../PullStream');
44
var Stream = require('stream');
5-
var binary = require('binary');
65
var zlib = require('zlib');
76
var parseExtraField = require('../parseExtraField');
87
var Buffer = require('../Buffer');
98
var parseDateTime = require('../parseDateTime');
9+
var parseBuffer = require('../parseBuffer');
1010

1111
// Backwards compatibility for node versions < 8
1212
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
@@ -23,19 +23,19 @@ module.exports = function unzip(source,offset,_password, directoryVars) {
2323

2424
entry.vars = file.pull(30)
2525
.then(function(data) {
26-
var vars = binary.parse(data)
27-
.word32lu('signature')
28-
.word16lu('versionsNeededToExtract')
29-
.word16lu('flags')
30-
.word16lu('compressionMethod')
31-
.word16lu('lastModifiedTime')
32-
.word16lu('lastModifiedDate')
33-
.word32lu('crc32')
34-
.word32lu('compressedSize')
35-
.word32lu('uncompressedSize')
36-
.word16lu('fileNameLength')
37-
.word16lu('extraFieldLength')
38-
.vars;
26+
var vars = parseBuffer.parse(data, [
27+
['signature', 4],
28+
['versionsNeededToExtract', 2],
29+
['flags', 2],
30+
['compressionMethod', 2],
31+
['lastModifiedTime', 2],
32+
['lastModifiedDate', 2],
33+
['crc32', 4],
34+
['compressedSize', 4],
35+
['uncompressedSize', 4],
36+
['fileNameLength', 2],
37+
['extraFieldLength', 2],
38+
]);
3939

4040
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
4141

‎lib/parse.js

+51-52
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
11
var util = require('util');
22
var zlib = require('zlib');
33
var Stream = require('stream');
4-
var binary = require('binary');
54
var Promise = require('bluebird');
65
var PullStream = require('./PullStream');
76
var NoopStream = require('./NoopStream');
87
var BufferStream = require('./BufferStream');
98
var parseExtraField = require('./parseExtraField');
109
var Buffer = require('./Buffer');
1110
var parseDateTime = require('./parseDateTime');
11+
var parseBuffer = require('./parseBuffer');
1212

1313
// Backwards compatibility for node versions < 8
1414
if (!Stream.Writable || !Stream.Writable.prototype.destroy)
@@ -74,11 +74,11 @@ Parse.prototype._readRecord = function () {
7474
Parse.prototype._readCrxHeader = function() {
7575
var self = this;
7676
return self.pull(12).then(function(data) {
77-
self.crxHeader = binary.parse(data)
78-
.word32lu('version')
79-
.word32lu('pubKeyLength')
80-
.word32lu('signatureLength')
81-
.vars;
77+
self.crxHeader = parseBuffer.parse(data, [
78+
['version', 4],
79+
['pubKeyLength', 4],
80+
['signatureLength', 4],
81+
]);
8282
return self.pull(self.crxHeader.pubKeyLength + self.crxHeader.signatureLength);
8383
}).then(function(data) {
8484
self.crxHeader.publicKey = data.slice(0,self.crxHeader.pubKeyLength);
@@ -91,18 +91,18 @@ Parse.prototype._readCrxHeader = function() {
9191
Parse.prototype._readFile = function () {
9292
var self = this;
9393
return self.pull(26).then(function(data) {
94-
var vars = binary.parse(data)
95-
.word16lu('versionsNeededToExtract')
96-
.word16lu('flags')
97-
.word16lu('compressionMethod')
98-
.word16lu('lastModifiedTime')
99-
.word16lu('lastModifiedDate')
100-
.word32lu('crc32')
101-
.word32lu('compressedSize')
102-
.word32lu('uncompressedSize')
103-
.word16lu('fileNameLength')
104-
.word16lu('extraFieldLength')
105-
.vars;
94+
var vars = parseBuffer.parse(data, [
95+
['versionsNeededToExtract', 2],
96+
['flags', 2],
97+
['compressionMethod', 2],
98+
['lastModifiedTime', 2],
99+
['lastModifiedDate', 2],
100+
['crc32', 4],
101+
['compressedSize', 4],
102+
['uncompressedSize', 4],
103+
['fileNameLength', 2],
104+
['extraFieldLength', 2],
105+
]);
106106

107107
vars.lastModifiedDateTime = parseDateTime(vars.lastModifiedDate, vars.lastModifiedTime);
108108

@@ -205,12 +205,12 @@ Parse.prototype._readFile = function () {
205205
Parse.prototype._processDataDescriptor = function (entry) {
206206
var self = this;
207207
return self.pull(16).then(function(data) {
208-
var vars = binary.parse(data)
209-
.word32lu('dataDescriptorSignature')
210-
.word32lu('crc32')
211-
.word32lu('compressedSize')
212-
.word32lu('uncompressedSize')
213-
.vars;
208+
var vars = parseBuffer.parse(data, [
209+
['dataDescriptorSignature', 4],
210+
['crc32', 4],
211+
['compressedSize', 4],
212+
['uncompressedSize', 4],
213+
]);
214214

215215
entry.size = vars.uncompressedSize;
216216
return self._readRecord();
@@ -220,25 +220,24 @@ Parse.prototype._processDataDescriptor = function (entry) {
220220
Parse.prototype._readCentralDirectoryFileHeader = function () {
221221
var self = this;
222222
return self.pull(42).then(function(data) {
223-
224-
var vars = binary.parse(data)
225-
.word16lu('versionMadeBy')
226-
.word16lu('versionsNeededToExtract')
227-
.word16lu('flags')
228-
.word16lu('compressionMethod')
229-
.word16lu('lastModifiedTime')
230-
.word16lu('lastModifiedDate')
231-
.word32lu('crc32')
232-
.word32lu('compressedSize')
233-
.word32lu('uncompressedSize')
234-
.word16lu('fileNameLength')
235-
.word16lu('extraFieldLength')
236-
.word16lu('fileCommentLength')
237-
.word16lu('diskNumber')
238-
.word16lu('internalFileAttributes')
239-
.word32lu('externalFileAttributes')
240-
.word32lu('offsetToLocalFileHeader')
241-
.vars;
223+
var vars = parseBuffer.parse(data, [
224+
['versionMadeBy', 2],
225+
['versionsNeededToExtract', 2],
226+
['flags', 2],
227+
['compressionMethod', 2],
228+
['lastModifiedTime', 2],
229+
['lastModifiedDate', 2],
230+
['crc32', 4],
231+
['compressedSize', 4],
232+
['uncompressedSize', 4],
233+
['fileNameLength', 2],
234+
['extraFieldLength', 2],
235+
['fileCommentLength', 2],
236+
['diskNumber', 2],
237+
['internalFileAttributes', 2],
238+
['externalFileAttributes', 4],
239+
['offsetToLocalFileHeader', 4],
240+
]);
242241

243242
return self.pull(vars.fileNameLength).then(function(fileName) {
244243
vars.fileName = fileName.toString('utf8');
@@ -257,15 +256,15 @@ Parse.prototype._readEndOfCentralDirectoryRecord = function() {
257256
var self = this;
258257
return self.pull(18).then(function(data) {
259258

260-
var vars = binary.parse(data)
261-
.word16lu('diskNumber')
262-
.word16lu('diskStart')
263-
.word16lu('numberOfRecordsOnDisk')
264-
.word16lu('numberOfRecords')
265-
.word32lu('sizeOfCentralDirectory')
266-
.word32lu('offsetToStartOfCentralDirectory')
267-
.word16lu('commentLength')
268-
.vars;
259+
var vars = parseBuffer.parse(data, [
260+
['diskNumber', 2],
261+
['diskStart', 2],
262+
['numberOfRecordsOnDisk', 2],
263+
['numberOfRecords', 2],
264+
['sizeOfCentralDirectory', 4],
265+
['offsetToStartOfCentralDirectory', 4],
266+
['commentLength', 2],
267+
]);
269268

270269
return self.pull(vars.commentLength).then(function(comment) {
271270
comment = comment.toString('utf8');

‎lib/parseBuffer.js

+55
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
const parseUIntLE = function(buffer, offset, size) {
2+
var result;
3+
switch(size) {
4+
case 1:
5+
result = buffer.readUInt8(offset);
6+
break;
7+
case 2:
8+
result = buffer.readUInt16LE(offset);
9+
break;
10+
case 4:
11+
result = buffer.readUInt32LE(offset);
12+
break;
13+
case 8:
14+
result = Number(buffer.readBigUInt64LE(offset));
15+
break;
16+
default:
17+
throw new Error('Unsupported UInt LE size!');
18+
}
19+
return result;
20+
}
21+
22+
/**
23+
* Parses sequential unsigned little endian numbers from the head of the passed buffer according to
24+
* the specified format passed. If the buffer is not large enough to satisfy the full format,
25+
* null values will be assigned to the remaining keys.
26+
* @param {*} buffer The buffer to sequentially extract numbers from.
27+
* @param {*} format Expected format to follow when extrcting values from the buffer. A list of list entries
28+
* with the following structure:
29+
* [
30+
* [
31+
* <key>, // Name of the key to assign the extracted number to.
32+
* <size> // The size in bytes of the number to extract. possible values are 1, 2, 4, 8.
33+
* ],
34+
* ...
35+
* ]
36+
* @returns An object with keys set to their associated extracted values.
37+
*/
38+
const parse = function(buffer, format) {
39+
var result = {}
40+
var offset = 0;
41+
for(const [key, size] of format) {
42+
if(buffer.length >= offset + size) {
43+
result[key] = parseUIntLE(buffer, offset, size);
44+
}
45+
else {
46+
result[key] = null;
47+
}
48+
offset += size;
49+
}
50+
return result;
51+
}
52+
53+
module.exports = {
54+
parse
55+
}

‎lib/parseExtraField.js

+9-9
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,17 @@
1-
var binary = require('binary');
1+
var parseBuffer = require('./parseBuffer');
22

33
module.exports = function(extraField, vars) {
44
var extra;
55
// Find the ZIP64 header, if present.
66
while(!extra && extraField && extraField.length) {
7-
var candidateExtra = binary.parse(extraField)
8-
.word16lu('signature')
9-
.word16lu('partsize')
10-
.word64lu('uncompressedSize')
11-
.word64lu('compressedSize')
12-
.word64lu('offset')
13-
.word64lu('disknum')
14-
.vars;
7+
var candidateExtra = parseBuffer.parse(extraField, [
8+
['signature', 2],
9+
['partsize', 2],
10+
['uncompressedSize', 8],
11+
['compressedSize', 8],
12+
['offset', 8],
13+
['disknum', 8],
14+
]);
1515

1616
if(candidateExtra.signature === 0x0001) {
1717
extra = candidateExtra;

‎package.json

-1
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,6 @@
2424
"license": "MIT",
2525
"dependencies": {
2626
"big-integer": "^1.6.17",
27-
"binary": "~0.3.0",
2827
"bluebird": "~3.4.1",
2928
"buffer-indexof-polyfill": "~1.0.0",
3029
"duplexer2": "~0.1.4",

‎test/parseBuffer.js

+70
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,70 @@
1+
'use strict';
2+
3+
var test = require('tap').test;
4+
var parseBuffer = require('../lib/parseBuffer');
5+
6+
const buf = Buffer.from([
7+
0x62,
8+
0x75,
9+
0x66,
10+
0x68,
11+
0x65,
12+
0x72,
13+
0xFF,
14+
0xAE,
15+
0x00,
16+
0x11,
17+
0x99,
18+
0xD7,
19+
0x7B,
20+
0x13,
21+
0x35
22+
]);
23+
24+
test(`parse little endian values for increasing byte size`, function (t) {
25+
const result = parseBuffer.parse(buf, [
26+
['key1', 1],
27+
['key2', 2],
28+
['key3', 4],
29+
['key4', 8],
30+
]);
31+
t.same(result, {
32+
key1: 98,
33+
key2: 26229,
34+
key3: 4285687144,
35+
key4: 3824536674483896300
36+
});
37+
t.end();
38+
})
39+
40+
test(`parse little endian values for decreasing byte size`, function (t) {
41+
const result = parseBuffer.parse(buf, [
42+
['key1', 8],
43+
['key2', 4],
44+
['key3', 2],
45+
['key4', 1],
46+
]);
47+
t.same(result, {
48+
key1: 12609923261529487000,
49+
key2: 3617132800,
50+
key3: 4987,
51+
key4: 53
52+
});
53+
t.end();
54+
})
55+
56+
test(`parse little endian values with null keys due to small buffer`, function (t) {
57+
const result = parseBuffer.parse(buf, [
58+
['key1', 8],
59+
['key2', 8],
60+
['key3', 8],
61+
['key4', 8],
62+
]);
63+
t.same(result, {
64+
key1: 12609923261529487000,
65+
key2: null,
66+
key3: null,
67+
key4: null
68+
});
69+
t.end();
70+
})

0 commit comments

Comments
 (0)
Please sign in to comment.