Skip to content

Commit

Permalink
Implement a configurable memory usage limit for decompression
Browse files Browse the repository at this point in the history
  • Loading branch information
Zireael-N committed May 10, 2020
1 parent 64dad67 commit 6065c16
Show file tree
Hide file tree
Showing 10 changed files with 64 additions and 21 deletions.
2 changes: 2 additions & 0 deletions README.md
Expand Up @@ -64,6 +64,8 @@ const parser = require('node-weakauras-parser');
}());
```

Both `decode()` and `decodeSync()` accept an optional second argument that sets the memory usage limit for decompression (in bytes). The default is 8 MB; pass `+Infinity` to disable the limit.

## Major changes

### v3
Expand Down
4 changes: 2 additions & 2 deletions lib/index.d.ts
@@ -1,4 +1,4 @@
export function decode<T = any>(str: string): Promise<T>;
export function decode<T = any>(str: string, max_decompressed_size?: number): Promise<T>;
export function encode(value: any): Promise<string>;
export function decodeSync<T = any>(str: string): T;
export function decodeSync<T = any>(str: string, max_decompressed_size?: number): T;
export function encodeSync(value: any): string;
8 changes: 4 additions & 4 deletions lib/index.js
@@ -1,9 +1,9 @@
const addon = require('../native');

module.exports = {
decode: function(str) {
decode: function(str, max_decompressed_size) {
return new Promise(function(resolve, reject) {
addon.decode(str, function(error, result) {
addon.decode(str, max_decompressed_size, function(error, result) {
if (error) {
return reject(error);
}
Expand Down Expand Up @@ -32,8 +32,8 @@ module.exports = {
return Promise.reject(error);
}
},
decodeSync: function(str) {
return JSON.parse(addon.decodeSync(str));
decodeSync: function(str, max_decompressed_size) {
return JSON.parse(addon.decodeSync(str, max_decompressed_size));
},
encodeSync: function(obj) {
return addon.encodeSync(JSON.stringify(obj));
Expand Down
2 changes: 1 addition & 1 deletion native/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion native/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "node-weakauras-parser"
version = "3.0.6"
version = "3.0.7"
authors = ["Velithris"]
edition = "2018"
license = "MIT"
Expand Down
11 changes: 7 additions & 4 deletions native/src/asynchronous.rs
Expand Up @@ -2,14 +2,14 @@ use neon::prelude::*;

use super::common;

struct DecodeTask(String);
struct DecodeTask(String, Option<usize>);
impl Task for DecodeTask {
type Output = String;
type Error = &'static str;
type JsEvent = JsString;

fn perform(&self) -> Result<Self::Output, Self::Error> {
common::decode_weakaura(&self.0)
common::decode_weakaura(&self.0, self.1)
}

fn complete(self, mut cx: TaskContext, result: Result<Self::Output, Self::Error>) -> JsResult<Self::JsEvent> {
Expand All @@ -36,9 +36,12 @@ impl Task for EncodeTask {

/// JS entry point for the async `decode()` binding.
///
/// Expected arguments (always supplied by the JS wrapper in lib/index.js):
///   0: the WeakAura string to decode
///   1: `max_decompressed_size` (a number, or `undefined` for the default)
///   2: the Node-style `(error, result)` callback
///
/// Schedules the decode on the libuv thread pool and returns `undefined`
/// immediately; the callback receives the result.
pub fn decode_weakaura(mut cx: FunctionContext) -> JsResult<JsUndefined> {
    let src = cx.argument::<JsString>(0)?.value();
    // Fetch the handle first, then validate it, so `cx` is not borrowed twice.
    let max_size_arg = cx.argument::<JsValue>(1)?;
    let max_size = common::transform_max_size(max_size_arg, &mut cx)?;
    let cb = cx.argument::<JsFunction>(2)?;

    DecodeTask(src, max_size).schedule(cb);

    Ok(cx.undefined())
}
Expand Down
38 changes: 34 additions & 4 deletions native/src/common.rs
@@ -1,3 +1,5 @@
use neon::prelude::*;

use std::borrow::Cow;

use super::base64;
Expand All @@ -6,7 +8,24 @@ use super::huffman;
use super::deserialization::Deserializer;
use super::serialization::Serializer;

pub fn decode_weakaura(src: &str) -> Result<String, &'static str> {
/// Converts the optional JS `max_decompressed_size` argument into a byte limit.
///
/// * `undefined`                     -> `Some(8 MiB)` (the default limit)
/// * `+Infinity`                     -> `None` (limit disabled)
/// * finite, non-negative number     -> `Some(value truncated to usize)`
/// * anything else (negative, NaN,
///   `-Infinity`, non-number)        -> throws a `TypeError`
pub fn transform_max_size<'a>(v: Handle<'a, JsValue>, cx: &'a mut FunctionContext) -> NeonResult<Option<usize>> {
    if v.downcast::<JsUndefined>().is_ok() {
        // Argument omitted: fall back to the documented 8 MiB default.
        return Ok(Some(8 * 1024 * 1024));
    }

    let v = v.downcast_or_throw::<JsNumber, FunctionContext>(cx)?.value();
    if v == f64::INFINITY {
        // +Infinity explicitly disables the limit.
        Ok(None)
    } else if v.is_finite() && v >= 0.0 {
        // Rejecting negatives above keeps the f64 -> usize cast in range:
        // out-of-range float-to-int casts were UB before Rust 1.45 and
        // saturate (to 0 here) afterwards, which would silently make every
        // decompression fail with a size error.
        Ok(Some(v.trunc() as usize))
    } else {
        cx.throw_type_error("invalid value, expected a non-negative finite number or +Infinity")
    }
}

pub fn decode_weakaura(src: &str, max_size: Option<usize>) -> Result<String, &'static str> {
let (weakaura, legacy) = if src.starts_with('!') {
(&src[1..], false)
} else {
Expand All @@ -15,19 +34,30 @@ pub fn decode_weakaura(src: &str) -> Result<String, &'static str> {

let decoded = base64::decode(weakaura)?;

let max_size = max_size.unwrap_or(usize::MAX);
let decompressed = if legacy {
huffman::decompress(&decoded)
huffman::decompress(&decoded, max_size)
} else {
use flate2::read::DeflateDecoder;
use std::io::prelude::*;

let mut result = Vec::new();
let mut inflater = DeflateDecoder::new(&decoded[..]);
let mut inflater = DeflateDecoder::new(&decoded[..]).take(max_size as u64);

inflater
.read_to_end(&mut result)
.map_err(|_| "decompression error")
.and_then(|_| {
if result.len() < max_size {
Ok(())
} else {
match inflater.into_inner().bytes().next() {
Some(_) => Err("compressed data is too large"),
None => Ok(()),
}
}
})
.map(|_| Cow::from(result))
.map_err(|_| "compression error")
}?;

Deserializer::from_str(&String::from_utf8_lossy(&decompressed))
Expand Down
10 changes: 7 additions & 3 deletions native/src/huffman/mod.rs
Expand Up @@ -13,7 +13,7 @@ use utils::{get_code, unescape_code};

const GENERIC_ERROR: &str = "decompression error";

pub(crate) fn decompress(bytes: &[u8]) -> Result<Cow<'_, [u8]>, &'static str> {
pub(crate) fn decompress(bytes: &[u8], max_size: usize) -> Result<Cow<'_, [u8]>, &'static str> {
let mut iter = bytes.iter();
match iter.next() {
Some(1) => return Ok(Cow::from(&bytes[1..])),
Expand All @@ -26,7 +26,7 @@ pub(crate) fn decompress(bytes: &[u8]) -> Result<Cow<'_, [u8]>, &'static str> {
return Err("insufficient data");
}

let num_symbols = iter.next().unwrap().checked_add(1).ok_or(GENERIC_ERROR)?;
let num_symbols = *iter.next().unwrap() as usize + 1;

let original_size = iter
.by_ref()
Expand All @@ -39,7 +39,11 @@ pub(crate) fn decompress(bytes: &[u8]) -> Result<Cow<'_, [u8]>, &'static str> {
return Err("insufficient data");
}

let mut codes = Vec::with_capacity(num_symbols as usize);
if original_size > max_size {
return Err("compressed data is too large");
}

let mut codes = Vec::with_capacity(num_symbols);
let mut result = Vec::with_capacity(original_size);

let mut bitfield = Bitfield::new();
Expand Down
6 changes: 5 additions & 1 deletion native/src/synchronous.rs
Expand Up @@ -4,8 +4,12 @@ use super::common;

/// JS entry point for the synchronous `decodeSync()` binding.
///
/// Arguments:
///   0: the WeakAura string to decode
///   1: optional `max_decompressed_size` (number, or absent/`undefined` for
///      the default)
///
/// Decodes on the calling thread and returns the JSON string, throwing a JS
/// error on failure.
pub fn decode_weakaura(mut cx: FunctionContext) -> JsResult<JsString> {
    let src = cx.argument::<JsString>(0)?.value();
    // When the argument is entirely absent, use the same 8 MiB default that
    // transform_max_size applies for an explicit `undefined`.
    let max_size = match cx.argument_opt(1) {
        Some(v) => common::transform_max_size(v, &mut cx)?,
        None => Some(8 * 1024 * 1024),
    };

    common::decode_weakaura(&src, max_size)
        .map(|json| cx.string(json))
        .or_else(|e| cx.throw_error(e))
}
Expand Down
2 changes: 1 addition & 1 deletion package.json
@@ -1,6 +1,6 @@
{
"name": "node-weakauras-parser",
"version": "3.0.6",
"version": "3.0.7",
"description": "Native module for Node.js that does deserialization/serialization of WeakAuras' strings",
"keywords": ["World of Warcraft", "WoW", "WeakAuras", "WA"],
"main": "lib/index.js",
Expand Down

0 comments on commit 6065c16

Please sign in to comment.