How to use the pako.Inflate function in pako

To help you get started, we've selected a few pako.Inflate examples based on popular ways it is used in public projects.

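Every snippet on this page follows the same basic pattern: create a pako.Inflate instance, push one or more Uint8Array chunks into it (passing true with the final chunk), check inflator.err, and read the decompressed bytes from inflator.result. Here is a minimal sketch of that pattern, assuming the compressed input is already available as a Uint8Array:

import pako from 'pako';

function inflateBuffer(compressed) {      // compressed: Uint8Array of zlib/gzip data
    const inflator = new pako.Inflate();
    inflator.push(compressed, true);      // true marks this as the final chunk
    if (inflator.err) {
        throw new Error(inflator.msg);
    }
    return inflator.result;               // Uint8Array of decompressed bytes
}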

github neuroanatomy / BrainBox / view / atlasMaker / src / atlasmaker-interaction.js View on Github
reader.onload = function (e) {
                var result = e.target.result;
                var nii;
                if(name.name.split('.').pop() === "gz") {
                    var inflate = new pako.Inflate();
                    inflate.push(new Uint8Array(result), true);
                    nii = inflate.result.buffer;
                } else { nii = result; }
                var mri = me.loadNifti(nii);

                if( mri.dim[0] !== me.User.dim[0] ||
                    mri.dim[1] !== me.User.dim[1] ||
                    mri.dim[2] !== me.User.dim[2]) {
                    console.log("ERROR: Volume dimensions do not match");

                    return;
                }

                // copy uploaded data to atlas data
                var i;
                for(i = 0; i
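For comparison, here is a cut-down sketch of the same idea: read a user-selected File in the browser and gunzip it with pako.Inflate when the file name ends in .gz. The file variable and whatever consumes the resulting ArrayBuffer are placeholders, not part of the BrainBox code above.

import pako from 'pako';

const reader = new FileReader();
reader.onload = (e) => {
    const result = e.target.result;              // ArrayBuffer read from the file
    let data;
    if (file.name.split('.').pop() === 'gz') {
        const inflator = new pako.Inflate();     // pako auto-detects the gzip wrapper
        inflator.push(new Uint8Array(result), true);
        if (inflator.err) { throw new Error(inflator.msg); }
        data = inflator.result.buffer;           // back to an ArrayBuffer
    } else {
        data = result;
    }
    // hand `data` to whatever expects the uncompressed bytes
};
reader.readAsArrayBuffer(file);                  // `file` is a File from an <input type="file">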
github SamyPesse / gitkit-js / src / models / PackFile.js View on Github
function parseInflateContent(parser: Dissolve): Dissolve {
    const inflator = new Inflate();

    // Iterate until we find the end of the zlib content
    return parser.loop(end => {
        parser.buffer('byte', 1).tap(() => {
            const byte = parser.vars.byte;

            const ab = new Uint8Array(1);
            ab.fill(byte[0]);

            inflator.push(ab);

            if (inflator.ended) {
                if (inflator.err) {
                    parser.emit('error', new Error(inflator.msg));
                }
github SamyPesse / gitkit-js / lib / TransferUtils / parsePack.js View on Github
function parseInflateContent(parser) {
    var inflator = new pako.Inflate();

    // Iterate until we find the end of the zlib content
    return parser.loop(function(end) {
        this.buffer('byte', 1)
        .tap(function() {
            var byte = this.vars.byte;

            var ab = new Uint8Array(1);
            ab.fill(byte[0]);

            inflator.push(ab);

            if (inflator.ended) {
                if (inflator.err) {
                    this.emit('error', new Error(inflator.msg));
                }
github magcius / noclip.website / src / BanjoKazooie / tools / extractor.ts View on Github external
function decompressPairedFiles(buffer: ArrayBufferSlice, ram: number): RAMRegion[] {
    const view = buffer.createDataView();
    const out: RAMRegion[] = [];

    assert(view.getUint16(0x00) === 0x1172, `bad bytes ${view.getUint32(0).toString(16)} from ${buffer.byteOffset.toString(16)}`);
    const decompressedCodeSize = view.getUint32(0x02);
    let srcOffs = 0x06;

    const inflator = new Pako.Inflate({ raw: true });
    inflator.push(buffer.createTypedArray(Uint8Array, srcOffs), true);
    out.push({ data: new ArrayBufferSlice((inflator.result as Uint8Array).buffer as ArrayBuffer), start: ram });

    const startPoint = srcOffs + ((inflator as any).strm.next_in as number); // read internal zlib stream state to find the next file
    const dataFile = decompress(buffer.slice(startPoint));
    out.push({ data: dataFile, start: ram + decompressedCodeSize }); // files are placed consecutively
    return out;
}
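The extractor above decompresses a raw deflate stream (no zlib header) by passing { raw: true }, then peeks at the inflator's internal strm object to find out how many input bytes were consumed; note that strm is internal zlib state, not part of pako's documented API. A minimal sketch of just the raw-mode decompression, with the offset treated as an assumption:

import pako from 'pako';

function decompressRaw(bytes, srcOffs) {             // bytes: Uint8Array, srcOffs: start of the deflate data
    const inflator = new pako.Inflate({ raw: true }); // raw deflate, no zlib/gzip wrapper
    inflator.push(bytes.subarray(srcOffs), true);
    if (inflator.err) {
        throw new Error(inflator.msg);
    }
    return inflator.result;                           // decompressed Uint8Array
}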
github shyras / osweb / src / js / osweb / util / files.js View on Github
export async function decompress (zipfile, onProgress) {
  const fs = new FileStreamer(zipfile)
  const inflator = new pako.Inflate()
  let block

  while (!fs.isEndOfFile()) {
    block = await fs.readBlock()
    inflator.push(block.data, fs.isEndOfFile())
    if (inflator.err) {
      throw inflator.msg
    }
    if (isFunction(onProgress)) onProgress(block.progress)
  }
  return untar(inflator.result.buffer)
}
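osweb streams the archive block by block, passing true only with the final block and reporting progress after each push. A simplified sketch of the same pattern, assuming chunks is an async iterable of { data, done, progress } objects rather than osweb's FileStreamer:

import pako from 'pako';

async function decompressChunks(chunks, onProgress) {
    const inflator = new pako.Inflate();
    for await (const chunk of chunks) {
        inflator.push(chunk.data, chunk.done);   // flag the final chunk
        if (inflator.err) {
            throw new Error(inflator.msg);
        }
        if (typeof onProgress === 'function') {
            onProgress(chunk.progress);
        }
    }
    return inflator.result;                      // e.g. a tar archive to hand to untar()
}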
github es-git / es-git / ts / lib / inflate-stream.ts View on Github
export default function inflateStream() {
  let inf = new Inflate();
  const b = new Uint8Array(1);
  const empty = new Binary(0);

  return {
    write: write,
    recycle: recycle,
    flush: Binary === Uint8Array ? flush : flushConvert
  };

  function write(byte : number) {
    b[0] = byte;
    inf.push(b);
    return !(inf as any).ended;
  }

  function recycle() { inf = new Inflate(); }
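The wrapper above keeps one Inflate per compressed object: write feeds a single byte and returns false once pako reports the stream has ended, and recycle swaps in a fresh inflator so the same wrapper can be reused for the next object. A small sketch of driving such a wrapper, with the byte source left as an assumption (flush is defined later in the same file and not shown above):

function readObject(nextByte, stream) {          // nextByte(): next byte of pack data; stream: inflateStream()
    // Push bytes until the zlib stream for this object terminates.
    while (stream.write(nextByte())) {
        // write() returns false once the inflator has ended
    }
    const data = stream.flush();                 // collect the decompressed object
    stream.recycle();                            // reset for the next compressed object
    return data;
}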
github es-git / es-git / packages / packfile / ts / parse-packfile.ts View on Github
async function $body(state : HeaderState | OfsDeltaState | RefDeltaState) : Promise {
  const inf = new pako.Inflate();
  do {
    inf.push(await state.buffer.chunk());
  } while(inf.err === 0 && inf.result === undefined);
  state.buffer.rewind((inf as any).strm.avail_in);
  if(inf.err != 0) throw new Error(`Inflate error ${inf.err} ${inf.msg}`);
  const data = inf.result as Uint8Array;
  if (data.length !== state.size)
    throw new Error(`Length mismatch, expected ${state.size} got ${data.length}`);

  return {
    ...state,
    state: 'entry',
    entry: entry(state, data),
    entryIndex: state.entryIndex+1
  }
}
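The parse-packfile snippet works because inf.result stays undefined until pako sees the end of the stream, so the loop keeps pulling chunks until either the object is fully inflated or an error is set; it then reads strm.avail_in (internal zlib state, hence the as any cast) to learn how many bytes of the last chunk belong to the next object. A rough JavaScript sketch of that idea, with nextChunk standing in for state.buffer.chunk():

import pako from 'pako';

async function inflateFromChunks(nextChunk) {    // nextChunk(): Promise of the next Uint8Array of pack data
    const inf = new pako.Inflate();
    do {
        inf.push(await nextChunk());
    } while (inf.err === 0 && inf.result === undefined);
    if (inf.err !== 0) {
        throw new Error(`Inflate error ${inf.err} ${inf.msg}`);
    }
    const leftover = inf.strm.avail_in;          // internal: unconsumed bytes of the last chunk
    return { data: inf.result, leftover };       // caller rewinds its source by `leftover` bytes
}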
github thenickdude / chickenpaint / js / engine / CPChibiFile.js View on Github
module.exports.load = function(source, options) {
    options = options || {};

	const
		STATE_WAIT_FOR_CHUNK = 0,
		
		STATE_DECODE_FILE_HEADER = 1,
		
		STATE_DECODE_LAYER = 2,
		STATE_DECODE_GROUP = 3,
		
		STATE_SUCCESS = 45,
		STATE_FATAL = 5;
	
	let
		inflator = new pako.Inflate({}),
		state = STATE_WAIT_FOR_CHUNK,
		
		/**
		 * Destination artwork
		 *
		 * @type {CPArtwork}
		 */
		artwork = null,
		
		/**
		 * Group we're currently loading layers into
		 *
		 * @type {CPLayerGroup}
		 */
		destGroup = null,
github es-git / es-git / ts / lib / inflate-stream.ts View on Github
  function recycle() { inf = new Inflate(); }
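Finally, when compressed data arrives in many small pieces (as in the ChickenPaint loader above), pako's Inflate can also deliver its output incrementally: override onData to receive each decompressed block instead of waiting for result. A short sketch, independent of the ChickenPaint state machine; handleDecodedChunk, firstChunk and lastChunk are placeholders:

import pako from 'pako';

const inflator = new pako.Inflate({});
inflator.onData = (chunk) => {
    // Called with a Uint8Array each time pako produces a block of output.
    handleDecodedChunk(chunk);                   // hypothetical downstream handler
};

// Feed compressed chunks as they arrive; pass true with the last one.
inflator.push(firstChunk, false);
inflator.push(lastChunk, true);
if (inflator.err) {
    console.log('inflate failed:', inflator.msg);
}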