How to use the pako.inflate function in pako

To help you get started, we’ve selected a few pako.inflate examples based on popular ways it is used in public projects.

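Before the project examples, here is a minimal sketch of the call itself (the variable names are illustrative, not taken from any project below): pako.inflate takes zlib-compressed bytes and returns a Uint8Array, returns a string instead when called with { to: 'string' }, and throws on malformed input.

import * as pako from 'pako';

// Illustrative input: a Uint8Array of zlib-compressed bytes.
declare const compressedBytes: Uint8Array;

// Default form: the decompressed data comes back as a Uint8Array.
const bytes = pako.inflate(compressedBytes);

// String form: decode the decompressed bytes directly to a string.
const text = pako.inflate(compressedBytes, { to: 'string' });

// inflate() throws on invalid or truncated input, so guard untrusted data.
try {
  pako.inflate(new Uint8Array([1, 2, 3]));
} catch (err) {
  console.warn('not valid zlib data:', err);
}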

github atlasmap / atlasmap / ui / src / app / lib / atlasmap-data-mapper / services / initialization.service.ts (View on Github)
return new Promise(async(resolve, reject) => {
      try {

        // Inflate the compressed content.
        const decompress = inflate(compressedCatalog);
        const mappingsDocuments =
          new Uint8Array(decompress).reduce((data, byte) => data + String.fromCharCode(byte), '');
        const mInfo = this.processMappingsDocuments(mappingsDocuments);

        // Reinitialize the model mappings.
        if (mInfo && mInfo.exportMappings) {
          const catalogMappingsName = MappingSerializer.deserializeAtlasMappingName(
            DocumentManagementService.getMappingsInfo(mInfo.exportMappings.value));

            // If the live UI mappings name does not match the UI mappings name extracted from the
            // catalog file then use the mappings from the catalog file.  Otherwise use the live
            // UI file.
            this.cfg.fileService.findMappingFiles('UI').toPromise()
              .then( async(files: string[]) => {

              await this.updateCatalog(compressedCatalog);
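In this atlasmap snippet, the reduce/String.fromCharCode step rebuilds a string from the inflated bytes by hand. Assuming the catalog decompresses to UTF-8 (or plain ASCII) text, pako's string output mode can do the same conversion in one step; a hedged one-line alternative, not the project's code:

// Assumes compressedCatalog inflates to UTF-8 text.
const mappingsDocuments = inflate(compressedCatalog, { to: 'string' });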
github Kitware / vtk-js / Sources / IO / Core / DataAccessHelper / JSZipDataAccessHelper.js (View on Github)
return (uint8array) => {
    array.buffer = new ArrayBuffer(uint8array.length);

    // copy uint8array to buffer
    const view = new Uint8Array(array.buffer);
    view.set(uint8array);

    if (fetchGzip) {
      if (array.dataType === 'string' || array.dataType === 'JSON') {
        array.buffer = pako.inflate(new Uint8Array(array.buffer), { to: 'string' });
      } else {
        array.buffer = pako.inflate(new Uint8Array(array.buffer)).buffer;
      }
    }

    if (array.ref.encode === 'JSON') {
      array.values = JSON.parse(array.buffer);
    } else {
      if (Endian.ENDIANNESS !== array.ref.encode && Endian.ENDIANNESS) {
        // Need to swap bytes
        vtkDebugMacro(`Swap bytes of ${array.name}`);
        Endian.swapBytes(array.buffer, DataTypeByteSize[array.dataType]);
      }

      array.values = new window[array.dataType](array.buffer);
    }

    if (array.values.length !== array.size) {
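Two details of this vtk.js helper are worth noting: { to: 'string' } is used when the payload is text or JSON, and for binary payloads the Uint8Array returned by pako.inflate is unwrapped via .buffer so it can be re-viewed as the typed array named by array.dataType. A reduced sketch of the binary branch (variable names hypothetical):

// inflate() returns a Uint8Array; .buffer exposes the underlying ArrayBuffer,
// which can then be viewed as another typed array (Float32Array here as an example).
const inflated = pako.inflate(compressedBytes);
const values = new Float32Array(inflated.buffer);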
github magcius / noclip.website / src / metroid_prime / mrea.ts (View on Github)
let remainingSize = blockDecompressedSize;
            while (remainingSize > 0) {
                stream.goTo(compressedBlocksIdx);
                compressedBlocksIdx += 0x02;
                let segmentSize = stream.readInt16();
                if (segmentSize < 0) {
                    // Uncompressed segment.
                    segmentSize = -segmentSize;
                    decompressedSegments.push(stream.getBuffer().createTypedArray(Uint8Array, compressedBlocksIdx, segmentSize));
                    compressedBlocksIdx += segmentSize;
                    remainingSize -= segmentSize;
                } else {
                    if (!usesLzo) {
                        // zlib
                        const compressedSegment = stream.getBuffer().createTypedArray(Uint8Array, compressedBlocksIdx, segmentSize);
                        const decompressedSegment = Pako.inflate(compressedSegment);
                        decompressedSegments.push(decompressedSegment);
                        compressedBlocksIdx += segmentSize;
                        remainingSize -= decompressedSegment.byteLength;
                    }
                    else {
                        // LZO1X
                        const compressedSegment = stream.getBuffer().subarray(compressedBlocksIdx, segmentSize);
                        const decompressedSegment = lzoDecompress(compressedSegment, 0x4000);
                        decompressedSegments.push(decompressedSegment.createTypedArray(Uint8Array));
                        compressedBlocksIdx += segmentSize;
                        remainingSize -= decompressedSegment.byteLength;
                    }
                }
            }
            stream.goTo(offs);
        }
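The pako-specific part of this loop is small: Pako.inflate returns a Uint8Array, and its byteLength is what tells the loop how much of blockDecompressedSize each zlib segment accounted for. Reduced to those lines (a sketch, not the full loop):

// The returned Uint8Array reports its own decompressed length.
const decompressedSegment = Pako.inflate(compressedSegment);
remainingSize -= decompressedSegment.byteLength;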
github aertslab / SCope / src / components / App.jsx (View on Github)
restoreSession(ip, uuid, permalink) {
		const { history } = this.props;
		try {
			permalink = decodeURIComponent(permalink);
			let base64 = permalink.replace(/\$/g, '/');
			let deflated = window.atob(base64);
			let settings = JSON.parse(pako.inflate(deflated, { to: 'string' }));
			BackendAPI.importObject(settings);
			console.log("Restoring session"+ uuid +"...")
			BackendAPI.queryLoomFiles(uuid, () => {
				Object.keys(settings.features).map((page) => {
					settings.features[page].map((f, i) => {
						BackendAPI.updateFeature(i, f.type, f.feature, f.featureType, f.metadata ? f.metadata.description : null, page);
					})
				})
				if (settings.page && settings.loom) {
					let permalinkRedirect = (uuid) => {
						history.replace('/' + [uuid, encodeURIComponent(settings.loom), encodeURIComponent(settings.page)].join('/'));
						BackendAPI.forceUpdate();
					}
					if (!uuid) {
						this.obtainNewUUID(ip, permalinkRedirect);
					} else {
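This SCope snippet decodes a permalink: undo a URL-safe character substitution, base64-decode with atob, and pass the resulting binary string to pako.inflate with { to: 'string' }. Binary-string input like this works with pako 1.x; pako 2.x dropped it in favour of typed arrays, so with that version the string would first be converted to bytes, roughly like this (a sketch, not the project's code):

// For pako 2.x: turn the atob() output into bytes before inflating.
const bytes = Uint8Array.from(deflated, (c) => c.charCodeAt(0));
const settings = JSON.parse(pako.inflate(bytes, { to: 'string' }));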
github flowtsohg / mdx-m3-viewer / src / parsers / mpq / file.js (View on Github)
} else {
            let compressionMask = typedArray[0];

            if (compressionMask & COMPRESSION_BZIP2) {
                console.warn(`File ${this.name}, compression type 'bzip2' not supported`);
                return null;
            }

            if (compressionMask & COMPRESSION_IMPLODE) {
                console.warn(`File ${this.name}, compression type 'implode' not supported`);
                return null;
            }

            if (compressionMask & COMPRESSION_DEFLATE) {
                try {
                    typedArray = inflate(typedArray.subarray(1));
                } catch (e) {
                    console.warn(`File ${this.name}, failed to decompress with 'zlib': ${e}`);
                    return null;
                }
            }

            if (compressionMask & COMPRESSION_HUFFMAN) {
                console.warn(`File ${this.name}, compression type 'huffman' not supported`);
                return null;
            }

            if (compressionMask & COMPRESSION_ADPCM_STEREO) {
                console.warn(`File ${this.name}, compression type 'adpcm stereo' not supported`);
                return null;
            }
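Two points about the deflate branch in this MPQ reader: typedArray.subarray(1) hands pako a view that skips the one-byte compression mask without copying, and inflate throws on corrupt data, which is why the call sits in a try/catch that downgrades failure to a warning and a null return. The view trick in isolation (a sketch):

// inflate() accepts any Uint8Array view, so a subarray can skip header bytes without copying.
const payload = inflate(typedArray.subarray(1));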
github jlfwong / speedscope / src / import / utils.ts (View on Github)
this.uncompressedData = maybeCompressedDataPromise.then(async (fileData: ArrayBuffer) => {
      try {
        const result = pako.inflate(new Uint8Array(fileData)).buffer
        return result
      } catch (e) {
        return fileData
      }
    })
  }
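speedscope takes an optimistic approach: try pako.inflate, and if it throws, assume the file was never compressed and fall back to the original buffer. The same idea as a standalone helper (the function name is hypothetical):

function maybeInflate(fileData: ArrayBuffer) {
  try {
    // Compressed input: return the decompressed bytes' underlying buffer.
    return pako.inflate(new Uint8Array(fileData)).buffer;
  } catch (e) {
    // inflate() threw, so treat the data as already uncompressed.
    return fileData;
  }
}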
github quicktype / quicktype / src / quicktype-core / support / Support.ts (View on Github)
export function inflateBase64(encoded: string): string {
    const bytes = Base64.atob(encoded);
    return pako.inflate(bytes, { to: "string" });
}
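quicktype's helper is small enough to read whole: base64-decode with Base64.atob, then inflate straight to a string. A hypothetical round trip in a browser context (not quicktype code), using the global btoa and pako.deflate to produce input for it:

// Compress a JSON string, base64-encode it, then recover it with inflateBase64.
const encoded = btoa(String.fromCharCode(...Array.from(pako.deflate('{"hello":"pako"}'))));
inflateBase64(encoded); // => '{"hello":"pako"}'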
github PonyCui / SVGAPlayer-SVG / src / parser.ts (View on Github)
reader.onloadend = () => {
                if (reader.result) {
                    const buffer = reader.result
                    const inflatedData = pako.inflate(buffer)
                    const movieData = ProtoMovieEntity.decode(inflatedData)
                    res(new VideoEntity(movieData))
                }
                else {
                    rej("Load failed.")
                }
            }
            reader.onerror = () => {
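Here a FileReader supplies the compressed SVGA payload, pako.inflate unpacks it, and the protobuf decoder consumes the resulting bytes. Assuming the file was read with readAsArrayBuffer, wrapping reader.result in a Uint8Array before inflating keeps the input in the typed-array form pako's typings expect (a defensive sketch, not the project's code):

// Assumes the FileReader was started with readAsArrayBuffer().
const inflatedData = pako.inflate(new Uint8Array(reader.result as ArrayBuffer));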
github vivaxy / course / png / decode / index.js (View on Github)
function inflateSync(data) {
    return zlib.inflate(data);
  }
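One thing to note here: zlib is presumably pako imported under another name (this page indexes the call as pako.inflate), and pako's inflate is synchronous and returns its result directly, unlike Node's callback-based zlib.inflate. That is what lets a one-line inflateSync helper work; the import line below is an assumption about how the module is brought in:

const zlib = require('pako'); // pako.inflate is synchronous, so it can back inflateSync() directly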
github phihag / pdfform.js / minipdf.js (View on Github)
if (params_map) {
		predictor = params_map.Predictor;
		columns = params_map.Columns;
		if (params_map.Colors) {
			if (params_map.Colors != 1) {
				throw new Error('Unsupported predictor Colors value: ' + params_map.Colors);
			}
		}
		if (params_map.BitsPerComponent) {
			if (params_map.BitsPerComponent != 8) {
				throw new Error('Unsupported predictor BitsPerComponent value: ' + params_map.BitsPerComponent);
			}
		}
	}

	var res = pako.inflate(content);
	if (predictor == 1) {
		return res;
	}

	assert(columns > 0, 'columns must be set for PNG predictors');

	if ((predictor >= 10) && (predictor <= 15)) {
		res = png_filter(res, columns);
	} else {
		throw new Error('Unsupported predictor ' + predictor);
	}
	return res;
}