// Excerpt: inflate a compressed mappings catalog, then reinitialize the model mappings from it.
return new Promise(async (resolve, reject) => {
  try {
    // Inflate the compressed content.
    const decompress = inflate(compressedCatalog);
    const mappingsDocuments =
      new Uint8Array(decompress).reduce((data, byte) => data + String.fromCharCode(byte), '');
    const mInfo = this.processMappingsDocuments(mappingsDocuments);

    // Reinitialize the model mappings.
    if (mInfo && mInfo.exportMappings) {
      const catalogMappingsName = MappingSerializer.deserializeAtlasMappingName(
        DocumentManagementService.getMappingsInfo(mInfo.exportMappings.value));

      // If the live UI mappings name does not match the UI mappings name extracted from the
      // catalog file then use the mappings from the catalog file. Otherwise use the live
      // UI file.
      this.cfg.fileService.findMappingFiles('UI').toPromise()
        .then(async (files: string[]) => {
          await this.updateCatalog(compressedCatalog);

// Excerpt: process fetched array data; optionally inflate gzip-compressed payloads with pako
// before building the typed-array values.
return (uint8array) => {
  array.buffer = new ArrayBuffer(uint8array.length);
  // Copy uint8array into the newly allocated buffer.
  const view = new Uint8Array(array.buffer);
  view.set(uint8array);
  if (fetchGzip) {
    if (array.dataType === 'string' || array.dataType === 'JSON') {
      array.buffer = pako.inflate(new Uint8Array(array.buffer), { to: 'string' });
    } else {
      array.buffer = pako.inflate(new Uint8Array(array.buffer)).buffer;
    }
  }
  if (array.ref.encode === 'JSON') {
    array.values = JSON.parse(array.buffer);
  } else {
    if (Endian.ENDIANNESS !== array.ref.encode && Endian.ENDIANNESS) {
      // The payload's byte order differs from the host's: swap bytes in place.
      vtkDebugMacro(`Swap bytes of ${array.name}`);
      Endian.swapBytes(array.buffer, DataTypeByteSize[array.dataType]);
    }
    array.values = new window[array.dataType](array.buffer);
  }
  if (array.values.length !== array.size) {

// Excerpt: decompress a block assembled from segments stored raw, zlib-compressed, or LZO1X-compressed.
let remainingSize = blockDecompressedSize;
while (remainingSize > 0) {
  stream.goTo(compressedBlocksIdx);
  compressedBlocksIdx += 0x02;
  let segmentSize = stream.readInt16();
  if (segmentSize < 0) {
    // Uncompressed segment: a negative size marks a raw copy.
    segmentSize = -segmentSize;
    decompressedSegments.push(stream.getBuffer().createTypedArray(Uint8Array, compressedBlocksIdx, segmentSize));
    compressedBlocksIdx += segmentSize;
    remainingSize -= segmentSize;
  } else if (!usesLzo) {
    // zlib-compressed segment.
    const compressedSegment = stream.getBuffer().createTypedArray(Uint8Array, compressedBlocksIdx, segmentSize);
    const decompressedSegment = Pako.inflate(compressedSegment);
    decompressedSegments.push(decompressedSegment);
    compressedBlocksIdx += segmentSize;
    remainingSize -= decompressedSegment.byteLength;
  } else {
    // LZO1X-compressed segment.
    const compressedSegment = stream.getBuffer().subarray(compressedBlocksIdx, segmentSize);
    const decompressedSegment = lzoDecompress(compressedSegment, 0x4000);
    decompressedSegments.push(decompressedSegment.createTypedArray(Uint8Array));
    compressedBlocksIdx += segmentSize;
    remainingSize -= decompressedSegment.byteLength;
  }
}
stream.goTo(offs);
}

// Excerpt: restore a session from a permalink by base64-decoding, inflating with pako, and
// re-importing the JSON settings.
restoreSession(ip, uuid, permalink) {
  const { history } = this.props;
  try {
    permalink = decodeURIComponent(permalink);
    let base64 = permalink.replace(/\$/g, '/');
    let deflated = window.atob(base64);
    let settings = JSON.parse(pako.inflate(deflated, { to: 'string' }));
    BackendAPI.importObject(settings);
    console.log('Restoring session ' + uuid + '...');
    BackendAPI.queryLoomFiles(uuid, () => {
      Object.keys(settings.features).map((page) => {
        settings.features[page].map((f, i) => {
          BackendAPI.updateFeature(i, f.type, f.feature, f.featureType, f.metadata ? f.metadata.description : null, page);
        });
      });
      if (settings.page && settings.loom) {
        let permalinkRedirect = (uuid) => {
          history.replace('/' + [uuid, encodeURIComponent(settings.loom), encodeURIComponent(settings.page)].join('/'));
          BackendAPI.forceUpdate();
        };
        if (!uuid) {
          this.obtainNewUUID(ip, permalinkRedirect);
        } else {

// Excerpt: check a file's compression mask; only zlib 'deflate' is decompressed here, other
// schemes are reported as unsupported.
} else {
  let compressionMask = typedArray[0];
  if (compressionMask & COMPRESSION_BZIP2) {
    console.warn(`File ${this.name}, compression type 'bzip2' not supported`);
    return null;
  }
  if (compressionMask & COMPRESSION_IMPLODE) {
    console.warn(`File ${this.name}, compression type 'implode' not supported`);
    return null;
  }
  if (compressionMask & COMPRESSION_DEFLATE) {
    try {
      // The first byte is the compression mask, so inflate everything after it.
      typedArray = inflate(typedArray.subarray(1));
    } catch (e) {
      console.warn(`File ${this.name}, failed to decompress with 'zlib': ${e}`);
      return null;
    }
  }
  if (compressionMask & COMPRESSION_HUFFMAN) {
    console.warn(`File ${this.name}, compression type 'huffman' not supported`);
    return null;
  }
  if (compressionMask & COMPRESSION_ADPCM_STEREO) {
    console.warn(`File ${this.name}, compression type 'adpcm stereo' not supported`);
    return null;
  }

// Excerpt: attempt to inflate the fetched data; if inflation fails, treat the payload as
// already uncompressed and return it as-is.
this.uncompressedData = maybeCompressedDataPromise.then(async (fileData: ArrayBuffer) => {
  try {
    const result = pako.inflate(new Uint8Array(fileData)).buffer;
    return result;
  } catch (e) {
    // Not zlib-compressed (or corrupt): fall back to the raw buffer.
    return fileData;
  }
});
}

// Inflate a base64-encoded, deflate-compressed payload back to a string.
// Imports assumed (not shown in the original snippet): pako, and Base64 from js-base64.
import * as pako from "pako";
import { Base64 } from "js-base64";

export function inflateBase64(encoded: string): string {
  const bytes = Base64.atob(encoded);            // base64 -> binary string
  return pako.inflate(bytes, { to: "string" });  // inflate -> decoded text
}
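
// A matching encoder can be sketched the same way. This deflateBase64 helper is not part of the
// original snippet; it assumes the same pako (1.x, which accepts the binary-string input used
// above) and js-base64 imports, and round-trips with inflateBase64.
export function deflateBase64(text: string): string {
  const compressed = pako.deflate(text);       // Uint8Array of deflate-compressed bytes
  return Base64.fromUint8Array(compressed);    // base64-encode the compressed bytes
}
// Usage: inflateBase64(deflateBase64("hello")) === "hello"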

// Excerpt: read a downloaded file, inflate it with pako, and decode the protobuf movie entity.
reader.onloadend = () => {
  if (reader.result) {
    const buffer = reader.result;
    const inflatedData = pako.inflate(buffer);
    const movieData = ProtoMovieEntity.decode(inflatedData);
    res(new VideoEntity(movieData));
  } else {
    rej("Load failed.");
  }
};
reader.onerror = () => {

// A thin synchronous inflate wrapper.
// Note: with Node's built-in zlib, zlib.inflate() is callback-based; the synchronous form is
// zlib.inflateSync(data). As written, this wrapper only works if `zlib` refers to a synchronous
// implementation (for example, pako).
function inflateSync(data) {
  return zlib.inflate(data);
}
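
// For comparison, a pako-based synchronous inflate is a one-liner (a sketch; assumes pako is
// imported and `data` holds zlib/deflate-compressed bytes as a Uint8Array):
function inflateWithPako(data) {
  return pako.inflate(data);  // returns a Uint8Array
  // or: pako.inflate(data, { to: 'string' }) to decode straight to text
}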

// Excerpt: inflate a PDF FlateDecode stream and undo PNG predictors when DecodeParms requests them.
// (Assumed declarations, not shown in the original excerpt; Predictor defaults to 1 per the PDF spec.)
var predictor = 1;
var columns = 0;
if (params_map) {
  predictor = params_map.Predictor;
  columns = params_map.Columns;
  if (params_map.Colors) {
    if (params_map.Colors != 1) {
      throw new Error('Unsupported predictor Colors value: ' + params_map.Colors);
    }
  }
  if (params_map.BitsPerComponent) {
    if (params_map.BitsPerComponent != 8) {
      throw new Error('Unsupported predictor BitsPerComponent value: ' + params_map.BitsPerComponent);
    }
  }
}
var res = pako.inflate(content);
// Predictor 1 means no prediction was applied; return the inflated bytes as-is.
if (predictor == 1) {
  return res;
}
assert(columns > 0, 'columns must be set for PNG predictors');
// Predictors 10-15 are the PNG filters (None, Sub, Up, Average, Paeth, per-row optimum).
if ((predictor >= 10) && (predictor <= 15)) {
  res = png_filter(res, columns);
} else {
  throw new Error('Unsupported predictor ' + predictor);
}
return res;
}
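
// For context, a typical parameter dictionary that reaches the PNG-predictor branch above looks
// like this (illustrative values, not taken from the snippet): Predictor 12 is the PNG "Up"
// filter, and Columns is the row width in bytes.
var params_map = { Predictor: 12, Columns: 5, Colors: 1, BitsPerComponent: 8 };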