How to use @tanker/stream-base - common examples

To help you get started, we've selected a few @tanker/stream-base examples based on popular ways the package is used in public projects.


From TankerHQ/sdk-js: packages/core/src/CloudStorage/CloudStorageManager.js
const { head_url: headUrl, get_url: getUrl, service } = await this._client.send('get file download url', { // eslint-disable-line no-underscore-dangle
      resource_id: resourceId,
    });

    if (!streamCloudStorage[service])
      throw new InternalError(`unsupported cloud storage service: ${service}`);

    const { DownloadStream } = streamCloudStorage[service];

    const downloadChunkSize = 1024 * 1024;
    const downloader = new DownloadStream(resourceId, headUrl, getUrl, downloadChunkSize);

    const { metadata: encryptedMetadata, encryptedContentLength } = await downloader.getMetadata();
    const { encryptionFormat, clearContentLength, ...fileMetadata } = await this._decryptMetadata(encryptedMetadata);
    const combinedOutputOptions = extractOutputOptions({ type: defaultDownloadType, ...outputOptions, ...fileMetadata });
    const merger = new MergerStream(combinedOutputOptions);

    const decryptor = await this._dataProtector.makeDecryptorStream();

    // For compatibility with SDKs up to 2.2.1
    const clearSize = encryptionFormat
      ? getClearSize(encryptionFormat, encryptedContentLength)
      : clearContentLength;
    const progressHandler = new ProgressHandler(progressOptions).start(clearSize);
    decryptor.on('data', (chunk: Uint8Array) => progressHandler.report(chunk.byteLength));

    return pipeStreams({ streams: [downloader, decryptor, merger], resolveEvent: 'data' });
  }
}
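The pipeStreams helper that ties the pipeline together belongs to the Tanker codebase and its implementation is not shown in these snippets. Judging from the manual wiring in _streamDecryptData further down (explicit pipe calls plus a shared error handler), a minimal sketch could look like this; everything beyond the streams and resolveEvent option names is an assumption:

// Minimal sketch of a pipeStreams-like helper, inferred from the manual
// pipe/error wiring in _streamDecryptData below. Not the actual Tanker
// implementation.
const pipeStreams = ({ streams, resolveEvent }) => new Promise((resolve, reject) => {
  // a failure in any stage rejects the whole pipeline
  streams.forEach(stream => stream.on('error', reject));
  // chain the stages left to right; reduce returns the last stream
  const last = streams.reduce((left, right) => left.pipe(right));
  // resolve on the requested terminal event ('data' or 'finish' above)
  last.on(resolveEvent, resolve);
});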
From TankerHQ/sdk-js: packages/core/src/DataProtection/DecryptorStream.js
async _initializeStreams(headOfEncryptedData: Uint8Array) {
    let encryptedChunkSize;
    let resourceId;

    try {
      ({ encryptedChunkSize, resourceId } = encryptionV4.unserialize(headOfEncryptedData));
    } catch (e) {
      throw new InvalidArgument('encryptedData', e, headOfEncryptedData);
    }

    const key = await this._mapper.findKey(resourceId);

    this._state.maxEncryptedChunkSize = encryptedChunkSize;
    this._resizerStream = new ResizerStream(encryptedChunkSize);

    const b64ResourceId = utils.toBase64(resourceId);

    this._decryptionStream = new Transform({
      // buffering input bytes until encrypted chunk size is reached
      writableHighWaterMark: encryptedChunkSize,
      writableObjectMode: false,
      // buffering output bytes until clear chunk size is reached
      readableHighWaterMark: encryptedChunkSize - encryptionV4.overhead,
      readableObjectMode: false,

      transform: (encryptedChunk, encoding, done) => {
        try {
          const clearData = encryptionV4.decrypt(key, this._state.index, encryptionV4.unserialize(encryptedChunk));
          this._decryptionStream.push(clearData);
        } catch (error) {
          return done(new DecryptionFailed({ error, b64ResourceId }));
        }
        this._state.lastEncryptedChunkSize = encryptedChunk.length;
        this._state.index += 1; // safe as long as index < 2^53
From TankerHQ/sdk-js: packages/core/src/CloudStorage/CloudStorageManager.js
const {
      urls,
      headers,
      service,
      recommended_chunk_size: recommendedChunkSize
    } = await this._client.send('get file upload url', {
      resource_id: resourceId,
      metadata: encryptedMetadata,
      upload_content_length: totalEncryptedSize,
    });

    if (!streamCloudStorage[service])
      throw new InternalError(`unsupported cloud storage service: ${service}`);

    const streamService = streamCloudStorage[service];
    const { UploadStream } = streamService;

    const slicer = new SlicerStream({ source: clearData });
    const uploader = new UploadStream(urls, headers, totalEncryptedSize, recommendedChunkSize, encryptedMetadata);

    const progressHandler = new ProgressHandler(progressOptions).start(totalEncryptedSize);
    uploader.on('uploaded', (chunk: Uint8Array) => progressHandler.report(chunk.byteLength));

    // `encryptor` is created earlier in this method (via the data
    // protector's encryptor stream), which this snippet omits
    const streams = [slicer, encryptor];

    // Some versions of Edge (e.g. version 18) fail to handle the 308 HTTP status that
    // GCS uses in a non-standard way (no redirection expected) when uploading in chunks,
    // so we add a merger stream before the uploader to ensure there's a single upload
    // request returning the 200 HTTP status.
    if (service === 'GCS' && isEdge()) {
      const merger = new MergerStream({ type: Uint8Array });
      streams.push(merger);
    } else if (service === 'S3') {
      const resizer = new ResizerStream(recommendedChunkSize);
      streams.push(resizer);
    }

    streams.push(uploader);

    await pipeStreams({ streams, resolveEvent: 'finish' });

    return resourceId;
  }
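ResizerStream's implementation is not shown in these snippets, but the S3 branch above relies on its contract: whatever chunk sizes come in, fixed-size chunks come out. A minimal re-chunking Transform honoring that contract might look like the following sketch (illustrative only, built on Node's stream module rather than the actual Tanker class):

const { Transform } = require('stream');

// Illustrative fixed-size re-chunker: buffers input and emits chunks of
// exactly chunkSize bytes, flushing whatever remains at the end.
// Not the actual ResizerStream implementation.
class FixedSizeRechunker extends Transform {
  constructor(chunkSize) {
    super();
    this._chunkSize = chunkSize;
    this._buffered = Buffer.alloc(0);
  }

  _transform(chunk, encoding, done) {
    this._buffered = Buffer.concat([this._buffered, chunk]);
    while (this._buffered.length >= this._chunkSize) {
      this.push(this._buffered.subarray(0, this._chunkSize));
      this._buffered = this._buffered.subarray(this._chunkSize);
    }
    done();
  }

  _flush(done) {
    if (this._buffered.length > 0) this.push(this._buffered);
    done();
  }
}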
From TankerHQ/sdk-js: packages/core/src/DataProtection/EncryptorStream.js
_initializeStreams() {
    this._resizerStream = new ResizerStream(this._maxClearChunkSize);

    this._encryptorStream = new Transform({
      // buffering input bytes until clear chunk size is reached
      writableHighWaterMark: this._maxClearChunkSize,
      writableObjectMode: false,
      // buffering output bytes until encrypted chunk size is reached
      readableHighWaterMark: this._maxEncryptedChunkSize,
      readableObjectMode: false,

      transform: (clearData, encoding, done) => {
        try {
          const encryptedChunk = this._encryptChunk(clearData);
          this._encryptorStream.push(encryptedChunk);
        } catch (err) {
          return done(err);
        }
        done();
      },
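Note how this Transform mirrors the DecryptorStream above: there the readable buffer is sized as encryptedChunkSize - encryptionV4.overhead, so clear and encrypted chunk sizes differ by a constant per-chunk overhead. That is also the arithmetic a size helper like getClearSize (used in the download example) has to undo. A rough sketch, assuming every chunk carries the same constant overhead and every stream contains at least one chunk; the overhead value below is a placeholder, not the real encryptionV4 constant:

// Rough size arithmetic implied by the Transform configurations above.
// Not the real getClearSize/getEncryptedSize implementations.
const chunkOverhead = 57; // placeholder value

const getClearSizeSketch = (encryptedSize, maxEncryptedChunkSize) => {
  const chunkCount = Math.max(1, Math.ceil(encryptedSize / maxEncryptedChunkSize));
  return encryptedSize - chunkCount * chunkOverhead;
};

const getEncryptedSizeSketch = (clearSize, maxClearChunkSize) => {
  const chunkCount = Math.max(1, Math.ceil(clearSize / maxClearChunkSize));
  return clearSize + chunkCount * chunkOverhead;
};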
From TankerHQ/sdk-js: packages/core/src/DataProtection/DataProtector.js
async _streamDecryptData(encryptedData: Data, outputOptions: OutputOptions, progressOptions: ProgressOptions): Promise<Data> {
    const slicer = new SlicerStream({ source: encryptedData });
    const decryptor = await this.makeDecryptorStream();
    const merger = new MergerStream(outputOptions);

    const progressHandler = new ProgressHandler(progressOptions);

    decryptor.on('initialized', () => {
      const encryptedSize = getDataLength(encryptedData);
      const clearSize = decryptor.getClearSize(encryptedSize);
      progressHandler.start(clearSize);
    });

    decryptor.on('data', (chunk: Uint8Array) => progressHandler.report(chunk.byteLength));

    return new Promise((resolve, reject) => {
      [slicer, decryptor, merger].forEach(s => s.on('error', reject));
      slicer.pipe(decryptor).pipe(merger).on('data', resolve);
    });
  }
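The pipeline here is fed from an in-memory buffer by SlicerStream and collected by MergerStream. SlicerStream's implementation is not shown in these snippets; conceptually it is a Readable that emits successive slices of its source, roughly like this sketch (the real class may handle more source types, and the chunk size chosen below is arbitrary):

const { Readable } = require('stream');

// Illustrative Readable that slices a Uint8Array source into fixed-size
// chunks. Not the actual SlicerStream implementation.
class Uint8ArraySlicer extends Readable {
  constructor({ source, chunkSize = 64 * 1024 }) {
    super();
    this._source = source;
    this._chunkSize = chunkSize;
    this._offset = 0;
  }

  _read() {
    if (this._offset >= this._source.length) {
      this.push(null); // signal end of stream
      return;
    }
    const end = Math.min(this._offset + this._chunkSize, this._source.length);
    this.push(Buffer.from(this._source.subarray(this._offset, end)));
    this._offset = end;
  }
}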
From TankerHQ/sdk-js: packages/core/src/DataProtection/DataProtector.js
async _streamEncryptData(clearData: Data, sharingOptions: SharingOptions, outputOptions: OutputOptions, progressOptions: ProgressOptions, b64ResourceId?: b64string): Promise<Data> {
    const slicer = new SlicerStream({ source: clearData });
    const encryptor = await this.makeEncryptorStream(sharingOptions, b64ResourceId);

    const clearSize = getDataLength(clearData);
    const encryptedSize = encryptor.getEncryptedSize(clearSize);
    const progressHandler = new ProgressHandler(progressOptions).start(encryptedSize);
    encryptor.on('data', (chunk: Uint8Array) => progressHandler.report(chunk.byteLength));

    const merger = new MergerStream(outputOptions);

    return new Promise((resolve, reject) => {
      [slicer, encryptor, merger].forEach(s => s.on('error', reject));
      slicer.pipe(encryptor).pipe(merger).on('data', resolve);
    });
  }
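Both helpers report progress through the same small ProgressHandler surface: start(totalBytes) returns the handler, then report(byteLength) is called once per chunk. The implementation is not shown in these snippets; a minimal object honoring that surface could look like this (the onProgress option name and its payload shape are assumptions, not the documented API):

// Minimal sketch of the start/report surface used above. The onProgress
// option name and payload are assumptions, not the real Tanker API.
class ProgressHandlerSketch {
  constructor({ onProgress } = {}) {
    this._onProgress = onProgress || (() => {});
    this._totalBytes = 0;
    this._currentBytes = 0;
  }

  start(totalBytes) {
    this._totalBytes = totalBytes;
    this._onProgress({ currentBytes: 0, totalBytes });
    return this; // allows the new ProgressHandler(...).start(size) chaining above
  }

  report(byteLength) {
    this._currentBytes += byteLength;
    this._onProgress({ currentBytes: this._currentBytes, totalBytes: this._totalBytes });
  }
}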

@tanker/stream-base (Tanker Stream SDK)
License: Apache-2.0
Latest version published 4 months ago
Package health score: 68 / 100