How to use the @azure/storage-blob.BlockBlobURL.fromBlobURL function in @azure/storage-blob

To help you get started, we’ve selected a few @azure/storage-blob examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.

github Azure / azure-iot-sdk-node / device / samples / upload_to_blob_v2.js View on Github external
retryOptions: { maxTries: 4 },
    telemetry: { value: 'HighLevelSample V1.0.0' }, // Customized telemetry string
    keepAliveOptions: {
      enable: false
    }
  });

  // Service endpoint for the upload. NOTE(review): the SAS token is
  // interpolated into the path segment of the URL — presumably
  // blobInfo.sasToken already carries the full "container/blob?sig=..."
  // suffix returned by the IoT Hub handshake; confirm against the caller.
  const serviceURL = new ServiceURL(
    `https://${blobInfo.hostName}/${blobInfo.sasToken}`,
    pipeline
  );  

  // Initialize the blockBlobURL to a new blob by walking the URL
  // hierarchy: service -> container -> blob -> block blob.
  const containerURL = ContainerURL.fromServiceURL(serviceURL, blobInfo.containerName);
  const blobURL = BlobURL.fromContainerURL(containerURL, blobInfo.blobName);
  const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);

  // Stat the local file before uploading. NOTE(review): fileStats is not
  // referenced in the visible portion of this sample — presumably used by
  // code below this excerpt; verify before removing.
  let fileStats = await getFileStats(localFilePath);

  // Parallel upload: stream the file to the block blob in chunks.
  let uploadStatus = await uploadStreamToBlockBlob(
    Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
    fs.createReadStream(localFilePath),
    blockBlobURL,
    4 * 1024 * 1024, // 4MB block size
    20, // 20 concurrency
    {
      progress: ev => console.log(ev) // log transfer-progress events
    }
    );
    console.log('uploadStreamToBlockBlob success');
github Azure / azure-iot-sdk-node / device / samples / upload_to_blob_advanced.js View on Github external
// Build an HTTP pipeline with anonymous credentials — authentication is
// carried by the SAS token embedded in the service URL below, so no
// credential object is needed here.
const pipeline = StorageURL.newPipeline(new AnonymousCredential(), {
            retryOptions: { maxTries: 4 }, // retry transient failures up to 4 times
            telemetry: { value: 'HighLevelSample V1.0.0' }, // Customized telemetry string
            keepAliveOptions: {
            enable: false
            }
        });
        // NOTE(review): as in the other sample, the SAS token sits in the
        // path segment — presumably blobInfo.sasToken already includes the
        // "container/blob?sig=..." suffix; confirm against the caller.
        const serviceURL = new ServiceURL(
            `https://${blobInfo.hostName}/${blobInfo.sasToken}`,
            pipeline
        );

        // Initialize the blockBlobURL to a new blob by walking the URL
        // hierarchy: service -> container -> blob -> block blob.
        const containerURL = ContainerURL.fromServiceURL(serviceURL, blobInfo.containerName);
        const blobURL = BlobURL.fromContainerURL(containerURL, blobInfo.blobName);
        const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);

        // Parallel Uploading
        // We use a to() method to wrap the uploadStreamToBlockBlob so that
        // instead of a try catch we can have it return the err as a result of the operation,
        // similar to the older callback format ([err, result] tuple).
        let err, uploadStatus;
        [err, uploadStatus] = await to(uploadStreamToBlockBlob(
            Aborter.timeout(30 * 60 * 1000), // 30mins
            createReadStream(dummyFilePath),
            blockBlobURL,
            fileSize, // block-size parameter slot — here the whole file size
            20, // up to 20 concurrent block uploads
            {
                progress: ev => console.log(ev) // log transfer-progress events
            }
        ));
github Azure / ng-deploy-azure / src / builders / actions / deploy.ts View on Github external
await promiseLimit(5).map(files, async function(file: string) {
    const blobURL = BlobURL.fromContainerURL(containerURL, file);
    const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);

    const blobContentType = lookup(file) || '';
    const blobContentEncoding = charset(blobContentType) || '';

    await uploadStreamToBlockBlob(
      Aborter.timeout(30 * 60 * 60 * 1000),
      fs.createReadStream(path.join(filesPath, file)),
      blockBlobURL,
      4 * 1024 * 1024,
      20,
      {
        blobHTTPHeaders: {
          blobContentType,
          blobContentEncoding
        }
      }
github Budibase / budibase / packages / datastores / datastores / azure-blob.js View on Github external
// Create (or overwrite) the blob named `key` in the given container with
// the supplied content. Curried: bind the container once, then call with
// (key, content) per file.
export const createFile = ({containerUrl}) => async (key, content) => {
    // Fix: the original destructured `containerUrl` but then referenced
    // `containerURL` (different capitalisation), which is not defined in
    // this scope and would throw a ReferenceError at call time.
    const blobURL = BlobURL.fromContainerURL(containerUrl, key);
    const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);
    // Single-shot upload with no abort signal (Aborter.none = never abort).
    // NOTE(review): `content.length` is a character count for strings, not
    // a byte count — multi-byte content may need Buffer.byteLength; confirm
    // the expected `content` type with callers before changing.
    await blockBlobURL.upload(
        Aborter.none,
        content,
        content.length
    );
};