How to use the ServiceURL class in @azure/storage-blob

To help you get started, we’ve selected a few ServiceURL examples, based on popular ways the class is used in public projects.


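All of the snippets below share the same v10-style setup: build a credential, wrap it in a pipeline with StorageURL.newPipeline, and point a ServiceURL at the account endpoint. Here is a minimal, self-contained sketch of that pattern; the account name and key are placeholders, and the container-listing loop is just one common way to exercise the resulting object.

import {
    Aborter,
    ServiceURL,
    SharedKeyCredential,
    StorageURL,
} from "@azure/storage-blob"; // the examples on this page target the v10-style API

async function listContainers(): Promise<void> {
    // Placeholder credentials: substitute a real account name and key.
    const accountName = "<storage-account-name>";
    const accountKey = "<storage-account-key>";

    // Wrap the credential in a pipeline, then point a ServiceURL at the account endpoint.
    const credential = new SharedKeyCredential(accountName, accountKey);
    const pipeline = StorageURL.newPipeline(credential);
    const serviceURL = new ServiceURL(
        `https://${accountName}.blob.core.windows.net`,
        pipeline
    );

    // Page through containers; each listContainersSegment call returns one segment.
    let marker: string | undefined;
    do {
        const response = await serviceURL.listContainersSegment(Aborter.none, marker);
        marker = response.nextMarker;
        for (const container of response.containerItems) {
            console.log(container.name);
        }
    } while (marker);
}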
HomecareHomebase / azure-bake / ingredient / ingredient-storage / src / plugin.ts (view on GitHub)
private async ConfigureDiagnosticSettings(params: any, util: any) {
        let accountName: string;
        let accountKey: string;

        //Get blob storage properties
        accountName = params["storageAccountName"].value;
        const storageUtils = new StorageUtils(this._ctx);
        accountKey = await storageUtils.get_primary_key(accountName, await util.resource_group())
        const credentials = new SharedKeyCredential(accountName, accountKey);
        const pipeline = StorageURL.newPipeline(credentials, {
            // Enable logger when debugging
            // logger: new ConsoleHttpPipelineLogger(HttpPipelineLogLevel.INFO)
        });
        const blobPrimaryURL = `https://${accountName}.blob.core.windows.net/`;
        const serviceURL = new ServiceURL(blobPrimaryURL, pipeline);
        const serviceProperties = await serviceURL.getProperties(Aborter.none);

        //Get Bake variables for diagnostic settings.  Default to "true" (enabled) and 10 days data retention.
        let blobDiagnosticHourlyMetricsEnabled: string = await util.variable("blobDiagnosticHourlyMetricsEnabled") || "true"
        let blobDiagnosticHourlyMetricsRetentionDays = await util.variable("blobDiagnosticHourlyMetricsRetentionDays") || 10
        let blobDiagnosticMinuteMetricsEnabled: string = await util.variable("blobDiagnosticMinuteMetricsEnabled") || "true"
        let blobDiagnosticMinuteMetricsRetentionDays = await util.variable("blobDiagnosticMinuteMetricsRetentionDays") || 10
        let blobDiagnosticLoggingEnabled: string = await util.variable("blobDiagnosticLoggingEnabled") || "true"
        let blobDiagnosticLoggingRetentionDays = await util.variable("blobDiagnosticLoggingRetentionDays") || 10

        //Workaround due to issues using boolean data type for Bake variables
        const boolBlobDiagnosticHourlyMetricsEnabled: boolean = (blobDiagnosticHourlyMetricsEnabled === "true");
        const boolBlobDiagnosticMinuteMetricsEnabled: boolean = (blobDiagnosticMinuteMetricsEnabled === "true");
        const boolBlobDiagnosticLoggingEnabled: boolean = (blobDiagnosticLoggingEnabled === "true");

        //Debug logging of Bake variables
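The plugin.ts excerpt is cut off before the collected settings are applied. Purely as a hedged, illustrative continuation (not the project's actual code), the values gathered above could be written back with ServiceURL.setProperties; the property names below follow my reading of the v10 StorageServiceProperties model and should be verified against the installed typings, and the logging block is left out for brevity.

        // Illustrative sketch, reusing the variables defined in the excerpt above.
        // Field names are assumptions based on the v10 StorageServiceProperties model.
        await serviceURL.setProperties(Aborter.none, {
            hourMetrics: {
                version: "1.0",
                enabled: boolBlobDiagnosticHourlyMetricsEnabled,
                includeAPIs: boolBlobDiagnosticHourlyMetricsEnabled,
                retentionPolicy: {
                    enabled: boolBlobDiagnosticHourlyMetricsEnabled,
                    days: Number(blobDiagnosticHourlyMetricsRetentionDays)
                }
            },
            minuteMetrics: {
                version: "1.0",
                enabled: boolBlobDiagnosticMinuteMetricsEnabled,
                includeAPIs: boolBlobDiagnosticMinuteMetricsEnabled,
                retentionPolicy: {
                    enabled: boolBlobDiagnosticMinuteMetricsEnabled,
                    days: Number(blobDiagnosticMinuteMetricsRetentionDays)
                }
            }
            // The logging settings collected above would go in a `logging` block of the same object.
        });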
Azure / azure-iot-sdk-node / device / samples / upload_to_blob_v2.js (view on GitHub)
// OUR CODE
  let blobInfo = await client.getBlobSharedAccessSignature(blobName);
  if (!blobInfo) {
    throw new errors.ArgumentError('Invalid upload parameters');
  }

  // STORAGE BLOB CODE
  const pipeline = StorageURL.newPipeline(new AnonymousCredential(), {
    retryOptions: { maxTries: 4 },
    telemetry: { value: 'HighLevelSample V1.0.0' }, // Customized telemetry string
    keepAliveOptions: {
      enable: false
    }
  });

  const serviceURL = new ServiceURL(
    `https://${blobInfo.hostName}/${blobInfo.sasToken}`,
    pipeline
  );  

  // initialize the blockBlobURL to a new blob
  const containerURL = ContainerURL.fromServiceURL(serviceURL, blobInfo.containerName);
  const blobURL = BlobURL.fromContainerURL(containerURL, blobInfo.blobName);
  const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);

  // get file stats
  let fileStats = await getFileStats(localFilePath);

  // parallel uploading
  let uploadStatus = await uploadStreamToBlockBlob(
    Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
    fs.createReadStream(localFilePath),
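The sample above is truncated mid-call. For reference, the v10 uploadStreamToBlockBlob helper takes the aborter, the source stream, the destination BlockBlobURL, a per-buffer size, and a maximum number of parallel buffers, so the completed call looks roughly like this (the buffer numbers are illustrative, not the sample's values):

  // Hedged sketch of the completed call; reuses blockBlobURL and localFilePath from the excerpt.
  let uploadStatus = await uploadStreamToBlockBlob(
    Aborter.timeout(30 * 60 * 1000),    // abort the upload after 30 minutes
    fs.createReadStream(localFilePath), // source stream
    blockBlobURL,                       // destination block blob
    4 * 1024 * 1024,                    // illustrative buffer size: 4 MB per buffer
    20                                  // illustrative concurrency: up to 20 buffers in flight
  );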
Azure / azure-iot-sdk-node / device / samples / upload_to_blob_advanced.js (view on GitHub)
// Get the Shared Access Signature for the linked Azure Storage Blob from IoT Hub.
        // The IoT Hub needs to have a linked Storage Account for Upload To Blob.
        let blobInfo = await client.getBlobSharedAccessSignature(blobName);
        if (!blobInfo) {
            throw new Error('Invalid upload parameters');
        }

        // Create a new Pipeline
        const pipeline = StorageURL.newPipeline(new AnonymousCredential(), {
            retryOptions: { maxTries: 4 },
            telemetry: { value: 'HighLevelSample V1.0.0' }, // Customized telemetry string
            keepAliveOptions: {
                enable: false
            }
        });
        const serviceURL = new ServiceURL(
            `https://${blobInfo.hostName}/${blobInfo.sasToken}`,
            pipeline
        );

        // Initialize the blockBlobURL to a new blob
        const containerURL = ContainerURL.fromServiceURL(serviceURL, blobInfo.containerName);
        const blobURL = BlobURL.fromContainerURL(containerURL, blobInfo.blobName);
        const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);

        // Parallel Uploading
        // We use a to() method to wrap the uploadStreamToBlockBlob so that
        // instead of a try catch we can have it return the err as a result of the operation,
        // similar to the older callback format.
        let err, uploadStatus;
        [err, uploadStatus] = await to(uploadStreamToBlockBlob(
            Aborter.timeout(30 * 60 * 1000), // 30mins
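The to() wrapper mentioned in the comments is not part of the excerpt. A common implementation is the await-to-js pattern, a small helper that resolves to an [error, result] pair instead of throwing; whether the sample pulls it from a package or defines it inline is an assumption here.

// Minimal sketch of a to() helper in the await-to-js style.
function to<T>(promise: Promise<T>): Promise<[Error | null, T | undefined]> {
    return promise
        .then((result): [null, T] => [null, result])
        .catch((error): [Error, undefined] => [error, undefined]);
}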
Azure / ng-deploy-azure / src / builders / actions / deploy.ts (view on GitHub)
const files = await getFiles(context, filesPath, projectRoot);
  if (files.length === 0) {
    throw new Error('Target did not produce any files, or the path is incorrect.');
  }

  const client = new StorageManagementClient(credentials, azureHostingConfig.azureHosting.subscription);
  const accountKey = await getAccountKey(
    azureHostingConfig.azureHosting.account,
    client,
    azureHostingConfig.azureHosting.resourceGroupName
  );

  const pipeline = ServiceURL.newPipeline(new SharedKeyCredential(azureHostingConfig.azureHosting.account, accountKey));

  const serviceURL = new ServiceURL(
    `https://${azureHostingConfig.azureHosting.account}.blob.core.windows.net`,
    pipeline
  );

  await uploadFilesToAzure(serviceURL, context, filesPath, files);

  const accountProps = await client.storageAccounts.getProperties(
    azureHostingConfig.azureHosting.resourceGroupName,
    azureHostingConfig.azureHosting.account
  );
  const endpoint = accountProps.primaryEndpoints && accountProps.primaryEndpoints.web;

  context.logger.info(chalk.green(`see your deployed site at ${endpoint}`));
  // TODO: log url for account at Azure portal
}
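uploadFilesToAzure itself is not shown in this excerpt. Assuming it simply walks the file list and pushes each file into the static-website container with the v10 uploadFileToBlockBlob helper, a simplified stand-in could look like the sketch below; the function name, the '$web' container, and the files/filesPath shapes are this sketch's assumptions, not the builder's actual code.

import { Aborter, BlockBlobURL, ContainerURL, ServiceURL, uploadFileToBlockBlob } from '@azure/storage-blob';
import * as path from 'path';

// Hypothetical stand-in for uploadFilesToAzure: upload each file to the "$web"
// container that Azure Storage serves static websites from.
async function uploadFilesSketch(serviceURL: ServiceURL, filesPath: string, files: string[]): Promise<void> {
  const containerURL = ContainerURL.fromServiceURL(serviceURL, '$web');
  for (const file of files) {
    const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, file);
    await uploadFileToBlockBlob(Aborter.none, path.join(filesPath, file), blockBlobURL);
  }
}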
nestjs / azure-storage / lib / azure-storage.service.ts (view on GitHub)
`Error encountered: "AZURE_STORAGE_SAS_KEY" was not provided.`,
      );
    }

    const { buffer, mimetype } = file;

    if (!buffer) {
      throw new Error(
        `Error encountered: File is not a valid Buffer (missing buffer property)`,
      );
    }

    const url = this.getServiceUrl(perRequestOptions);
    const anonymousCredential = new Azure.AnonymousCredential();
    const pipeline = Azure.StorageURL.newPipeline(anonymousCredential);
    const serviceURL = new Azure.ServiceURL(
      // When using AnonymousCredential, following url should include a valid SAS
      url,
      pipeline,
    );

    // Create a container
    const containerURL = Azure.ContainerURL.fromServiceURL(
      serviceURL,
      perRequestOptions.containerName,
    );

    let doesContainerExists = false;
    try {
      doesContainerExists = await this._doesContainerExist(
        serviceURL,
        perRequestOptions.containerName,
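The private _doesContainerExist helper is cut off at the end of this excerpt. One hedged way to implement such a check against the v10 API is to page through listContainersSegment and compare names; the function below is this page's own sketch, not necessarily what the service does internally.

// Sketch of a container-existence check using the v10 listing API
// (assumes the same `import * as Azure from '@azure/storage-blob'` style as the excerpt).
async function containerExists(serviceURL: Azure.ServiceURL, containerName: string): Promise<boolean> {
    let marker: string | undefined;
    do {
        const segment = await serviceURL.listContainersSegment(Azure.Aborter.none, marker);
        if (segment.containerItems.some(item => item.name === containerName)) {
            return true;
        }
        marker = segment.nextMarker;
    } while (marker);
    return false;
}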
Azure-Samples / azure-storage-js-v10-quickstart / v10 / index.js (view on GitHub)
async function execute() {

    const containerName = "demo";
    const blobName = "quickstart.txt";
    const content = "Hello Node SDK";
    const localFilePath = "../readme.md";

    const credentials = new SharedKeyCredential(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
    const pipeline = StorageURL.newPipeline(credentials);
    const serviceURL = new ServiceURL(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, pipeline);
    
    const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
    const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, blobName);
    
    const aborter = Aborter.timeout(30 * ONE_MINUTE);

    await containerURL.create(aborter);
    console.log(`Container: "${containerName}" is created`);

    console.log("Containers:");
    await showContainerNames(aborter, serviceURL);

    await blockBlobURL.upload(aborter, content, content.length);
    console.log(`Blob "${blobName}" is uploaded`);
    
    await uploadLocalFile(aborter, containerURL, localFilePath);
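showContainerNames and uploadLocalFile are helpers defined elsewhere in the quickstart. Roughly, the first pages through listContainersSegment and the second wraps uploadFileToBlockBlob; the versions below are approximations rather than the repo's verbatim code, and they assume path and uploadFileToBlockBlob are required at the top of the file.

// Approximate helpers; check the quickstart repo for the exact versions.
async function showContainerNames(aborter, serviceURL) {
    let marker;
    do {
        const response = await serviceURL.listContainersSegment(aborter, marker);
        marker = response.nextMarker;
        for (const container of response.containerItems) {
            console.log(` - ${container.name}`);
        }
    } while (marker);
}

async function uploadLocalFile(aborter, containerURL, filePath) {
    const fileName = path.basename(filePath);
    const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, fileName);
    return uploadFileToBlockBlob(aborter, filePath, blockBlobURL);
}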
serverless / multicloud / azure / src / services / azureBlobStorage.ts (view on GitHub)
public constructor() {
    const sharedKeyCredential = new SharedKeyCredential(
      process.env.azAccount,
      process.env.azAccountKey
    );
    const pipeline = StorageURL.newPipeline(sharedKeyCredential);

    this.service = new ServiceURL(
      `https://${process.env.azAccount}.blob.core.windows.net`,
      pipeline
    );
  }
microsoft / VoTT / src / providers / storage / azureBlobStorage.ts (view on GitHub)
private getServiceURL(): ServiceURL {
        const credential = this.getCredential();
        const pipeline = StorageURL.newPipeline(credential);
        const accountUrl = this.getAccountUrl();
        const serviceUrl = new ServiceURL(
            accountUrl,
            pipeline,
        );
        return serviceUrl;
    }
Budibase / budibase / packages / datastores / datastores / azure-blob.js (view on GitHub)
const initialise = opts => {

    const sharedKeyCredential = new SharedKeyCredential(
        opts.account, 
        opts.accountKey
    );

    const pipeline = StorageURL.newPipeline(sharedKeyCredential);

    const serviceURL = new ServiceURL(
        `https://${opts.account}.blob.core.windows.net`,
        pipeline
    );

    const containerURL = ContainerURL.fromServiceURL(
      serviceURL, 
      opts.containerName
    );

    return ({
        containerURL
    });

};
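For completeness, a caller would consume this datastore roughly as follows; the option values are placeholders, and Aborter (needed for subsequent container calls) is assumed to be imported from @azure/storage-blob alongside the classes used above.

// Hypothetical usage of initialise(); all values are placeholders.
const { containerURL } = initialise({
    account: "<storage-account-name>",
    accountKey: "<storage-account-key>",
    containerName: "mydatastore"
});
// Subsequent operations go through the container, e.g. containerURL.create(Aborter.none).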