// Secure your code as it's written. Use Snyk Code to scan source code in minutes — no build needed — and fix issues immediately.
/**
 * Configures blob-service diagnostic settings (hourly metrics, minute
 * metrics, logging) for the storage account named in `params`, using a
 * shared-key pipeline against the account's primary blob endpoint.
 * Each setting defaults to enabled ("true") with 10-day retention when the
 * corresponding Bake variable is unset.
 * NOTE(review): `params` / `util` are typed `any` — presumably Bake
 * deployment-context objects; confirm and add concrete types.
 * NOTE(review): this method is truncated in this view — its closing brace
 * and the code that applies the settings are not visible.
 */
private async ConfigureDiagnosticSettings(params: any, util: any) {
let accountName: string;
let accountKey: string;
//Get blob storage properties
accountName = params["storageAccountName"].value;
const storageUtils = new StorageUtils(this._ctx);
// Shared-key auth: look up the account's primary key via its resource group.
accountKey = await storageUtils.get_primary_key(accountName, await util.resource_group())
const credentials = new SharedKeyCredential(accountName, accountKey);
const pipeline = StorageURL.newPipeline(credentials, {
// Enable logger when debugging
// logger: new ConsoleHttpPipelineLogger(HttpPipelineLogLevel.INFO)
});
const blobPrimaryURL = `https://${accountName}.blob.core.windows.net/`;
var serviceURL = new ServiceURL(blobPrimaryURL, pipeline)
// Current service properties are fetched before applying new diagnostics.
const serviceProperties = await serviceURL.getProperties(Aborter.none);
//Get Bake variables for diagnostic settings. Default to "true" (enabled) and 10 days data retention.
let blobDiagnosticHourlyMetricsEnabled: string = await util.variable("blobDiagnosticHourlyMetricsEnabled") || "true"
let blobDiagnosticHourlyMetricsRetentionDays = await util.variable("blobDiagnosticHourlyMetricsRetentionDays") || 10
let blobDiagnosticMinuteMetricsEnabled: string = await util.variable("blobDiagnosticMinuteMetricsEnabled") || "true"
let blobDiagnosticMinuteMetricsRetentionDays = await util.variable("blobDiagnosticMinuteMetricsRetentionDays") || 10
let blobDiagnosticLoggingEnabled: string = await util.variable("blobDiagnosticLoggingEnabled") || "true"
let blobDiagnosticLoggingRetentionDays = await util.variable("blobDiagnosticLoggingRetentionDays") || 10
//Workaround due to issues using boolean data type for Bake variables
var boolBlobDiagnosticHourlyMetricsEnabled: boolean = (blobDiagnosticHourlyMetricsEnabled == "true") ? true : false;
var boolBlobDiagnosticMinuteMetricsEnabled: boolean = (blobDiagnosticMinuteMetricsEnabled == "true") ? true : false;
var boolBlobDiagnosticLoggingEnabled: boolean = (blobDiagnosticLoggingEnabled == "true") ? true : false;
//Debug logging of Bake variables
// OUR CODE
// NOTE(review): a new, unrelated fragment begins here — it uploads a local
// file to an Azure blob using an IoT Hub-issued SAS, and is truncated
// mid-call at the end (the uploadStreamToBlockBlob argument list is cut off).
// Ask IoT Hub for a SAS granting write access to the linked storage blob.
let blobInfo = await client.getBlobSharedAccessSignature(blobName);
if (!blobInfo) {
throw new errors.ArgumentError('Invalid upload parameters');
}
// STORAGE BLOB CODE
// Anonymous credential: the SAS token embedded in the URL below carries the
// authorization.
const pipeline = StorageURL.newPipeline(new AnonymousCredential(), {
retryOptions: { maxTries: 4 },
telemetry: { value: 'HighLevelSample V1.0.0' }, // Customized telemetry string
keepAliveOptions: {
enable: false
}
});
const serviceURL = new ServiceURL(
`https://${blobInfo.hostName}/${blobInfo.sasToken}`,
pipeline
);
// initialize the blockBlobURL to a new blob
const containerURL = ContainerURL.fromServiceURL(serviceURL, blobInfo.containerName);
const blobURL = BlobURL.fromContainerURL(containerURL, blobInfo.blobName);
const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);
// get file stats
let fileStats = await getFileStats(localFilePath);
// parallel uploading
// NOTE(review): truncated here — the remaining arguments (block size,
// parallelism, etc.) and the closing of this call are not visible.
let uploadStatus = await uploadStreamToBlockBlob(
Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
fs.createReadStream(localFilePath),
// Get the Shared Access Signature for the linked Azure Storage Blob from IoT Hub.
// The IoT Hub needs to have a linked Storage Account for Upload To Blob.
// NOTE(review): near-duplicate of the fragment above; this variant throws a
// plain Error and wraps the upload in to() instead of try/catch. It is also
// truncated mid-call at the end.
let blobInfo = await client.getBlobSharedAccessSignature(blobName);
if (!blobInfo) {
throw new Error('Invalid upload parameters');
}
// Create a new Pipeline
// Anonymous credential: the SAS token in the service URL authorizes requests.
const pipeline = StorageURL.newPipeline(new AnonymousCredential(), {
retryOptions: { maxTries: 4 },
telemetry: { value: 'HighLevelSample V1.0.0' }, // Customized telemetry string
keepAliveOptions: {
enable: false
}
});
const serviceURL = new ServiceURL(
`https://${blobInfo.hostName}/${blobInfo.sasToken}`,
pipeline
);
// Initialize the blockBlobURL to a new blob
const containerURL = ContainerURL.fromServiceURL(serviceURL, blobInfo.containerName);
const blobURL = BlobURL.fromContainerURL(containerURL, blobInfo.blobName);
const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);
// Parallel Uploading
// We use a to() method to wrap the uploadStreamToBlockBlob so that
// instead of a try catch we can have it return the err as a result of the operation,
// similar to the older callback format.
let err, uploadStatus;
// NOTE(review): truncated here — the rest of the argument list and the
// destructured-result handling are not visible.
[err, uploadStatus] = await to(uploadStreamToBlockBlob(
Aborter.timeout(30 * 60 * 1000), // 30mins
// NOTE(review): fragment begins mid-function — the enclosing declaration and
// earlier statements are not visible. It uploads build output to an Azure
// storage account configured for static-website hosting.
const files = await getFiles(context, filesPath, projectRoot);
if (files.length === 0) {
throw new Error('Target did not produce any files, or the path is incorrect.');
}
// Resolve the storage account key through the ARM management client.
const client = new StorageManagementClient(credentials, azureHostingConfig.azureHosting.subscription);
const accountKey = await getAccountKey(
azureHostingConfig.azureHosting.account,
client,
azureHostingConfig.azureHosting.resourceGroupName
);
// Shared-key pipeline against the account's primary blob endpoint.
// NOTE(review): newPipeline is called on ServiceURL here rather than
// StorageURL — presumably inherited; confirm against the SDK version in use.
const pipeline = ServiceURL.newPipeline(new SharedKeyCredential(azureHostingConfig.azureHosting.account, accountKey));
const serviceURL = new ServiceURL(
`https://${azureHostingConfig.azureHosting.account}.blob.core.windows.net`,
pipeline
);
await uploadFilesToAzure(serviceURL, context, filesPath, files);
// After upload, report the static-website endpoint (primaryEndpoints.web).
const accountProps = await client.storageAccounts.getProperties(
azureHostingConfig.azureHosting.resourceGroupName,
azureHostingConfig.azureHosting.account
);
const endpoint = accountProps.primaryEndpoints && accountProps.primaryEndpoints.web;
context.logger.info(chalk.green(`see your deployed site at ${endpoint}`));
// TODO: log url for account at Azure portal
}
// NOTE(review): fragment begins mid-method — the statement that throws the
// message below (when AZURE_STORAGE_SAS_KEY is missing) opens before the
// visible lines.
`Error encountered: "AZURE_STORAGE_SAS_KEY" was not provided.`,
);
}
// Validate the multipart upload payload before touching storage.
const { buffer, mimetype } = file;
if (!buffer) {
throw new Error(
`Error encountered: File is not a valid Buffer (missing buffer property)`,
);
}
const url = this.getServiceUrl(perRequestOptions);
// SAS-based auth: the service URL is expected to carry the token, so an
// anonymous credential is used for the pipeline.
const anonymousCredential = new Azure.AnonymousCredential();
const pipeline = Azure.StorageURL.newPipeline(anonymousCredential);
const serviceURL = new Azure.ServiceURL(
// When using AnonymousCredential, following url should include a valid SAS
url,
pipeline,
);
// Create a container
const containerURL = Azure.ContainerURL.fromServiceURL(
serviceURL,
perRequestOptions.containerName,
);
let doesContainerExists = false;
try {
// NOTE(review): truncated here — the remaining arguments to
// _doesContainerExist and the rest of the try block are not visible.
doesContainerExists = await this._doesContainerExist(
serviceURL,
perRequestOptions.containerName,
/**
 * Quickstart demo: creates a container on the account identified by
 * STORAGE_ACCOUNT_NAME / ACCOUNT_ACCESS_KEY, uploads an in-memory string
 * blob and a local file, and lists container names along the way.
 * NOTE(review): truncated — the function's closing brace and any remaining
 * statements are not visible in this fragment.
 */
async function execute() {
const containerName = "demo";
const blobName = "quickstart.txt";
const content = "Hello Node SDK";
const localFilePath = "../readme.md";
const credentials = new SharedKeyCredential(STORAGE_ACCOUNT_NAME, ACCOUNT_ACCESS_KEY);
const pipeline = StorageURL.newPipeline(credentials);
const serviceURL = new ServiceURL(`https://${STORAGE_ACCOUNT_NAME}.blob.core.windows.net`, pipeline);
const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, blobName);
// All operations below share a single 30-minute abort window.
const aborter = Aborter.timeout(30 * ONE_MINUTE);
await containerURL.create(aborter);
console.log(`Container: "${containerName}" is created`);
console.log("Containers:");
await showContainerNames(aborter, serviceURL);
// Upload the in-memory string, then the local file.
await blockBlobURL.upload(aborter, content, content.length);
console.log(`Blob "${blobName}" is uploaded`);
await uploadLocalFile(aborter, containerURL, localFilePath);
/**
 * Wires up the blob ServiceURL for the storage account named by the
 * `azAccount` / `azAccountKey` environment variables and stores it on
 * `this.service`.
 * NOTE(review): assumes both env vars are set at construction time —
 * confirm they are validated at startup.
 */
public constructor() {
const credential = new SharedKeyCredential(
process.env.azAccount,
process.env.azAccountKey
);
const blobEndpoint = `https://${process.env.azAccount}.blob.core.windows.net`;
this.service = new ServiceURL(blobEndpoint, StorageURL.newPipeline(credential));
}
/**
 * Builds a fresh ServiceURL for the configured account, combining this
 * instance's credential with its account URL via a new pipeline.
 */
private getServiceURL(): ServiceURL {
const pipeline = StorageURL.newPipeline(this.getCredential());
return new ServiceURL(this.getAccountUrl(), pipeline);
}
/**
 * Creates a ContainerURL for `opts.containerName` on the storage account
 * identified by `opts.account`, authenticating with shared-key credentials.
 *
 * @param opts - expects { account, accountKey, containerName }
 * @returns an object exposing the ready-to-use `containerURL`
 */
const initialise = opts => {
const sharedKeyCredential = new SharedKeyCredential(
opts.account,
opts.accountKey
);
const pipeline = StorageURL.newPipeline(sharedKeyCredential);
const serviceURL = new ServiceURL(
// BUG FIX: was `${account}` — a bare, undefined identifier that threw a
// ReferenceError at runtime; the account name lives on `opts`.
`https://${opts.account}.blob.core.windows.net`,
pipeline
);
const containerURL = ContainerURL.fromServiceURL(
serviceURL,
opts.containerName
);
return ({
containerURL
});
};