Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// NOTE(review): fragment — the enclosing async function's declaration is
// outside this view; the closing brace below belongs to it.
// Build a ServiceURL from the host + SAS token (presumably obtained from
// IoT Hub's blob-upload API — confirm against the caller).
const serviceURL = new ServiceURL(
`https://${blobInfo.hostName}/${blobInfo.sasToken}`,
pipeline
);
// Initialize the blockBlobURL to a new blob: service -> container -> blob -> block blob.
const containerURL = ContainerURL.fromServiceURL(serviceURL, blobInfo.containerName);
const blobURL = BlobURL.fromContainerURL(containerURL, blobInfo.blobName);
const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);
// Get file stats. NOTE(review): fileStats is never read in this fragment —
// verify it is needed, or drop it.
let fileStats = await getFileStats(localFilePath);
// Parallel upload: stream the local file to the block blob.
let uploadStatus = await uploadStreamToBlockBlob(
Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
fs.createReadStream(localFilePath),
blockBlobURL,
4 * 1024 * 1024, // 4MB block size
20, // 20 concurrency
{
progress: ev => console.log(ev) // log raw progress events for visibility
}
);
console.log('uploadStreamToBlockBlob success');
// END STORAGE CODE
// Notify IoT Hub of upload to blob status (success/failure).
await client.notifyBlobUploadStatus(uploadStatus);
return 0;
}
// NOTE(review): fragment — starts mid-function and ends inside the `if (err)`
// branch; the rest of the error handling is outside this view.
const serviceURL = new ServiceURL(
`https://${blobInfo.hostName}/${blobInfo.sasToken}`,
pipeline
);
// Initialize the blockBlobURL to a new blob
const containerURL = ContainerURL.fromServiceURL(serviceURL, blobInfo.containerName);
const blobURL = BlobURL.fromContainerURL(containerURL, blobInfo.blobName);
const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);
// Parallel Uploading
// We use a to() method to wrap the uploadStreamToBlockBlob so that
// instead of a try catch we can have it return the err as a result of the operation,
// similar to the older callback format.
let err, uploadStatus;
[err, uploadStatus] = await to(uploadStreamToBlockBlob(
Aborter.timeout(30 * 60 * 1000), // 30mins
createReadStream(dummyFilePath),
blockBlobURL,
fileSize, // NOTE(review): used as bufferSize here — presumably so the whole file fits one buffer; confirm intent
20,
{
progress: ev => console.log(ev)
}
));
let isSuccess, statusCode, statusDescription;
if (err) {
// NOTE(review): misleading message — it is the *upload* that failed here,
// notifyBlobUploadStatus has not been called yet.
console.error('notifyBlobUploadStatus failed');
isSuccess = false;
// NOTE(review): err.response may be undefined for network/abort errors,
// which would make this throw — consider optional chaining.
statusCode = err.response.headers.get("x-ms-error-code");
statusDescription = '';
// NOTE(review): fragment — top-level await inside an unseen async scope.
// Upload each file with at most 5 in flight at once (promiseLimit(5)).
await promiseLimit(5).map(files, async function(file: string) {
const blobURL = BlobURL.fromContainerURL(containerURL, file);
const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);
// Derive content headers from the file name — lookup/charset are presumably
// from the mime-types package; empty string when unknown.
const blobContentType = lookup(file) || '';
const blobContentEncoding = charset(blobContentType) || '';
await uploadStreamToBlockBlob(
// NOTE(review): 30 * 60 * 60 * 1000 ms = 30 HOURS, not 30 minutes —
// confirm this timeout is intentional (other snippets use 30 * 60 * 1000).
Aborter.timeout(30 * 60 * 60 * 1000),
fs.createReadStream(path.join(filesPath, file)),
blockBlobURL,
4 * 1024 * 1024, // 4MB buffer size
20, // up to 20 buffers in flight per file
{
blobHTTPHeaders: {
blobContentType,
blobContentEncoding
}
}
);
bar.tick(1); // advance the progress bar one file
});
/**
 * Shallow-merges `index` over the currently stored index and uploads the
 * merged JSON document to `this.indexBlobUrl` as a single stream.
 *
 * @param index - Partial index whose entries override the stored ones.
 * @throws FailedIndexUpsert wrapping the underlying error (fetch, serialize,
 *         or upload failure), together with the index that was being applied.
 */
async upsertIndex(index: Index) {
  try {
    const currentIndex = await this.getIndex();
    // Later keys win: entries in `index` override those in `currentIndex`.
    const updatedIndex = JSON.stringify({ ...currentIndex, ...index });
    const indexStream = intoStream(updatedIndex);
    // Fix: the 4th argument is a *byte*-sized buffer size. `updatedIndex.length`
    // counts UTF-16 code units, which undercounts bytes for non-ASCII JSON and
    // would needlessly split the payload across buffers; Buffer.byteLength
    // gives the true UTF-8 size so the whole document fits one buffer.
    await uploadStreamToBlockBlob(
      this.aborter,
      indexStream,
      this.indexBlobUrl,
      Buffer.byteLength(updatedIndex, 'utf8'),
      10 // max concurrent buffers
    );
  } catch (error) {
    throw new FailedIndexUpsert(error, index);
  }
}
// NOTE(review): fragment — this expression produces a Promise[] and must be
// awaited (presumably via Promise.all) by the surrounding code outside this
// view; confirm nothing drops the rejections.
files.map(f =>
uploadStreamToBlockBlob(
this.aborter,
f.stream as Readable,
// Destination blob path: <basePath>/<file name> within this.container.
BlockBlobURL.fromContainerURL(this.container, `${basePath}/${f.name}`),
FOUR_MEGABYTES, // per-buffer size
this.concurrentConnections // max buffers in flight
)
)
async function uploadStream(aborter, containerURL, filePath) {
filePath = path.resolve(filePath);
const fileName = path.basename(filePath).replace('.md', '-stream.md');
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, fileName);
const stream = fs.createReadStream(filePath, {
highWaterMark: FOUR_MEGABYTES,
});
const uploadOptions = {
bufferSize: FOUR_MEGABYTES,
maxBuffers: 5,
};
return await uploadStreamToBlockBlob(
aborter,
stream,
blockBlobURL,
uploadOptions.bufferSize,
uploadOptions.maxBuffers);
}