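// The Azure Files sample below assumes that `shareClient`, `directoryClient`, `fileName`,
// and `localFilePath` are already defined. A minimal setup sketch, assuming the
// @azure/storage-file-share v12 API; the connection string, share, directory, and local
// file path values are placeholders:
const fs = require("fs");
const { ShareServiceClient } = require("@azure/storage-file-share");

const serviceClient = ShareServiceClient.fromConnectionString("<connection-string>");
const shareClient = serviceClient.getShareClient("<share-name>");
await shareClient.create();
const directoryClient = shareClient.getDirectoryClient("<directory-name>");
await directoryClient.create();
const fileName = "<file-name>";
const localFilePath = "<path-to-local-file>";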
const fileClient = directoryClient.getFileClient(fileName);
const fileSize = fs.statSync(localFilePath).size;
// Parallel uploading with FileClient.uploadFile() in Node.js runtime
// FileClient.uploadFile() is only available in Node.js
await fileClient.uploadFile(localFilePath, {
rangeSize: 4 * 1024 * 1024, // 4MB range size
concurrency: 20, // 20 concurrency
onProgress: (ev) => console.log(ev)
});
console.log("uploadFile success");
// Parallel uploading a Readable stream with FileClient.uploadStream() in Node.js runtime
// FileClient.uploadStream() is only available in Node.js
await fileClient.uploadStream(fs.createReadStream(localFilePath), fileSize, 4 * 1024 * 1024, 20, {
abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort the upload if it takes longer than 30 minutes
onProgress: (ev: any) => console.log(ev)
});
console.log("uploadStream success");
// Parallel uploading a browser File/Blob/ArrayBuffer in browsers with FileClient.uploadBrowserData()
// Uncomment following code in browsers because FileClient.uploadBrowserData() is only available in browsers
/*
const browserFile = document.getElementById("fileinput").files[0];
await fileClient.uploadBrowserData(browserFile, {
rangeSize: 4 * 1024 * 1024, // 4MB range size
concurrency: 20, // 20 concurrency
onProgress: ev => console.log(ev)
});
*/
// Parallel downloading an Azure file into Node.js buffer
// FileClient.downloadToBuffer() is only available in Node.js
const buffer = Buffer.alloc(fileSize);
await fileClient.downloadToBuffer(buffer, undefined, undefined, {
abortSignal: AbortController.timeout(30 * 60 * 1000),
rangeSize: 4 * 1024 * 1024, // 4MB range size
concurrency: 20, // 20 concurrency
onProgress: (ev) => console.log(ev)
});
console.log("downloadToBuffer success");
// Delete share
await shareClient.delete();
console.log("deleted share");
}
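// The block blob snippets below assume a `blockBlobClient` already exists and that the
// blob has content to download. A minimal setup sketch, assuming the @azure/storage-blob
// v12 API; the connection string, container, and blob names are placeholders, and
// `localFilePath` reuses the placeholder from the setup above:
const { BlobServiceClient } = require("@azure/storage-blob");

const blobServiceClient = BlobServiceClient.fromConnectionString("<connection-string>");
const containerClient = blobServiceClient.getContainerClient("<container-name>");
await containerClient.create();
const blockBlobClient = containerClient.getBlockBlobClient("<blob-name>");
// Upload the local file so there is something to download; uploadFile() is Node.js only.
await blockBlobClient.uploadFile(localFilePath, {
  blockSize: 4 * 1024 * 1024, // 4MB block size
  concurrency: 20
});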
// Parallel uploading a browser File/Blob/ArrayBuffer in browsers with BlockBlobClient.uploadBrowserData()
// Uncomment following code in browsers because BlockBlobClient.uploadBrowserData() is only available in browsers
/*
const browserFile = document.getElementById("fileinput").files[0];
await blockBlobClient.uploadBrowserData(browserFile, {
blockSize: 4 * 1024 * 1024, // 4MB block size
concurrency: 20, // 20 concurrency
onProgress: ev => console.log(ev)
});
*/
// Parallel downloading a block blob into Node.js buffer
// downloadToBuffer is only available in Node.js
const fileSize = fs.statSync(localFilePath).size;
const buffer = Buffer.alloc(fileSize);
try {
await blockBlobClient.downloadToBuffer(buffer, 0, undefined, {
abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort the download if it takes longer than 30 minutes
blockSize: 4 * 1024 * 1024, // 4MB block size
concurrency: 20, // 20 concurrency
onProgress: (ev) => console.log(ev)
});
console.log("downloadToBuffer succeeds");
} catch (err) {
console.log(
`downloadToBuffer failed, requestId - ${err.details.requestId}, statusCode - ${err.statusCode}, errorCode - ${err.details.errorCode}`
);
}
// Archive the blob - Log the error codes
await blockBlobClient.setAccessTier("Archive");
try {
// Downloading an archived blockBlob fails
console.log("// Downloading an archived blockBlob fails...");
} catch (err) {
// BlobArchived: Conflict (409). This operation is not permitted on an archived blob.
console.log(
`downloading the archived blob failed as expected, requestId - ${err.details.requestId}, statusCode - ${err.statusCode}, errorCode - ${err.details.errorCode}`
);
}
export enum StorageRetryPolicyType {
  /**
   * Exponential retry. Retry time delay grows exponentially.
   */
  EXPONENTIAL,
  /**
   * Linear retry. Retry time delay grows linearly.
   */
  FIXED
}
// Default values of StorageRetryOptions
const DEFAULT_RETRY_OPTIONS: StorageRetryOptions = {
maxRetryDelayInMs: 120 * 1000,
maxTries: 4,
retryDelayInMs: 4 * 1000,
retryPolicyType: StorageRetryPolicyType.EXPONENTIAL,
tryTimeoutInMs: undefined // Use server side default timeout strategy
};
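// Example of overriding these defaults when constructing a client. This is a sketch; it
// assumes a storage client (here @azure/storage-blob) whose pipeline options accept a
// `retryOptions` field, and the connection string is a placeholder.
const { BlobServiceClient, StorageRetryPolicyType } = require("@azure/storage-blob");

const clientWithRetries = BlobServiceClient.fromConnectionString("<connection-string>", {
  retryOptions: {
    retryPolicyType: StorageRetryPolicyType.FIXED, // linear (fixed-delay) retries
    maxTries: 5,
    retryDelayInMs: 2 * 1000,
    maxRetryDelayInMs: 60 * 1000
  }
});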
const RETRY_ABORT_ERROR = new AbortError("The operation was aborted.");
/**
* Retry policy with exponential retry and linear retry implemented.
*
* @class StorageRetryPolicy
* @extends {BaseRequestPolicy}
*/
export class StorageRetryPolicy extends BaseRequestPolicy {
/**
* RetryOptions.
*
* @private
* @type {StorageRetryOptions}
* @memberof StorageRetryPolicy
*/
private readonly retryOptions: StorageRetryOptions;
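// The StorageRetryPolicy snippet above is cut off before the retry logic itself. The
// function below is only a sketch of how a delay could be derived from StorageRetryOptions
// for the two policy types; it is an assumption, not the library's actual implementation.
function computeRetryDelayInMs(options, attempt) {
  if (options.retryPolicyType === StorageRetryPolicyType.EXPONENTIAL) {
    // Exponential backoff: grow with the attempt number, capped at maxRetryDelayInMs.
    const delay = (Math.pow(2, attempt - 1) - 1) * options.retryDelayInMs;
    return Math.min(delay, options.maxRetryDelayInMs);
  }
  // FIXED: a constant (linear) delay between attempts.
  return options.retryDelayInMs;
}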
receive(onMessage: OnMessage, onError: OnError, abortSignal?: AbortSignalLike): ReceiveHandler {
this._throwIfReceiverOrConnectionClosed();
this._throwIfAlreadyReceiving();
const baseConsumer = this._baseConsumer!;
if (typeof onMessage !== "function") {
throw new TypeError("The parameter 'onMessage' must be of type 'function'.");
}
if (typeof onError !== "function") {
throw new TypeError("The parameter 'onError' must be of type 'function'.");
}
// return immediately if the abortSignal is already aborted.
if (abortSignal && abortSignal.aborted) {
onError(new AbortError("The receive operation has been cancelled by the user."));
// close this receiver when user triggers a cancellation.
this.close().catch(() => {}); // no-op close error handler
return new ReceiveHandler(baseConsumer);
}
const wrappedOnError = (error: Error) => {
// ignore retryable errors
if ((error as MessagingError).retryable) {
return;
}
logger.warning(
"[%s] Since the error is not retryable, we let the user know about it by calling the user's error handler.",
this._context.connectionId
);
logErrorStackTrace(error);
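// Usage sketch for the receive() method above, cancelling via an AbortSignal. Only the
// @azure/abort-controller calls are known API here; `consumer` is a hypothetical object
// exposing the receive() method shown above.
const { AbortController } = require("@azure/abort-controller");

const abortController = new AbortController();

const receiveHandler = consumer.receive(
  (eventData) => console.log("message:", eventData), // onMessage
  (err) => console.error("receive error:", err), // onError
  abortController.signal // optional abortSignal
);

// Later: abort to cancel receiving. (The snippet above only shows the already-aborted
// case; handling of an abort raised mid-receive lives in the truncated remainder.)
abortController.abort();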