How to use the generateUuid function in @azure/ms-rest-js

To help you get started, we've selected a few @azure/ms-rest-js examples based on popular ways it's used in public projects.
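
generateUuid takes no arguments and returns a random UUID as a string. Every example below uses it to mint a unique identifier of some kind: a table RowKey, a MIME batch boundary, a block-ID prefix, or a client request ID. A minimal sketch of a direct call:

import { generateUuid } from "@azure/ms-rest-js";

// Returns a freshly generated UUID string, e.g. "36b8f84d-df4e-4d49-b662-bcde71a8764f".
const id: string = generateUuid();
console.log(id);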


github nestjs / azure-database / lib / table-storage / azure-table.mapper.ts
  static createEntity<T>(partialDto: Partial<T>, rowKeyValue = generateUuid()) {
    // Note: make sure we are getting the metadata from the DTO constructor
    // See: src/table-storage/azure-table.repository.ts
    const entityDescriptor = Reflect.getMetadata(AZURE_TABLE_ENTITY, partialDto.constructor) as PartitionRowKeyValues;

    for (const key in partialDto) {
      if (entityDescriptor[key]) {
        // update the value property
        entityDescriptor[key]._ = partialDto[key];
      }
    }
    // make sure we have a unique RowKey
    entityDescriptor.RowKey._ = rowKeyValue;

    logger.debug(`Mapped Entity from DTO:`);
    logger.debug(`- PartitionKey=${entityDescriptor.PartitionKey._}`);
    logger.debug(`- RowKey=${rowKeyValue}`);

    return entityDescriptor;
  }
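
A hypothetical call site, assuming the entity decorators that @nestjs/azure-database provides and using AzureEntityMapper as a stand-in name for the class this file exports:

import { EntityPartitionKey, EntityRowKey, EntityString } from "@nestjs/azure-database";

// Illustrative DTO: the decorators register the metadata that createEntity
// reads back via Reflect.getMetadata on the DTO's constructor.
@EntityPartitionKey("events")
@EntityRowKey("placeholder") // overwritten below; createEntity always assigns a fresh RowKey
class EventDTO {
  @EntityString() name?: string;
}

// The metadata lookup happens on partialDto.constructor, so a real DTO instance
// is required; a plain object literal would resolve to Object and find nothing.
const dto = Object.assign(new EventDTO(), { name: "deploy" });

// Omitting the second argument lets the default parameter call generateUuid(),
// guaranteeing a unique RowKey for the new entity.
const entity = AzureEntityMapper.createEntity(dto);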
github Azure / azure-sdk-for-js / sdk / storage / storage-BatchRequest.ts
constructor() {
    this.operationCount = 0;
    this.body = "";

    let tempGuid = generateUuid();

    // batch_{batchid}
    this.boundary = `batch_${tempGuid}`;
    // --batch_{batchid}
    // Content-Type: application/http
    // Content-Transfer-Encoding: binary
    this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`;
    // multipart/mixed; boundary=batch_{batchid}
    this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`;
    // --batch_{batchid}--
    this.batchRequestEnding = `--${this.boundary}--`;

    this.subRequests = new Map();
  }
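
The UUID's only job here is to make the MIME multipart boundary globally unique. A rough sketch of the strings this constructor assembles, with the SDK-internal constants (HTTP_LINE_ENDING, HeaderConstants) replaced by their literal values:

import { generateUuid } from "@azure/ms-rest-js";

const HTTP_LINE_ENDING = "\r\n"; // stand-in for the SDK's internal constant

const boundary = `batch_${generateUuid()}`;
// Prefix written before each sub-request in the multipart body.
const subRequestPrefix =
  `--${boundary}${HTTP_LINE_ENDING}` +
  `Content-Type: application/http${HTTP_LINE_ENDING}` +
  `Content-Transfer-Encoding: binary`;
// Content-Type of the batch request as a whole.
const multipartContentType = `multipart/mixed; boundary=${boundary}`;
// Terminator written after the last sub-request.
const batchRequestEnding = `--${boundary}--`;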
github Azure / azure-sdk-for-js / sdk / storage / storage-highlevel.node.ts
}

  if (size <= options.maxSingleShotSize) {
    return blockBlobURL.upload(aborter, () => streamFactory(0), size, options);
  }

  const numBlocks: number = Math.floor((size - 1) / options.blockSize) + 1;
  if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {
    throw new RangeError(
      `The buffer's size is too big or the BlockSize is too small; ` +
        `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`
    );
  }

  const blockList: string[] = [];
  const blockIDPrefix = generateUuid();
  let transferProgress: number = 0;

  const batch = new Batch(options.parallelism);
  for (let i = 0; i < numBlocks; i++) {
    batch.addOperation(
      async (): Promise<any> => {
        const blockID = generateBlockID(blockIDPrefix, i);
        const start = options.blockSize! * i;
        const end = i === numBlocks - 1 ? size : start + options.blockSize!;
        const contentLength = end - start;
        blockList.push(blockID);
        await blockBlobURL.stageBlock(
          aborter,
          blockID,
          () => streamFactory(start, contentLength),
          contentLength,
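
generateBlockID is an SDK-internal helper, but the underlying requirement is simple: the Block Blob service expects every block ID within a blob to be a base64 string, and all IDs to have the same length. Pairing one generateUuid() prefix with a zero-padded block index satisfies both. An illustrative re-implementation of the pattern (not the SDK's exact code):

import { generateUuid } from "@azure/ms-rest-js";

// Illustrative stand-in for the SDK's generateBlockID helper.
function makeBlockID(prefix: string, index: number): string {
  // Zero-padding keeps every ID the same length, as the service requires.
  const padded = index.toString().padStart(6, "0");
  return Buffer.from(`${prefix}-${padded}`).toString("base64");
}

const prefix = generateUuid(); // one prefix per upload
const first = makeBlockID(prefix, 0);
const last = makeBlockID(prefix, 49999); // BLOCK_BLOB_MAX_BLOCKS is 50,000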
github Azure / azure-sdk-for-js / sdk / storage / storage-policies / UniqueRequestIDPolicy.ts
public async sendRequest(request: WebResource): Promise<HttpOperationResponse> {
    if (!request.headers.contains(HeaderConstants.X_MS_CLIENT_REQUEST_ID)) {
      request.headers.set(HeaderConstants.X_MS_CLIENT_REQUEST_ID, generateUuid());
    }

    return this._nextPolicy.sendRequest(request);
  }
}
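
To take effect, a policy like this is handed to the pipeline as a factory. A minimal self-contained sketch of the same idea, written against the public @azure/ms-rest-js types (the class name is illustrative):

import {
  BaseRequestPolicy,
  HttpOperationResponse,
  RequestPolicy,
  RequestPolicyFactory,
  RequestPolicyOptions,
  WebResource,
  generateUuid
} from "@azure/ms-rest-js";

// Stamp every outgoing request with a client request ID, unless the
// caller has already set one explicitly.
class ClientRequestIdPolicy extends BaseRequestPolicy {
  constructor(nextPolicy: RequestPolicy, options: RequestPolicyOptions) {
    super(nextPolicy, options);
  }

  public async sendRequest(request: WebResource): Promise<HttpOperationResponse> {
    if (!request.headers.contains("x-ms-client-request-id")) {
      request.headers.set("x-ms-client-request-id", generateUuid());
    }
    return this._nextPolicy.sendRequest(request);
  }
}

// Factories of this shape can be included in a ServiceClient's requestPolicyFactories.
const clientRequestIdPolicyFactory: RequestPolicyFactory = {
  create: (nextPolicy, options) => new ClientRequestIdPolicy(nextPolicy, options)
};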
github Azure / azure-sdk-for-js / sdk / storage / storage-highlevel.node.ts
aborter: Aborter,
  stream: Readable,
  blockBlobURL: BlockBlobURL,
  bufferSize: number,
  maxBuffers: number,
  options: IUploadStreamToBlockBlobOptions = {}
): Promise<BlobUploadCommonResponse> {
  if (!options.blobHTTPHeaders) {
    options.blobHTTPHeaders = {};
  }
  if (!options.accessConditions) {
    options.accessConditions = {};
  }

  let blockNum = 0;
  const blockIDPrefix = generateUuid();
  let transferProgress: number = 0;
  const blockList: string[] = [];

  const scheduler = new BufferScheduler(
    stream,
    bufferSize,
    maxBuffers,
    async (buffer: Buffer) => {
      const blockID = generateBlockID(blockIDPrefix, blockNum);
      blockList.push(blockID);
      blockNum++;

      await blockBlobURL.stageBlock(aborter, blockID, buffer, buffer.length, {
        leaseAccessConditions: options.accessConditions!.leaseAccessConditions
      });
github Azure / azure-sdk-for-js / sdk / storage / storage-highlevel.browser.ts
}

  if (size <= options.maxSingleShotSize) {
    return blockBlobURL.upload(aborter, blobFactory(0, size), size, options);
  }

  const numBlocks: number = Math.floor((size - 1) / options.blockSize) + 1;
  if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) {
    throw new RangeError(
      `The buffer's size is too big or the BlockSize is too small; ` +
        `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`
    );
  }

  const blockList: string[] = [];
  const blockIDPrefix = generateUuid();
  let transferProgress: number = 0;

  const batch = new Batch(options.parallelism);
  for (let i = 0; i < numBlocks; i++) {
    batch.addOperation(
      async (): Promise<any> => {
        const blockID = generateBlockID(blockIDPrefix, i);
        const start = options.blockSize! * i;
        const end = i === numBlocks - 1 ? size : start + options.blockSize!;
        const contentLength = end - start;
        blockList.push(blockID);
        await blockBlobURL.stageBlock(
          aborter,
          blockID,
          blobFactory(start, contentLength),
          contentLength,
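
Both high-level snippets are truncated just before the final step: once every batch operation has staged its block, the collected IDs are committed so the blob becomes visible. Roughly, per the v10 @azure/storage-blob API:

// After all stageBlock calls complete, commit the staged block IDs.
await batch.do();
return blockBlobURL.commitBlockList(aborter, blockList, options);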