How to use the createQueueService function in azure-storage

To help you get started, we’ve selected a few azure-storage.createQueueService examples, based on popular ways the function is used in public projects.

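Before the project snippets, here is a minimal sketch of the usual pattern: create the service, make sure the target queue exists, then enqueue a message. The queue name and payload are placeholders for illustration and are not taken from the projects below.

const azure = require('azure-storage')

// With no arguments, createQueueService reads AZURE_STORAGE_CONNECTION_STRING
// (or AZURE_STORAGE_ACCOUNT / AZURE_STORAGE_ACCESS_KEY) from the environment.
const queueService = azure.createQueueService()

// 'example-queue' and the message body are placeholders.
queueService.createQueueIfNotExists('example-queue', (error) => {
  if (error) {
    return console.error('Could not create queue:', error.message)
  }
  queueService.createMessage('example-queue', JSON.stringify({ hello: 'world' }), (error) => {
    if (error) {
      return console.error('Could not enqueue message:', error.message)
    }
    console.log('Message enqueued')
  })
})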

github DFEAGILEDEVOPS/MTC - load-test/bin/init-dev-storage.js (View on GitHub)
require('dotenv').config()
const azure = require('azure-storage')
const bluebird = require('bluebird')
const names = require('../../deploy/storage/tables-queues.json')

if (!process.env.AZURE_STORAGE_CONNECTION_STRING) {
  process.exitCode = -1
  console.error('env var $AZURE_STORAGE_CONNECTION_STRING is required')
}

const queueNames = names['queues']
const tableNames = names['tables']
const poisonQueues = queueNames.map(q => q + '-poison')
const allQueues = queueNames.concat(poisonQueues)
const tableService = getPromisifiedService(azure.createTableService())
const queueService = getPromisifiedService(azure.createQueueService())

async function deleteTableEntities (tables) {
  const deletions = []
  for (let index = 0; index < tables.length; index++) {
    const table = tables[index]
    const query = new azure.TableQuery() // Azure Table Storage has a max of 1000 records returned
    let done = false
    let batch = 1
    while (!done) {
      const data = await tableService.queryEntitiesAsync(table, query, null)
      const entities = data.result.entries
      if (entities.length === 0) {
        done = true
      }
      console.log(`Found ${entities.length} entities to delete in batch ${batch++} from ${table}`)
      entities.forEach(entity => {
github DFEAGILEDEVOPS/MTC - admin/bin/init-dev-storage.js (View on GitHub) contains a near-identical copy of the snippet above, so it is not repeated here.
github DFEAGILEDEVOPS/MTC - _spikes-poc/sas/server/generate-sas-token.js (View on GitHub)
'use strict'

require('dotenv').config()
const azure = require('azure-storage')

const storageConnection = process.env.AZURE_STORAGE_CONNECTION_STRING
if (!storageConnection) {
  process.exitCode = -1
  console.error('env var $AZURE_STORAGE_CONNECTION_STRING is required')
}

const queueName = process.env.QUEUE_NAME || 'completed-checks'
const permissions = azure.QueueUtilities.SharedAccessPermissions.ADD

var queueService = azure.createQueueService(storageConnection)
// setCors(queueService)

const generateSasToken = () => {
  // Create a SAS token that expires in an hour
  // Set start time to five minutes ago to avoid clock skew.
  var startDate = new Date()
  startDate.setMinutes(startDate.getMinutes() - 5)
  var expiryDate = new Date(startDate)
  expiryDate.setMinutes(startDate.getMinutes() + 60)

  var sharedAccessPolicy = {
    AccessPolicy: {
      Permissions: permissions,
      Start: startDate,
      Expiry: expiryDate
    }
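The snippet above is cut off inside the shared access policy. A typical way to finish this kind of helper (a sketch only, not the project's verbatim code) is to hand the policy to the queue service's generateSharedAccessSignature and combine the resulting token with the queue URL:

// Continuation sketch: assumes queueService, queueName and sharedAccessPolicy
// from the truncated snippet above.
const sasToken = queueService.generateSharedAccessSignature(queueName, sharedAccessPolicy)
const sasUrl = queueService.getUrl(queueName, sasToken)
console.log('Queue SAS URL:', sasUrl)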
github teamdigitale/io-functions - lib/queue_monitor.ts (View on GitHub)
import * as winston from "winston";

import { Context } from "@azure/functions";
import { createQueueService } from "azure-storage";
import { TelemetryClient } from "io-functions-commons/dist/src/utils/application_insights";
import { getRequiredStringEnv } from "io-functions-commons/dist/src/utils/env";
import { configureAzureContextTransport } from "io-functions-commons/dist/src/utils/logging";
import { MESSAGE_QUEUE_NAME } from "./created_message_queue_handler";
import { EMAIL_NOTIFICATION_QUEUE_NAME } from "./emailnotifications_queue_handler";
import { getQueueMetadata } from "./utils/azure_queues";
import { WEBHOOK_NOTIFICATION_QUEUE_NAME } from "./webhook_queue_handler";

const queueConnectionString = getRequiredStringEnv("QueueStorageConnection");
const queueService = createQueueService(queueConnectionString);

// Whether we're in a production environment
const isProduction = process.env.NODE_ENV === "production";

const appInsightsClient = new TelemetryClient();

// needed otherwise AI will wait for the batching loop to end
// see https://github.com/Microsoft/ApplicationInsights-node.js/issues/390
// tslint:disable-next-line:no-object-mutation
appInsightsClient.config.maxBatchSize = 1;

/**
 * A function to store the length of Azure Storage Queues
 * into Application Insights Metrics.
 *
 * To query these values in the Analytics panel type:
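The docblock above is truncated, but it describes storing queue lengths as Application Insights metrics. Underneath, azure-storage exposes that length through getQueueMetadata; a minimal sketch without the project's io-functions-commons wrappers (the queue name and metric name are placeholders) might look like this:

// Read the approximate length of one queue and report it as a metric.
// trackMetric({ name, value }) follows the applicationinsights TelemetryClient
// API; adjust if the project's wrapper differs.
queueService.getQueueMetadata("some-queue", (error, result) => {
  if (error) {
    return console.error("getQueueMetadata failed:", error.message);
  }
  appInsightsClient.trackMetric({
    name: "queue.some-queue.length",
    value: result.approximateMessageCount
  });
});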
github DFEAGILEDEVOPS/MTC - pupil-api/src/services/azure-queue.service.ts (View on GitHub)
export function addMessage (queueName: string, payload: object, queueService?: any) {
  if (queueName.length < 1) {
    throw new Error('Missing queueName')
  }

  if (!queueService) {
    // If we have not been provided with a queueService, assume we want an Azure one.
    if (!azureQueueService) {
      azureQueueService = azure.createQueueService()
    }
    queueService = azureQueueService
  }

  const message = JSON.stringify(payload)
  const encodedMessage = Buffer.from(message).toString('base64')
  queueService.createMessage(queueName, encodedMessage, function (error, result, response) {
    if (error) {
      logger.error(`Error injecting message into queue [${queueName}]: ${error.message}`)
      logger.error(error)
    }
  })
}
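Calling the helper is then a one-liner; the queue name and payload below are invented for illustration, and a stub queueService with a createMessage method can be injected as the third argument in tests:

// Uses the module's default Azure-backed queue service.
addMessage('check-started', { checkCode: 'abc-123', version: 1 })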
github CatalystCode/project-fortis - project-fortis-services/src/clients/storage/AzureQueueManager.js (View on GitHub)
function getAzureQueueService(){
  let queueSvc = azure.createQueueService();
  queueSvc.messageEncoder = new TextBase64QueueMessageEncoder();
  queueSvc.createQueueIfNotExists(PRE_NLP_QUEUE, (error, result, response) => { // eslint-disable-line no-unused-vars
    if (error) {
      RaiseException(`Unable to create new azure queue ${PRE_NLP_QUEUE}`);
    }
  });

  return queueSvc;
}
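Because messageEncoder is swapped for TextBase64QueueMessageEncoder, callers can pass plain strings to createMessage and the service base64-encodes them on the wire. A hypothetical call site (PRE_NLP_QUEUE comes from the module above; the payload shape is invented):

const queueSvc = getAzureQueueService();
queueSvc.createMessage(PRE_NLP_QUEUE, JSON.stringify({ source: 'example', id: '42' }), (error) => {
  if (error) console.error('enqueue failed:', error.message);
});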
github DFEAGILEDEVOPS/MTC - functions/lib/azure-storage-helper.js (View on GitHub)
getPromisifiedAzureQueueService: function getPromisifiedAzureQueueService () {
    if (azureQueueService) {
      return azureQueueService
    }
    azureQueueService = azureStorage.createQueueService()
    bluebird.promisifyAll(azureQueueService, {
      promisifier: (originalFunction) => function (...args) {
        return new Promise((resolve, reject) => {
          try {
            originalFunction.call(this, ...args, (error, result, response) => {
              if (error) {
                return reject(error)
              }
              resolve({ result, response })
            })
          } catch (error) {
            reject(error)
          }
        })
      }
    })
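bluebird's promisifyAll attaches an Async suffix to every method, and the custom promisifier above resolves with { result, response }, so callers can await the service. A hypothetical usage (in the project the helper hangs off the module object; the queue name is a placeholder):

async function enqueueExample () {
  const queueService = getPromisifiedAzureQueueService()
  const { result } = await queueService.createMessageAsync('example-queue', 'hello')
  console.log('Enqueued message id:', result.messageId)
}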
github cloudlibz/clocal-azure - example/azure-storage/storage-example.js (View on GitHub)
const azure = require('azure-storage');
const accountKey = 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==';
const accountName = 'devstoreaccount1';

let devStoreCreds = azure.generateDevelopmentStorageCredentials();

const blobService = azure.createBlobService(devStoreCreds);
const tableService = azure.createTableService(devStoreCreds);
const queueService = azure.createQueueService(devStoreCreds);

/*
Use to debug the results

blobService.logger.level = azure.Logger.LogLevels.DEBUG
tableService.logger.level = azure.Logger.LogLevels.DEBUG
queueService.logger.level = azure.Logger.LogLevels.DEBUG
*/

const container = 'taskcontainer';
const task = 'taskblob';
const filename = 'data.txt';

let proxyBlob = {
    protocol: 'http:',
    host: '127.0.0.1',
github DFEAGILEDEVOPS/MTC - admin/services/azure-queue.service.js (View on GitHub)
getPromisifiedAzureQueueService: function getPromisifiedAzureQueueService () {
    if (azureQueueServiceAsync) {
      return azureQueueServiceAsync
    }
    azureQueueServiceAsync = azure.createQueueService()
    bluebird.promisifyAll(azureQueueServiceAsync, {
      promisifier: (originalFunction) => function (...args) {
        return new Promise((resolve, reject) => {
          try {
            originalFunction.call(this, ...args, (error, result, response) => {
              if (error) {
                return reject(error)
              }
              resolve({ result, response })
            })
          } catch (error) {
            reject(error)
          }
        })
      }
    })
github microsoft/vscode - build/tfs/darwin/enqueue.ts (View on GitHub)
function queueSigningRequest(quality: string, commit: string): Promise<void> {
	const retryOperations = new azure.ExponentialRetryPolicyFilter();
	const queueSvc = azure
		.createQueueService(process.env['AZURE_STORAGE_ACCOUNT_2'], process.env['AZURE_STORAGE_ACCESS_KEY_2'])
		.withFilter(retryOperations);

	queueSvc.messageEncoder = new azure.QueueMessageEncoder.TextBase64QueueMessageEncoder();

	const message = `${quality}/${commit}`;

	return new Promise<void>((c, e) => queueSvc.createMessage('sign-darwin', message, err => err ? e(err) : c()));
}
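A caller would then await the returned promise; the quality and commit values below are placeholders:

// Placeholder arguments for illustration only.
queueSigningRequest('insider', 'abcdef0123456789')
	.then(() => console.log('signing request queued'))
	.catch(err => console.error(err));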