fout.write(target)
# Prepare input pickle files
input_file_names = []
output_file_names = []
for i, job in enumerate(jobs):
    filename = '{}/input-{}_{}.pickle'.format(path, suffix, i)
    input_file_names.append(filename)
    for j in range(n):
        output_file_names.append('output-{}_{}.{}.pickle'.format(suffix, i, j + 1))
    with open(filename, mode='wb') as fout:
        pickle.dump(job, fout, protocol=2)
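# A minimal sketch of a worker-side counterpart that could consume these
# files, assuming each pickled job is callable; run_job and the output-path
# convention are illustrative, not part of the original snippet.
import pickle

def run_job(input_path, output_path):
    with open(input_path, mode='rb') as fin:
        job = pickle.load(fin)  # protocol-2 pickles load transparently
    result = job()              # assumption: a job is a callable unit of work
    with open(output_path, mode='wb') as fout:
        pickle.dump(result, fout, protocol=2)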
# Create the blob client, for use in obtaining references to
# blob storage containers and uploading files to containers.
blob_client = azureblob.BlockBlobService(
    account_name=_STORAGE_ACCOUNT_NAME,
    account_key=_STORAGE_ACCOUNT_KEY)
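# A minimal upload sketch using this client and the legacy azure-storage-blob
# (< 12) API; the container name is illustrative and the loop reuses the
# input_file_names list built above.
import os

input_container_name = 'input'
blob_client.create_container(input_container_name, fail_on_exist=False)
for filename in input_file_names:
    blob_client.create_blob_from_path(
        input_container_name,
        os.path.basename(filename),  # blob name
        filename)                    # local file path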
n_jobs = -(-(len(jobs) * n) // nproc)  # negated floor division: ceil(len(jobs) * n / nproc)
_log.info('{} jobs will be created.'.format(n_jobs))
res = None
try:
    # Use the blob client to create the containers in Azure Storage if they
    # don't yet exist.
    app_container_name = 'application-{}'.format(suffix)
    input_container_name = 'input-{}'.format(suffix)
    output_container_name = 'output-{}'.format(suffix)
    # app_container_name = 'application'
    # input_container_name = 'input'
    # output_container_name = 'output'
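# The n_jobs expression above relies on negated floor division, which yields
# ceiling division on ints without importing math; for example:
assert -(-7 // 2) == 4  # equals math.ceil(7 / 2)
assert -(-8 // 2) == 4  # exact division is unaffected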
def setUp(self):
    super(StorageBlobEncryptionTest, self).setUp()
    self.bbs = self._create_storage_service(BlockBlobService, self.settings)
    self.pbs = self._create_storage_service(PageBlobService, self.settings)
    self.service_dict = {'block_blob': self.bbs,
                         'page_blob': self.pbs}
    self.container_name = self.get_resource_name('utcontainer')
    self.bytes = b'Foo'
    if not self.is_playback():
        self.bbs.create_container(self.container_name)
    # Lower the chunking thresholds so small test payloads still exercise
    # the multi-block and multi-page upload code paths.
    self.bbs.MAX_BLOCK_SIZE = 4 * 1024
    self.bbs.MAX_SINGLE_PUT_SIZE = 32 * 1024
    self.pbs.MAX_PAGE_SIZE = 4 * 1024
try:
    # for azure-storage-blob>=12:
    from azure.storage.blob import BlobServiceClient
    from azure.core.exceptions import AzureError
    s = BlobServiceClient.from_connection_string(conn_string)
    try:
        s.delete_container(container)
    except AzureError as ex:
        # ignore the ContainerNotFound error:
        if ex.error_code != 'ContainerNotFound':
            raise
except ImportError:
    # for azure-storage-blob<12
    from azure.storage.blob import BlockBlobService
    s = BlockBlobService(connection_string=conn_string)
    s.delete_container(container)
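# For azure-storage-blob >= 12 only, the same "ignore a missing container"
# behaviour can catch the specific exception type instead of inspecting
# error_code; the helper name below is illustrative:
from azure.core.exceptions import ResourceNotFoundError
from azure.storage.blob import BlobServiceClient

def delete_container_if_exists(conn_string, container):
    client = BlobServiceClient.from_connection_string(conn_string)
    try:
        client.delete_container(container)
    except ResourceNotFoundError:
        pass  # container was already gone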
        url = LogDownloaderURL.format(ACCOUNT_NAME=conn_string_dict['AccountName'],
                                      ACCOUNT_KEY=conn_string_dict['AccountKey'].replace('+', '%2b'),
                                      CONTAINER=container,
                                      START_DATE=start_date.strftime("%Y-%m-%d"),
                                      END_DATE=(end_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d"))
        r = requests.post(url)
        with open(output_fp, 'wb') as f:
            f.write(r.content)
        print(' Done!\n')
    except Exception as e:
        print('Error: {}'.format(e))
else:  # using the BlockBlobService Python API for cooked logs
    try:
        print('Establishing Azure Storage BlockBlobService connection using ', end='')
        if sas_token and account_name:
            print('sas token...')
            bbs = BlockBlobService(account_name=account_name, sas_token=sas_token)
        else:
            print('connection string...')
            bbs = BlockBlobService(connection_string=conn_string)
        # List all blobs and download them one by one
        print('Getting blobs list...')
        blobs = bbs.list_blobs(container)
    except Exception as e:
        if isinstance(e.args[0], str) and e.args[0].startswith('The specified container does not exist.'):
            print("Error: The specified container ({}) does not exist.".format(container))
        else:
            print("Error:\nType: {}\nArgs: {}".format(type(e).__name__, e.args))
        sys.exit()
    print('Iterating through blobs...\n')
    selected_fps = []
    for blob in blobs:
        if '/data/' not in blob.name:
            if verbose:
                print('{} - Skip: Non-data blob\n'.format(blob.name))
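# The loop above is truncated. For blobs that do pass the '/data/' filter, a
# download step with the legacy API's get_blob_to_path might look like this
# sketch (output_dir and the local naming scheme are assumptions):
import os

def download_blob(bbs, container, blob, output_dir, selected_fps):
    local_fp = os.path.join(output_dir, blob.name.replace('/', '_'))
    bbs.get_blob_to_path(container, blob.name, local_fp)
    selected_fps.append(local_fp)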
def get_conn(self):
    """Return the BlockBlobService object."""
    conn = self.get_connection(self.conn_id)
    service_options = conn.extra_dejson
    return BlockBlobService(account_name=conn.login,
                            account_key=conn.password, **service_options)
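# Illustrative usage, assuming this method lives on an Airflow-style hook
# (the hook class and connection id are placeholders). Any JSON stored in
# the connection's "extra" field, e.g. {"protocol": "https"}, is forwarded
# verbatim to BlockBlobService as keyword arguments.
hook = WasbHook(wasb_conn_id='wasb_default')
service = hook.get_conn()
service.create_blob_from_text('mycontainer', 'hello.txt', 'hello, world')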
def container_client(self):
    return BlockBlobService(self.storage_account, self.storage_key)
def get_blob_sas_uri(self, container_sas_uri, blob_name, create_if_not_exists=False):
    container_name = self.get_container_from_uri(container_sas_uri)
    sas_service = BlockBlobService(
        account_name=self.get_account_from_uri(container_sas_uri),
        sas_token=self.get_sas_key_from_uri(container_sas_uri))
    blob_uri = sas_service.make_blob_url(container_name, blob_name,
                                         sas_token=self.get_sas_key_from_uri(container_sas_uri))
    return blob_uri
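# make_blob_url composes the blob endpoint, container, blob name, and the
# appended SAS query string; an illustrative result (placeholder values):
# https://myaccount.blob.core.windows.net/mycontainer/myblob?sv=...&sig=...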
storageAccountId = batchaccount.auto_storage.storage_account_id
self.storage_account_resource_id = storageAccountId
parsedStorageAccountId = msrestazuretools.parse_resource_id(storageAccountId)
self.storage_account = parsedStorageAccountId['name']
self.storage_mgmt_client = StorageManagementClient(
    self.mgmtCredentials, str(subscription_id),
    base_url=self.aad_environment_provider.getResourceManager(self.aad_environment_id))
try:
    self.storage_key = self._call(
        self.storage_mgmt_client.storage_accounts.list_keys,
        parsedStorageAccountId['resource_group'],
        self.storage_account).keys[0].value
except Exception:
    self.remove_old_batch_account_from_config()
    raise
self._storage = storage.BlockBlobService(
    self.storage_account,
    self.storage_key)
self._storage.MAX_SINGLE_PUT_SIZE = 2 * 1024 * 1024
# TODO: refactor - move the shared block below into def configureClient(client)
self._client = batch.BatchExtensionsClient(
    self.batchCredentials,
    base_url=self.batch_url,
    storage_client=self._storage)
self._client.config.add_user_agent(self._user_agent)
self._client.threads = self.threads
self._log = self._configure_logging(self.logging_level)
def _get_client(self):
    client = BlockBlobService(account_name=get_env('INFRABOX_STORAGE_AZURE_ACCOUNT_NAME'),
                              account_key=get_env('INFRABOX_STORAGE_AZURE_ACCOUNT_KEY'))
    return client
'''
# Azure storage hosting
azure_account = parameters['azureaccount']
# Unique container name - requires non-collision *under a single Azure account*:
# 36^4 = 1679616 possibilities, Pr[collision] = 1 - ((36^4 - 1) / 36^4)^num_datasets_created
container_name = parameters['containername']
# Generate a machine-readable download link to the hosted dataset
azure_url = 'https://' + azure_account + '.blob.core.windows.net/' + container_name + '/output.json'
try:
    # Create the service used to call the Blob service for the storage account
    block_blob_service = BlockBlobService(account_name=azure_account, account_key=parameters['azurekey'])
    # Create the container
    block_blob_service.create_container(container_name)
    # Make the container publicly readable
    block_blob_service.set_container_acl(container_name, public_access=PublicAccess.Container)
    # Create and upload the blob
    block_blob_service.create_blob_from_path(container_name, 'output.json', 'output.json')
except Exception as e:
    print(e)
# Outlier Ventures' abstraction for easy registration with Keeper and Aquarius
reg.simple_register(parameters['name'],