import os
import shutil
import tempfile
# `helpers` is the package's helpers module providing ensure_dir
# (e.g. `from datapackage import helpers`).

def test_ensure_dir():
    base_path = tempfile.mkdtemp()
    try:
        dir_path = os.path.join(base_path, 'dir')
        file_path = os.path.join(dir_path, 'file')
        assert not os.path.isdir(dir_path)
        # ensure_dir creates the missing parent directory for the file path
        helpers.ensure_dir(file_path)
        assert os.path.isdir(dir_path)
    finally:
        shutil.rmtree(base_path)
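For context, here is a minimal sketch of what a helper like ensure_dir is expected to do, inferred only from the test above; the actual implementation lives in the package's helpers module and may differ.

import os

def ensure_dir(filepath):
    """Create the parent directory of `filepath` if it does not already exist."""
    dirpath = os.path.dirname(filepath)
    if dirpath and not os.path.isdir(dirpath):
        os.makedirs(dirpath)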
# Get storage
plugin = import_module('jsontableschema.plugins.%s' % backend)
storage = plugin.Storage(**backend_options)

# Iterate over tables
resources = []
for table in storage.buckets:

    # Prepare
    schema = storage.describe(table)
    base = os.path.dirname(descriptor)
    path, name = _restore_path(table)
    fullpath = os.path.join(base, path)

    # Write data
    helpers.ensure_dir(fullpath)
    with io.open(fullpath, 'wb') as file:
        model = Schema(deepcopy(schema))
        data = storage.iter(table)
        # `csv` must be a unicode-aware writer (e.g. unicodecsv);
        # the stdlib csv.writer has no `encoding` argument
        writer = csv.writer(file, encoding='utf-8')
        writer.writerow(model.headers)
        for row in data:
            writer.writerow(row)

    # Add resource
    resource = {'schema': schema, 'path': path}
    if name is not None:
        resource['name'] = name
    resources.append(resource)

# Write descriptor
mode = 'w'
encoding = 'utf-8'
if six.PY2:
    mode = 'wb'
    encoding = None
resources = _restore_resources(resources)
helpers.ensure_dir(descriptor)
with io.open(descriptor,
             mode=mode,
             encoding=encoding) as file:
    descriptor = {
        'name': datapackage_name,
        'resources': resources,
    }
    json.dump(descriptor, file, indent=4)
return storage
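For orientation, the descriptor dumped above ends up with roughly the following shape; every value here is hypothetical and only illustrates the keys assembled in the loop.

# Illustrative shape of the dumped descriptor (all values hypothetical):
descriptor = {
    'name': 'my-datapackage',          # datapackage_name
    'resources': [
        {
            'schema': {'fields': []},  # schema returned by storage.describe()
            'path': 'table.csv',       # relative path from _restore_path()
            'name': 'table',           # present only when a name was restored
        },
    ],
}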
# Save resource to storage
if storage is not None:
    if self.tabular:
        self.infer()
        storage.create(target, self.schema.descriptor, force=True)
        storage.write(target, self.iter())

# Save descriptor to json
else:
    mode = 'w'
    encoding = 'utf-8'
    if six.PY2:
        mode = 'wb'
        encoding = None
    helpers.ensure_dir(target)
    with io.open(target, mode=mode, encoding=encoding) as file:
        json.dump(self.__current_descriptor, file, indent=4)
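Assuming the block above is the body of a resource-level save(target, storage=None) method, usage could look like the following sketch; the object and backend names are hypothetical, not taken from the source.

# Hypothetical usage sketch:
resource.save('resource.json')                 # writes the descriptor as JSON
resource.save('bucket_name', storage=backend)  # pushes tabular data into a storage backend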
    sources.append(source)  # closes a preceding loop (not shown) that collects table sources
schemas = list(map(_slugify_foreign_key, schemas))
storage.create(buckets, schemas, force=True)
for bucket in storage.buckets:
    source = sources[buckets.index(bucket)]
    storage.write(bucket, source())
return storage
# Save descriptor to json
elif str(target).endswith('.json'):
    mode = 'w'
    encoding = 'utf-8'
    if six.PY2:
        mode = 'wb'
        encoding = None
    helpers.ensure_dir(target)
    with io.open(target, mode=mode, encoding=encoding) as file:
        json.dump(self.__current_descriptor, file, indent=4)

# Save package to zip
else:
    try:
        with zipfile.ZipFile(target, 'w') as z:
            descriptor = json.loads(json.dumps(self.__current_descriptor))
            for index, resource in enumerate(self.resources):
                if not resource.name:
                    continue
                if not resource.local:
                    continue
                path = os.path.abspath(resource.source)
                basename = resource.descriptor.get('name')
                resource_format = resource.descriptor.get('format')
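Assuming the branches above sit inside a package-level save(target) method that dispatches on the target type, a hypothetical call would be as follows; the object name is illustrative only.

# Hypothetical usage sketch:
package.save('datapackage.json')  # takes the .json branch above
package.save('datapackage.zip')   # takes the zip branch above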