def rename(client, old, new, force):
    """Rename the workflow named <old> to <new>."""
    from renku.core.models.refs import LinkReference

    LinkReference(client=client, name=_ref(old)).rename(_ref(new), force=force)
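The `_ref` and `_deref` helpers used above and below are not part of these snippets; a minimal sketch consistent with how they are called, assuming workflow refs are namespaced under `workflows/`:

def _ref(name):
    """Prefix a workflow name with the refs namespace (assumed layout)."""
    return 'workflows/{0}'.format(name)

def _deref(ref):
    """Strip the assumed `workflows/` prefix from a ref name."""
    assert ref.startswith('workflows/')
    return ref[len('workflows/'):]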
def is_dataset_name_valid(name, safe=''):
    """A valid name is a valid Git reference name with no /."""
    # TODO make name an RFC 3986 compatible name and migrate old projects
    return (
        name and LinkReference.check_ref_format(name, no_slashes=True) and
        '/' not in name
    )
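Illustrative checks, assuming `check_ref_format` enforces Git's ref-name rules (e.g. no `..`):

# Illustrative only; outcomes assume Git ref-format semantics.
assert is_dataset_name_valid('my-dataset')        # plain name: accepted
assert not is_dataset_name_valid('my/dataset')    # slash: rejected
assert not is_dataset_name_valid('bad..name')     # '..' is illegal in Git refs
assert not is_dataset_name_valid('')              # empty name is falsy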
def remove(client, name):
    """Remove the remote named <name>."""
    from renku.core.models.refs import LinkReference

    LinkReference(client=client, name=_ref(name)).delete()
def get_dataset_path(self, name):
    """Get dataset path from name."""
    path = self.renku_datasets_path / name / self.METADATA
    if not path.exists():
        try:
            path = LinkReference(
                client=self, name='datasets/' + name
            ).reference
        except errors.ParameterError:
            return None
    return path
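For context, the `.reference` attribute resolves a link reference to the path it points at. A minimal sketch of that lookup, assuming refs are stored as symlinks under `.renku/refs/` (hypothetical helper, not the real class):

from pathlib import Path

def resolve_ref(repo_root, name):
    """Follow the symlink at .renku/refs/<name> to its target (assumed layout)."""
    link = Path(repo_root) / '.renku' / 'refs' / name
    if not link.is_symlink():
        raise FileNotFoundError(str(link))
    return link.resolve()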
# Fragment of a dataset-creation routine. The `except` below needs a `try`;
# the reconstructed body (creating the metadata directory) is an assumption
# based on the error being handled.
try:
    path.parent.mkdir(parents=True, exist_ok=False)
except FileExistsError:
    raise errors.DatasetExistsError(
        'Dataset with reference {} exists'.format(path.parent)
    )

with with_reference(path):
    dataset = Dataset(
        client=self,
        identifier=identifier,
        name=name,
        short_name=short_name,
        description=description,
        creator=creators
    )

dataset_ref = LinkReference.create(
    client=self, name='datasets/' + short_name
)
dataset_ref.set_reference(path)
dataset.to_yaml()

return dataset, path, dataset_ref
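What `LinkReference.create(...).set_reference(path)` amounts to on disk, sketched under the same assumed symlink layout (hypothetical helper):

import os
from pathlib import Path

def set_link_reference(repo_root, name, target, force=False):
    """Create .renku/refs/<name> as a relative symlink to target (assumed layout)."""
    link = Path(repo_root) / '.renku' / 'refs' / name
    link.parent.mkdir(parents=True, exist_ok=True)
    if link.is_symlink() or link.exists():
        if not force:
            raise FileExistsError(str(link))
        link.unlink()
    link.symlink_to(os.path.relpath(str(target), start=str(link.parent)))
    return link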
# Migration fragment: drop the submodule whose URL matches the dataset.
if Path(module.url).name == meta.name:
    module.remove()

# Migration fragment: repair file paths, rewrite the metadata to its new
# location and recreate the dataset ref so it points there.
for file_ in dataset.files:
    if not Path(file_.path).exists():
        expected_path = (
            client.path / 'data' / dataset.name / file_.path
        )
        if expected_path.exists():
            file_.path = expected_path.relative_to(client.path)

dataset.__reference__ = new_path
dataset.to_yaml()

Path(old_path).unlink()
ref = LinkReference.create(
    client=client,
    name='datasets/{0}'.format(name),
    force=True,
)
ref.set_reference(new_path)
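A quick illustration of the `relative_to` call above, with made-up paths:

from pathlib import Path

client_path = Path('/home/user/project')                 # hypothetical repo root
expected = client_path / 'data' / 'my-dataset' / 'file.csv'
print(expected.relative_to(client_path))                 # data/my-dataset/file.csv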
import shutil
import uuid
from pathlib import Path
from urllib.parse import quote

from renku.core.models.refs import LinkReference

def migrate_broken_dataset_paths(client):
    """Ensure all paths are using correct directory structure."""
    for dataset in client.datasets.values():
        dataset_path = Path(dataset.path)
        expected_path = (
            client.renku_datasets_path /
            Path(quote(dataset.identifier, safe=''))
        )

        # migrate the refs
        ref = LinkReference.create(
            client=client,
            name='datasets/{0}'.format(dataset.short_name),
            force=True,
        )
        ref.set_reference(expected_path / client.METADATA)

        if not dataset_path.exists():
            dataset_path = (
                client.renku_datasets_path / uuid.UUID(dataset.identifier).hex
            )
        if not expected_path.exists():
            shutil.move(str(dataset_path), str(expected_path))

        dataset.path = expected_path
        dataset.__reference__ = expected_path / client.METADATA
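The two candidate directory names above differ only in form; a quick example with a made-up identifier:

import uuid
from urllib.parse import quote

identifier = '9f3b5d3c-1a2b-4c5d-8e9f-0a1b2c3d4e5f'      # hypothetical
print(quote(identifier, safe=''))   # 9f3b5d3c-1a2b-4c5d-8e9f-0a1b2c3d4e5f (new layout)
print(uuid.UUID(identifier).hex)    # 9f3b5d3c1a2b4c5d8e9f0a1b2c3d4e5f (old layout)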
def workflow(ctx, client):
    """List or manage workflows with subcommands."""
    if ctx.invoked_subcommand is None:
        from renku.core.models.refs import LinkReference

        names = defaultdict(list)
        for ref in LinkReference.iter_items(client, common_path='workflows'):
            names[ref.reference.name].append(ref.name)

        for path in client.workflow_path.glob('*.cwl'):
            click.echo(
                '{path}: {names}'.format(
                    path=path.name,
                    names=', '.join(
                        click.style(_deref(name), fg='green')
                        for name in names[path.name]
                    ),
                )
            )
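Invoked without a subcommand, this prints one line per CWL file with its associated workflow names, along these lines (illustrative output, made-up names):

first_step.cwl: my-workflow
second_step.cwl: cleanup, cleanup-v2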
def paths(self):
    """Return all paths in the commit."""
    index = set()

    for file_ in self.commit.diff(self.commit.parents or NULL_TREE):
        # ignore deleted files (note they appear as ADDED
        # in this backwards diff)
        # TODO: set `deprecatedBy` for deleted paths
        if file_.change_type == 'A':
            continue

        path_ = Path(file_.a_path)

        is_dataset = self.client.DATASETS in str(path_)
        not_refs = LinkReference.REFS not in str(path_)
        does_not_exist = not path_.exists()

        if all([is_dataset, not_refs, does_not_exist]):
            uid = uuid.UUID(path_.parent.name)
            path_ = (
                Path(self.client.renku_home) / self.client.DATASETS /
                str(uid) / self.client.METADATA
            )

        index.add(str(path_))

    return index
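For context, the backwards-diff trick above comes from GitPython: diffing a commit against its parent (or NULL_TREE for a root commit) reverses the add/delete sense, so files the commit deleted surface as 'A'. A minimal standalone sketch, simplified to the first parent and assuming a repository at a made-up path:

from git import NULL_TREE, Repo

repo = Repo('/home/user/project')       # hypothetical repository
commit = repo.head.commit
parent = commit.parents[0] if commit.parents else NULL_TREE

for diff in commit.diff(parent):
    # With the commit as the 'a' side, 'A' means the file exists only on
    # the parent side, i.e. the commit deleted it.
    print(diff.change_type, diff.a_path)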