How to use the dtlpy.projects repository in dtlpy

To help you get started, we've selected a few dtlpy.projects examples based on popular ways it is used in public projects.

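Before diving into the snippets below, here is the basic pattern they all share: authenticate, then fetch a project through the dl.projects repository. A minimal sketch; the project name is a placeholder.

import dtlpy as dl

# authenticate once; login() opens a browser, login_token() accepts a raw token
if dl.token_expired():
    dl.login()

# fetch a project by name or by id
project = dl.projects.get(project_name='my-project')  # placeholder name
# project = dl.projects.get(project_id='<project-id>')
print(project.name, project.id)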

github dataloop-ai / ZazuML / dataloop_services/zazu_module.py
def search(self, configs, progress=None):

        configs = ConfigSpec(configs)
        opt_model = OptModel()
        opt_model.add_child_spec(configs, 'configs')
        zazu = ZaZu(opt_model, remote=True)
        zazu.find_best_model()
        zazu.hp_search()
        checkpoint_paths_list = glob.glob('*checkpoint*.pt')
        save_info = {
            'package_name': self.package_name,
            'execution_id': progress.execution.id
        }

        project_name = opt_model.dataloop['project']
        project = dl.projects.get(project_name=project_name)

        # model_name = opt_model.name
        # model_obj = dl.models.get(model_name=model_name)
        logger.info('uploading checkpoints.....')
        for checkpoint_path in checkpoint_paths_list:
            # model_obj.checkpoints.upload(checkpoint_name=checkpoint_path.split('.')[0], local_path=checkpoint_path)
            project.artifacts.upload(filepath=checkpoint_path,
                                     package_name=save_info['package_name'],
                                     execution_id=save_info['execution_id'])

        logger.info('finished uploading checkpoints')
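The checkpoints uploaded above can later be pulled back with the matching artifacts call. A minimal sketch, assuming your dtlpy version exposes artifacts.download with these parameters; the project name, execution id, and local path are placeholders.

import dtlpy as dl

project = dl.projects.get(project_name='my-project')  # placeholder
# download the artifacts previously uploaded for a given package/execution
project.artifacts.download(package_name='zazuml',
                           execution_id='<execution-id>',
                           local_path='./checkpoints')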
github dataloop-ai / ZazuML / zazu.py
def maybe_do_deployment_stuff():
    if args.deploy:
        logger.info('about to launch 2 deployments, zazu and trial')
        with open('global_configs.json', 'r') as fp:
            global_project_name = json.load(fp)['project']

        global_project = dl.projects.get(project_name=global_project_name)
        global_package_obj = push_package(global_project)
        trial_service = zazu_service = None
        try:
            # predict_service = deploy_predict(package=global_package_obj)
            trial_service = deploy_model(package=global_package_obj)
            zazu_service = deploy_zazu(package=global_package_obj)
            logger.info('deployments launched successfully')
        except Exception:
            # roll back whatever was deployed before the failure;
            # the None guards avoid a NameError when an earlier deploy raised
            # predict_service.delete()
            if trial_service is not None:
                trial_service.delete()
            if zazu_service is not None:
                zazu_service.delete()

    if args.zazu_timer:
        logger.info('about to launch timer deployment')
        with open('global_configs.json', 'r') as fp:
            global_project_name = json.load(fp)['project']
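Both branches above read the project name from global_configs.json before calling dl.projects.get. A minimal sketch of that config-driven lookup, with a checkout added so later calls default to the same project (checkout is used the same way in create_trainer below):

import json
import dtlpy as dl

# read the project name from the same config file the deployment flow uses
with open('global_configs.json', 'r') as fp:
    project_name = json.load(fp)['project']

project = dl.projects.get(project_name=project_name)
dl.projects.checkout(project_name)  # make this the default project for later calls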
github dataloop-ai / ZazuML / dataloop_services/plugin_utils.py
def get_dataset_obj(dataloop_configs):
    try:
        # prefer the unambiguous id-based lookup when ids are present
        project_id = dataloop_configs['project_id']
        dataset_id = dataloop_configs['dataset_id']
        project = dl.projects.get(project_id=project_id)
        dataset_obj = project.datasets.get(dataset_id=dataset_id)
    except Exception:
        # fall back to name-based lookup when the ids are missing
        project_name = dataloop_configs['project']
        dataset_name = dataloop_configs['dataset']
        project = dl.projects.get(project_name=project_name)
        dataset_obj = project.datasets.get(dataset_name=dataset_name)
    return dataset_obj
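The fallback above works because dl.projects.get (and datasets.get one level down) accepts either an id or a name. A minimal sketch of both lookups; all identifiers are placeholders.

import dtlpy as dl

# id-based lookup is unambiguous and survives renames
project = dl.projects.get(project_id='<project-id>')
dataset = project.datasets.get(dataset_id='<dataset-id>')

# name-based lookup is friendlier in scripts and notebooks
project = dl.projects.get(project_name='my-project')
dataset = project.datasets.get(dataset_name='my-dataset')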
github dataloop-ai / ZazuML / download_tiny_coco.py
import dtlpy as dl
import os

path = '/home/noam/data'

dl.login_token('insert_token_here')
dl.setenv('dev')
# 'buffs_project' is a project name, so look it up by name rather than id
project = dl.projects.get(project_name='buffs_project')
dataset_obj = project.datasets.get(dataset_name='my_data')
# dataset_obj.items.upload(local_path='/Users/noam/tiny_coco', remote_path='')
dataset_obj.items.download(local_path=path)
os.rename(os.path.join(path, 'items', 'tiny_coco'), os.path.join(path, 'tiny_coco'))
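If you also need the labels, items.download can fetch annotations alongside the binaries. A hedged sketch, assuming a dtlpy version that exposes ViewAnnotationOptions; the names and paths are carried over from the snippet above.

import dtlpy as dl

project = dl.projects.get(project_name='buffs_project')
dataset_obj = project.datasets.get(dataset_name='my_data')
# download items together with their annotations as JSON files
dataset_obj.items.download(local_path='/home/noam/data',
                           annotation_options=dl.ViewAnnotationOptions.JSON)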
github dataloop-ai / ZazuML / launch_pad/launcher.py
def __init__(self, optimal_model, ongoing_trials=None, remote=False):
        self.optimal_model = optimal_model
        self.ongoing_trials = ongoing_trials
        self.remote = remote
        self.num_available_devices = torch.cuda.device_count()
        self.home_path = optimal_model.data['home_path']
        self.dataset_name = optimal_model.data['dataset_name']
        self.package_name = 'zazuml'
        if self.remote:
            dataset_obj = get_dataset_obj(optimal_model.dataloop)
            self.project = dl.projects.get(project_id=dataset_obj.projects[0])
            self.dataset_id = dataset_obj.id

            try:
                self.train_query = optimal_model.dataloop['train_query']
            except Exception:
                # no explicit train query supplied; fall back to an empty filter
                self.train_query = dl.Filters().prepare()['filter']

            try:
                # TODO: TRAIN QUERY IS STILL BEING COPIED
                try:
                    self.val_query = deepcopy(self.train_query)
                except Exception:
                    self.val_query = dl.Filters().prepare()
                self.val_query['filter']['$and'][0]['dir'] = optimal_model.dataloop['test_dir']
            except Exception:
                try:
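The train and validation queries above are raw dicts produced by dl.Filters().prepare(). A minimal sketch of building such a query; the '/train' directory is a placeholder.

import dtlpy as dl

filters = dl.Filters()
filters.add(field='dir', values='/train')  # restrict items to one remote directory
train_query = filters.prepare()['filter']  # the raw dict the launcher stores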
github dataloop-ai / ZazuML / create_plugins.py
def create_tuner():
    dl.setenv('prod')

    plugin_name = 'tuner'
    project_name = 'ZazuProject'

    project = dl.projects.get(project_name=project_name)

    ###############
    # push plugin #
    ###############
    plugin = project.plugins.push(plugin_name=plugin_name,
                                  src_path=os.getcwd(),
                                  inputs=[{"type": "Json",
                                           "name": "configs"}])

    plugin = project.plugins.get(plugin_name=plugin_name)

    #####################
    # create deployment #
    #####################
    deployment = plugin.deployments.deploy(deployment_name=plugin.name,
                                           plugin=plugin,
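Note that plugins and deployments are older dtlpy names; later SDK releases expose the same flow as packages and services. A rough sketch of the newer equivalent, with the plugin name and project name carried over as assumptions:

import os
import dtlpy as dl

project = dl.projects.get(project_name='ZazuProject')
# push the local source as a package, then deploy it as a service
package = project.packages.push(package_name='tuner', src_path=os.getcwd())
service = package.services.deploy(service_name=package.name)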
github dataloop-ai / ZazuML / delete_plugin.py
import dtlpy as dl
dl.setenv('dev')
deployment = dl.projects.get(project_name='buffs_project').deployments.get(deployment_name="trial")
deployment.delete()

# or

plugin = dl.projects.get(project_name='buffs_project').plugins.get(plugin_name="trial")
plugin.delete()
print("erased . . . ")
github dataloop-ai / ZazuML / create_plugins.py
def create_trainer():
    dl.setenv('dev')

    plugin_name = 'trainer'
    project_name = 'ZazuProject'

    project = dl.projects.get(project_name=project_name)
    dl.projects.checkout(project_name)
    dl.plugins.checkout(plugin_name)

    ###############
    # push plugin #
    ###############
    plugin = project.plugins.push(plugin_name=plugin_name,
                                  src_path=os.getcwd())

    plugin = project.plugins.get(plugin_name=plugin_name)

    #####################
    # create deployment #
    #####################
    deployment = plugin.deployments.deploy(deployment_name=plugin.name,
                                           plugin=plugin,
                                           runtime={'gpu': False,
github dataloop-ai / ZazuML / dataloop_services/trial_module.py
def run(self, dataset, train_query, val_query, model_specs, hp_values, configs=None, progress=None):
        maybe_download_data(dataset, train_query, val_query)

        # get project
        # project = dataset.project
        assert isinstance(dataset, dl.entities.Dataset)
        project = dl.projects.get(project_id=dataset.projects[0])

        # start tune
        cls = getattr(import_module('.adapter', 'ObjectDetNet.' + model_specs['name']), 'AdapterModel')
        # TODO: without roberto, work with path / or github
        inputs_dict = {'devices': {'gpu_index': 0}, 'model_specs': model_specs, 'hp_values': hp_values}
        # save the inputs so the adapter can pick them up from disk
        # TODO: make sure two runs don't execute concurrently and save the same thing twice
        torch.save(inputs_dict, 'checkpoint.pt')

        adapter = cls()
        adapter.load()
        if hasattr(adapter, 'reformat'):
            adapter.reformat()
        if hasattr(adapter, 'data_loader'):
            adapter.data_loader()
        if hasattr(adapter, 'preprocess'):