How to use the dtlpy.projects.get function in dtlpy

To help you get started, we’ve selected a few dtlpy examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Source: dataloop-ai/ZazuML — dataloop_services/predict_module.py (view on GitHub)
def run(self, dataset, val_query, checkpoint_path, model_specs, configs=None, progress=None):
        """Download a checkpoint and validation data, run the model adapter's
        prediction, and upload the output as a project artifact.

        :param dataset: dl.entities.Dataset holding checkpoints and items
        :param val_query: query selecting the validation items to download
        :param checkpoint_path: dict containing a 'checkpoint_path' entry
        :param model_specs: dict with the model 'name' and data 'home_path'
        :param configs: unused here; kept for service-signature compatibility
        :param progress: execution progress object; its execution id tags the artifact
        :raises TypeError: if dataset is not a dl.entities.Dataset
        """
        # Validate BEFORE any use of `dataset` (the original asserted only after
        # already calling dataset.items.get). `assert` is stripped under -O, so
        # raise explicitly instead.
        if not isinstance(dataset, dl.entities.Dataset):
            raise TypeError('dataset must be a dl.entities.Dataset, got {}'.format(type(dataset)))

        self.logger.info('checkpoint path: ' + str(checkpoint_path))
        self.logger.info('Beginning to download checkpoint')
        dataset.items.get(filepath='/checkpoints').download(local_path=os.getcwd())
        self.logger.info('checkpoint downloaded, dir is here' + str(os.listdir('.')))
        self.logger.info('downloading data')
        maybe_download_pred_data(dataset, val_query)
        self.logger.info('data downloaded')
        project = dl.projects.get(project_id=dataset.projects[0])
        # Dynamically load the model-specific adapter: ObjectDetNet.<name>.adapter.AdapterModel
        cls = getattr(import_module('.adapter', 'ObjectDetNet.' + model_specs['name']), 'AdapterModel')

        home_path = model_specs['data']['home_path']

        # Persist the prediction inputs so the adapter can reload them from disk.
        inputs_dict = {'checkpoint_path': checkpoint_path['checkpoint_path'], 'home_path': home_path}
        torch.save(inputs_dict, 'predict_checkpoint.pt')

        adapter = cls()
        output_path = adapter.predict(home_path=home_path, checkpoint_path=checkpoint_path['checkpoint_path'])
        # Tag the uploaded artifact with this package/execution for traceability.
        save_info = {
            'package_name': self.package_name,
            'execution_id': progress.execution.id
        }
        project.artifacts.upload(filepath=output_path,
                                 package_name=save_info['package_name'],
                                 execution_id=save_info['execution_id'])
Source: dataloop-ai/ZazuML — create_plugins.py (view on GitHub)
def create_trainer():
    dl.setenv('dev')

    plugin_name = 'trainer'
    project_name = 'ZazuProject'

    project = dl.projects.get(project_name=project_name)
    dl.projects.checkout(project_name)
    dl.plugins.checkout(plugin_name)

    ###############
    # push plugin #
    ###############
    plugin = project.plugins.push(plugin_name=plugin_name,
                                  src_path=os.getcwd())

    plugin = project.plugins.get(plugin_name=plugin_name)

    #####################
    # create deployment #
    #####################
    deployment = plugin.deployments.deploy(deployment_name=plugin.name,
                                           plugin=plugin,
Source: dataloop-ai/ZazuML — dataloop_services/zazu_timer_module.py (view on GitHub)
def __init__(self, configs, time, test_dataset_id, query):
        """Initialize the Zazu timer service.

        :param configs: JSON string of service configuration (parsed below)
        :param time: polling interval; coerced to int
        :param test_dataset_id: id of the dataset used for evaluation
        :param query: JSON string of a filter selecting validation items
        """
        logger.info('dtlpy version: ' + str(dl.__version__))
        logger.info('dtlpy info: ' + str(dl.info()))
        time = int(time)
        dl.setenv('prod')
        # Both configs and query arrive as JSON strings from the service payload.
        configs = json.loads(configs)
        query = json.loads(query)
        self.configs_input = dl.FunctionIO(type='Json', name='configs', value=configs)
        self.service = dl.services.get('zazu')
        project_name = configs['dataloop']['project']
        self.project = dl.projects.get(project_name)
        test_dataset = self.project.datasets.get(dataset_id=test_dataset_id)
        maybe_download_pred_data(dataset_obj=test_dataset, val_query=query)

        # add gt annotations
        filters = dl.Filters()
        # NOTE(review): assumes `query` is a full DQL dict usable as a custom filter — confirm.
        filters.custom_filter = query
        dataset_name = test_dataset.name
        # Data is organized under a directory named after the dataset in the CWD.
        path_to_dataset = os.path.join(os.getcwd(), dataset_name)
        # only download if doesnt exist
        if not os.path.exists(path_to_dataset):
            download_and_organize(path_to_dataset=path_to_dataset, dataset_obj=test_dataset, filters=filters)

        json_file_path = os.path.join(path_to_dataset, 'json')
        self.model_obj = self.project.models.get(model_name='retinanet')
        # Build a local model adapter in the working directory.
        self.adapter = self.model_obj.build(local_path=os.getcwd())
        logger.info('model built')
Source: dataloop-ai/ZazuML — delete_plugin.py (view on GitHub)
import dtlpy as dl

# Work against the dev environment.
dl.setenv('dev')

# Fetch the project once instead of issuing a separate dl.projects.get
# network call for each entity lookup.
project = dl.projects.get(project_name='buffs_project')

# Delete the trial deployment...
deployment = project.deployments.get(deployment_name="trial")
deployment.delete()

# or

# ...or delete the trial plugin itself.
plugin = project.plugins.get(plugin_name="trial")
plugin.delete()
print("erased . . . ")
Source: dataloop-ai/ZazuML — launch_pad/launcher.py (view on GitHub)
try:
                # TODO: TRAIN QUERY IS STILL BEING COPPIED
                try:
                    self.val_query = deepcopy(self.train_query)
                except:
                    self.val_query = dl.Filters().prepare()
                self.val_query['filter']['$and'][0]['dir'] = optimal_model.dataloop['test_dir']
            except:
                try:
                    self.val_query = optimal_model.dataloop['val_query']
                except:
                    self.val_query = dl.Filters().prepare()['filter']

            with open('global_configs.json', 'r') as fp:
                global_project_name = json.load(fp)['project']
            self.global_project = dl.projects.get(project_name=global_project_name)


        # TODO: dont convert here
        if self.optimal_model.name == 'yolov3':
            if self.optimal_model.data['annotation_type'] == 'coco':
                self._convert_coco_to_yolo_format()
                self.optimal_model.data['annotation_type'] = 'yolo'