How to use the dtlpy.BaseServiceRunner class in dtlpy

To help you get started, we've selected a few dtlpy.BaseServiceRunner examples based on popular ways it is used in public projects.

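Before diving into the examples, the basic pattern is small: a service runner is a class that inherits from dl.BaseServiceRunner, and each public method becomes a function the Dataloop platform can invoke once the package is deployed. The sketch below is a minimal, illustrative runner; the method body, names, and metadata key are assumptions for demonstration, not taken from the examples that follow.

import dtlpy as dl


class ServiceRunner(dl.BaseServiceRunner):
    """Minimal runner: each public method can be exposed as a service function."""

    def __init__(self, model_name='my-model'):
        # __init__ runs once when the service container starts
        self.model_name = model_name

    def run(self, item: dl.Item, progress=None):
        # 'item' arrives as a dl.Item when the function is wired with an Item input;
        # 'progress' can be used to report status back to the platform
        item.metadata['user'] = {'processed_by': self.model_name}
        return item.update()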

github dataloop-ai / ZazuML / dataloop_services / predict_module.py
import dtlpy as dl
import logging
import os
import json
import torch
from logging_utils import logginger
from importlib import import_module
from dataloop_services.plugin_utils import maybe_download_pred_data

logger = logging.getLogger(name=__name__)


class ServiceRunner(dl.BaseServiceRunner):
    """
    Plugin runner class

    """
    def __init__(self, package_name):
        self.package_name = package_name
        self.logger = logginger(__name__)

    def run(self, dataset, val_query, checkpoint_path, model_specs, configs=None, progress=None):
        self.logger.info('checkpoint path: ' + str(checkpoint_path))
        self.logger.info('Beginning to download checkpoint')
        dataset.items.get(filepath='/checkpoints').download(local_path=os.getcwd())
        self.logger.info('checkpoint downloaded, dir is here' + str(os.listdir('.')))
        self.logger.info('downloading data')
        maybe_download_pred_data(dataset, val_query)
        self.logger.info('data downloaded')
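
The excerpt above stops inside run, but the surrounding ZazuML project deploys runners like this as Dataloop packages. The sketch below shows one plausible way to describe and deploy the run entry point with dtlpy; the project name, package name, and input list are assumptions based on the method signature, not the project's actual deployment code.

import dtlpy as dl

project = dl.projects.get(project_name='my-project')  # placeholder project name

# describe the runner so the platform knows which class and methods to call
module = dl.PackageModule(
    entry_point='dataloop_services/predict_module.py',
    class_name='ServiceRunner',
    init_inputs=[dl.FunctionIO(type='Json', name='package_name')],
    functions=[dl.PackageFunction(
        name='run',
        inputs=[dl.FunctionIO(type='Dataset', name='dataset'),
                dl.FunctionIO(type='Json', name='val_query'),
                dl.FunctionIO(type='Json', name='checkpoint_path'),
                dl.FunctionIO(type='Json', name='model_specs')])])

package = project.packages.push(package_name='predict', src_path='.', modules=[module])
service = package.services.deploy(
    service_name='predict',
    init_input=dl.FunctionIO(type='Json', name='package_name', value='predict'))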

github dataloop-ai / ZazuML / dataloop_services / zazu_module.py
import logging
import os
import sys
import glob
import dtlpy as dl
from spec import ConfigSpec, OptModel
from zazu import ZaZu
from logging_utils import logginger, init_logging

logger = init_logging(__name__)

class ServiceRunner(dl.BaseServiceRunner):
    """
    Plugin runner class

    """

    def __init__(self, package_name):
        logging.getLogger('dtlpy').setLevel(logging.WARN)
        self.package_name = package_name
        self.this_path = os.getcwd()
        logger.info(self.package_name + ' initialized')

    def search(self, configs, progress=None):

        configs = ConfigSpec(configs)
        opt_model = OptModel()
        opt_model.add_child_spec(configs, 'configs')
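
Once a runner like this is deployed, its methods are called remotely rather than imported. Here is a hedged sketch of invoking the search function above through an execution; the service name, input payload, and wait/print calls illustrate the dtlpy execution API as commonly used and are not code from ZazuML.

import dtlpy as dl

service = dl.services.get('zazu')  # assumes a service with this name is deployed
execution = service.execute(
    function_name='search',
    execution_input=[dl.FunctionIO(type='Json', name='configs', value={'max_trials': 5})],
    project_id=service.project_id)
execution = execution.wait()       # block until the remote run finishes
print(execution.latest_status)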

github dataloop-ai / ZazuML / dataloop_services / zazu_timer_module.py
import logging
import dtlpy as dl
import json
import torch
import os
from time import sleep
from dataloop_services.plugin_utils import maybe_download_pred_data, download_and_organize
from dataloop_services import deploy_predict_item, create_trigger
from eval import precision_recall_compute
from logging_utils import logginger, init_logging

logger = logging.getLogger(__name__)


class ServiceRunner(dl.BaseServiceRunner):
    """
    Plugin runner class

    """

    def __init__(self, configs, time, test_dataset_id, query):
        logger.info('dtlpy version: ' + str(dl.__version__))
        logger.info('dtlpy info: ' + str(dl.info()))
        time = int(time)
        dl.setenv('prod')
        configs = json.loads(configs)
        query = json.loads(query)
        self.configs_input = dl.FunctionIO(type='Json', name='configs', value=configs)
        self.service = dl.services.get('zazu')
        project_name = configs['dataloop']['project']
        self.project = dl.projects.get(project_name)
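
This timer module imports a create_trigger helper whose implementation isn't shown here. For orientation, the sketch below shows how a trigger is typically created directly with dtlpy so that a deployed runner's function fires when new items appear; the trigger name, function name, and filter are assumptions, not ZazuML's actual trigger configuration.

import dtlpy as dl

service = dl.services.get('zazu')  # placeholder: an already-deployed service

# run the service's 'run' function whenever an item is created under /incoming
filters = dl.Filters(field='dir', values='/incoming')
trigger = service.triggers.create(
    name='zazu-on-item-created',
    function_name='run',
    resource=dl.TriggerResource.ITEM,
    actions=[dl.TriggerAction.CREATED],
    filters=filters)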

github dataloop-ai / ZazuML / dataloop_services / trial_module.py
import logging
import os
import torch
import json
import dtlpy as dl
from importlib import import_module
from dataloop_services.plugin_utils import maybe_download_data
from logging_utils import init_logging


class ServiceRunner(dl.BaseServiceRunner):
    """
    Plugin runner class

    """

    def __init__(self, package_name):
        logging.getLogger('dtlpy').setLevel(logging.WARN)
        self.package_name = package_name
        self.path_to_metrics = 'metrics.json'
        self.path_to_tensorboard_dir = 'runs'
        self.path_to_logs = 'logger.conf'
        self.logger = init_logging(__name__, filename=self.path_to_logs)
        self.logger.info(self.package_name + ' initialized')


    def run(self, dataset, train_query, val_query, model_specs, hp_values, configs=None, progress=None):
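
Because a service runner is an ordinary Python class, it can also be exercised locally before being packaged and deployed, which is useful while iterating on methods like the run above. A rough sketch follows, with a placeholder dataset id and illustrative argument values; the real shapes expected by run aren't visible in this excerpt.

import dtlpy as dl
from dataloop_services.trial_module import ServiceRunner

dl.setenv('prod')
runner = ServiceRunner(package_name='trial')

dataset = dl.datasets.get(dataset_id='my-dataset-id')  # placeholder id
runner.run(dataset=dataset,
           train_query={}, val_query={},         # illustrative empty queries
           model_specs={'name': 'retinanet'},    # illustrative model spec
           hp_values={'learning_rate': 1e-4})    # illustrative hyperparameters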