How to use the deephyper.search.util module in deephyper

To help you get started, we've selected a few deephyper examples based on popular ways deephyper.search.util is used in public projects.

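The excerpts below lean on a handful of helpers from deephyper.search.util: conf_logger for logger setup, generic_loader and load_attr_from for resolving objects from dotted import paths, and elapsed_timer / pretty_time / DelayTimer for service loops. As a rough sketch of how the pieces fit together (signatures inferred from the excerpts themselves, not from separate documentation; 'mypackage.run.run' is a placeholder path):

from deephyper.search import util

# Module-level logger, configured the same way the excerpts do
logger = util.conf_logger('deephyper.search.example')

# Resolve a run() function from a dotted path
run_func = util.generic_loader('mypackage.run.run', 'run')

# Service loop: yields elapsed seconds roughly once per service_period
for elapsed_seconds in util.elapsed_timer(max_runtime_minutes=None, service_period=2):
    logger.info(f"Elapsed time: {util.pretty_time(elapsed_seconds)}")
    break  # one iteration is enough for the sketch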

github deephyper / deephyper / search / async-search.py — View on GitHub
    if chk_path is not None:  # reconstructed guard; the excerpt begins mid-'if'
        cfg, optimizer, evaluator = load_checkpoint(chk_path)
    else:
        cfg = util.OptConfig(args)
        optimizer = Optimizer(
            cfg.space,
            base_estimator=ExtremeGradientBoostingQuantileRegressor(),
            acq_optimizer='sampling',
            acq_func='LCB',
            acq_func_kwargs={'kappa':0},
            random_state=SEED,
            n_initial_points=args.num_workers
        )
        evaluator = evaluate.create_evaluator(cfg)
        logger.info(f"Starting new run with {cfg.benchmark_module_name}")

    timer = util.elapsed_timer(max_runtime_minutes=None, service_period=SERVICE_PERIOD)
    chkpoint_counter = 0

    # Gracefully handle shutdown
    def handler(signum, stack):
        logger.info('Received SIGINT/SIGTERM')
        save_checkpoint(cfg, optimizer, evaluator)
        sys.exit(0)

    signal.signal(signal.SIGINT, handler)
    signal.signal(signal.SIGTERM, handler)

    # MAIN LOOP
    logger.info("Hyperopt driver starting")

    for elapsed_seconds in timer:
        logger.info(f"Elapsed time: {util.pretty_time(elapsed_seconds)}")
github deephyper / deephyper / deephyper / search / nas / model / train_utils.py — View on GitHub
def selectMetric(name):
    """Return the metric identified by ``name``.

    Args:
        name (str): a key of the module-level ``metrics`` dict, a dotted
            import path, or the name of a built-in Keras metric.

    Returns:
        A metric callable, or ``name`` itself if Keras should resolve it.
    """
    if metrics.get(name) is None:
        try:
            return util.load_attr_from(name)
        except Exception:
            return name  # assume it is a name the Keras metrics registry can resolve
    else:
        return metrics[name]
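In use, selectMetric falls through three cases: a key of the module-level metrics dict, a dotted path resolvable by util.load_attr_from, or a bare string handed back for Keras to interpret. Illustrative calls (which branch fires depends on the contents of metrics; the dotted path is a placeholder):

m = selectMetric('mean_squared_error')        # returned from the metrics dict if present
m = selectMetric('sklearn.metrics.r2_score')  # otherwise imported via util.load_attr_from
m = selectMetric('acc')                       # otherwise returned as-is for Keras to resolve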
github deephyper / deephyper / model / nas.py — View on GitHub
import os
import sys
import time

import numpy as np  # imports reconstructed; the excerpt starts mid-file

np.random.seed(1000003)

HERE = os.path.dirname(os.path.abspath(__file__)) # search dir
top  = os.path.dirname(os.path.dirname(HERE)) # directory containing deephyper
sys.path.append(top)

import deephyper.model.arch as a
from deephyper.model.builder.tf import BasicBuilder
from deephyper.model.trainer.tf import BasicTrainer
from deephyper.model.utilities.conversions import action2dict_v2
from deephyper.search import util
from deephyper.search.nas.reinforce.tf import BasicReinforce
from deephyper.model.utilities.nas_cmdline import create_parser

logger = util.conf_logger('deephyper.search.nas')

def run(param_dict):
    config = param_dict

    logger.debug(f'[STEP] global_step = {config["global_step"]}')
    logger.debug(f'[STEP] num worker = {config["num_worker"]}')
    logger.debug(f'[STEP] step = {config["step"]}')
    logger.debug('[PARAM] Creating StateSpace')
    config['state_space'] = a.StateSpace(config['state_space'])
    logger.debug('[PARAM] StateSpace created')

    logger.debug('[PARAM] Loading data')
    load_data = import_module(param_dict['load_data_module_name']).load_data

    # Loading data
    (t_X, t_y), (v_X, v_y) = load_data(dest='DATA')
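The load_data_module_name indirection means any module exposing a compatible load_data can be swapped in. Based on the unpacking above, a minimal hypothetical module looks like:

# mypackage/my_data.py (hypothetical)
import numpy as np

def load_data(dest='DATA'):
    """Return ((train_X, train_y), (valid_X, valid_y)) as the caller above expects."""
    rng = np.random.RandomState(42)
    t_X, t_y = rng.rand(100, 8), rng.rand(100)
    v_X, v_y = rng.rand(20, 8), rng.rand(20)
    return (t_X, t_y), (v_X, v_y)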
github deephyper / deephyper / model / ptb_nas.py — View on GitHub
import os
import sys
import time

import numpy as np  # imports reconstructed; the excerpt starts mid-file

np.random.seed(1000003)

HERE = os.path.dirname(os.path.abspath(__file__)) # search dir
top  = os.path.dirname(os.path.dirname(HERE)) # directory containing deephyper
sys.path.append(top)

import deephyper.model.arch as a
from deephyper.model.builder.tf import BasicBuilder
from deephyper.model.trainer.tf import BasicTrainer
from deephyper.model.utilities.conversions import action2dict_v2
from deephyper.search import util
from deephyper.search.nas.reinforce.tf import BasicReinforce
from deephyper.model.utilities.nas_cmdline import create_parser

logger = util.conf_logger('deephyper.search.nas')

def run(param_dict):
    config = param_dict

    logger.debug(f'[STEP] global_step = {config["global_step"]}')
    logger.debug('[PARAM] Creating StateSpace')
    config['state_space'] = a.StateSpace(config['state_space'])
    logger.debug('[PARAM] StateSpace created')

    logger.debug('[PARAM] Loading data')
    load_data = import_module(param_dict['load_data_module_name']).load_data

    # Loading data
    config['num_steps'] = 10
    data_cfg = {
        'num_steps': config['num_steps'],
        'batch_size': config['hyperparameters']['batch_size'],
        'dest': '/Users/Dipendra/Projects/deephyper/benchmarks/ptbNas/DATA',
    }
    (t_X, t_y), (v_X, v_y), (test_X, test_y), vocab = load_data(data_cfg)
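The PTB variant passes a config dict rather than a bare destination and expects a third (test) split plus a vocabulary, so its load_data contract differs. A hypothetical module matching the unpacking above, with random stand-in data:

import numpy as np

def load_data(data_cfg):
    """Return train/valid/test (X, y) pairs plus a vocab; the contract is
    inferred from the call above."""
    n, b = data_cfg['num_steps'], data_cfg['batch_size']
    vocab = {'<unk>': 0, 'the': 1, 'of': 2}
    rng = np.random.RandomState(0)
    def split(rows):
        return (rng.randint(len(vocab), size=(rows, n)),
                rng.randint(len(vocab), size=(rows, n)))
    return split(10 * b), split(2 * b), split(2 * b), vocab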
github deephyper / deephyper / deephyper / search / hps / optimizer / optimizer.py — View on GitHub
from sys import float_info
from skopt import Optimizer as SkOptimizer
from skopt.learning import RandomForestRegressor, ExtraTreesRegressor, GradientBoostingQuantileRegressor
import numpy as np
from numpy import inf
from deephyper.search import util

logger = util.conf_logger('deephyper.search.hps.optimizer.optimizer')


class Optimizer:
    SEED = 12345

    def __init__(self,
                problem,
                num_workers,
                learner='RF',
                acq_func='gp_hedge',
                acq_kappa=1.96,
                liar_strategy='cl_max',
                n_jobs=1, **kwargs):

        assert learner in ["RF", "ET", "GBRT", "GP", "DUMMY"], f"Unknown scikit-optimize base_estimator: {learner}"
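Given that signature, constructing the wrapper needs only a problem and a worker count; everything else has defaults. A hedged usage sketch ('problem' is whatever problem object the surrounding deephyper code provides):

opt = Optimizer(problem, num_workers=8,
                learner='GBRT',          # must be one of RF, ET, GBRT, GP, DUMMY per the assert
                acq_func='LCB',
                liar_strategy='cl_max')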
github deephyper / deephyper / deephyper / search / search.py — View on GitHub
def __init__(self, problem, run, evaluator, **kwargs):
        _args = vars(self.parse_args(''))
        kwargs['problem'] = problem
        kwargs['run'] = run
        kwargs['evaluator'] = evaluator
        _args.update(kwargs)
        _args['problem'] = problem
        _args['run'] = run
        self.args = Namespace(**_args)
        self.problem = util.generic_loader(problem, 'Problem')
        self.run_func = util.generic_loader(run, 'run')
        logger.info('Evaluator will execute the function: ' + run)
        self.evaluator = Evaluator.create(
                self.run_func, method=evaluator, **kwargs)
        self.num_workers = self.evaluator.num_workers

        logger.info('Options: ' + pformat(self.args.__dict__, indent=4))
        logger.info('Hyperparameter space definition: ' +
                    pformat(self.problem.space, indent=4))
        logger.info(f'Created {self.args.evaluator} evaluator')
        logger.info(f'Evaluator: num_workers is {self.num_workers}')
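A concrete Search subclass is then driven entirely by strings: dotted paths for the problem and run function (resolved by util.generic_loader) and an evaluator method name (handed to Evaluator.create). The names below are placeholders:

search = MySearch(                        # hypothetical Search subclass
    problem='mypackage.problem.Problem',  # resolved by util.generic_loader
    run='mypackage.run.run',              # resolved by util.generic_loader
    evaluator='subprocess')               # backend name passed to Evaluator.create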
github deephyper / deephyper / search / run_nas.py — View on GitHub
def main(args):  # signature reconstructed; the excerpt begins inside main()
    '''Service loop: add jobs; read results; drive nas'''

    cfg = util.OptConfigNas(args)
    controller = Search(cfg)
    logger.info(f"Starting new NAS on benchmark {cfg.benchmark} & run with {cfg.run_module_name}")
    controller.run()

def test_join_states():
    l1 = [3., 1., 1., 1., 0., 0., 4., 0., 0., 0., 3., 3., 3., 3., 1.]
    l2 = [3., 1., 1., 1., 0., 0., 4., 0., 0., 0., 3., 3., 3., 3., 1.]
    l3 = [3., 1., 1., 1., 0., 0., 4., 0., 0., 0., 3., 3., 3., 3., 1.]
    l = [l1, l2, l3]
    print(join_states(l))

if __name__ == "__main__":
    parser = util.create_parser()
    args = parser.parse_args()
    main(args)
github deephyper / deephyper / deephyper / run / nas.py — View on GitHub
from random import random
from importlib import import_module
import numpy as np
import tensorflow as tf

import time

import deephyper.model.arch as a
from deephyper.model.builder import BasicBuilder
from deephyper.model.trainer import BasicTrainer
from deephyper.model.utilities.conversions import action2dict_v2
from deephyper.search import util
from deephyper.model.utilities.nas_cmdline import create_parser
from deephyper.search.nas.utils import StateSpace

logger = util.conf_logger('deephyper.search.nas')

def run(param_dict):
    logger.debug('Starting...')
    config = param_dict

    # logger.debug(f'[STEP] global_step = {config["global_step"]}')
    # logger.debug(f'[STEP] num worker = {config["num_worker"]}')
    # logger.debug(f'[STEP] step = {config["step"]}')
    logger.debug('[PARAM] Creating StateSpace')
    config['state_space'] = StateSpace(config['state_space'], config['num_layers'])
    logger.debug('[PARAM] StateSpace created')

    logger.debug('[PARAM] Loading data')
    load_data = import_module(param_dict['load_data_module_name']).load_data

    # Loading data
github deephyper / deephyper / search / run_nas.py — View on GitHub
        # for n, state in enumerate(states):
        for n in range(num_workers):
            init_seed = [float(np.random.uniform(-1, 1))] * controller_batch_size
            action = reinforce.get_actions(rnn_input=init_seed,
                                           num_layers=num_layers)
            cfg = self.config.copy()
            cfg['global_step'] = step
            cfg['num_worker'] = n
            cfg['num_layers'] = num_layers
            cfg['step'] = 0
            cfg['init_seed'] = init_seed
            cfg['arch_seq'] = action
            self.evaluator.add_eval_nas(cfg)

        timer = util.DelayTimer(max_minutes=None, period=SERVICE_PERIOD)

        controller_patience = 5 * num_workers
        results = []
        for elapsed_str in timer:
            new_results = list(self.evaluator.get_finished_evals())
            results.extend(new_results)
            len_results = len(results)
            logger.debug("[ Time = {0}, Step = {1} : results = {2} ]".format(elapsed_str, step, len_results))
            children_exp += len_results

            # Get rewards and apply reinforcement step by step
            for cfg, reward in results:
                if reward > best_reward:
                    best_reward = reward
                    children_exp = 0
                state = cfg['arch_seq']
github deephyper / deephyper / search / async-search.py — View on GitHub
import logging
import os
import pickle
import signal
import sys

HERE = os.path.dirname(os.path.abspath(__file__)) # search dir
top  = os.path.dirname(os.path.dirname(HERE)) # directory containing deephyper
sys.path.append(top)

from deephyper.search import evaluate, util

from skopt import Optimizer
from deephyper.search.ExtremeGradientBoostingQuantileRegressor import ExtremeGradientBoostingQuantileRegressor

masterLogger = util.conf_logger()
logger = logging.getLogger('deephyper.search.async-search')

SERVICE_PERIOD = 2          # Delay (seconds) between main loop iterations
CHECKPOINT_INTERVAL = 10    # How many jobs to complete between optimizer checkpoints
SEED = 12345

def submit_next_points(opt_config, optimizer, evaluator):
    '''Query optimizer for the next set of points to evaluate'''
    if evaluator.counter >= opt_config.max_evals:
        logger.debug("Reached max_evals; no longer starting new runs")
        return

    if opt_config.starting_point is not None:
        XX = [opt_config.starting_point]
        opt_config.starting_point = None
        additional_pts = optimizer.ask(n_points=evaluator.num_workers - 1)
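The batching here is scikit-optimize's standard ask/tell interface: Optimizer.ask(n_points=k) proposes k configurations at once, and each observed result goes back through Optimizer.tell. In isolation:

from skopt import Optimizer

opt = Optimizer([(0.0, 1.0), (1, 10)], random_state=12345)
batch = opt.ask(n_points=4)    # propose a batch of 4 points
for x in batch:
    y = sum(x)                 # stand-in objective; real values come from the evaluator
    opt.tell(x, y)             # feed the observation back to the surrogate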