How to use the nni.tuner.Tuner class in nni

To help you get started, we’ve selected a few nni examples, based on popular ways it is used in public projects.


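Every example on this page subclasses nni.tuner.Tuner and overrides the same three hooks: update_search_space, generate_parameters, and receive_trial_result. As a rough sketch of that interface (the class name and the parameter it returns are illustrative, not taken from any of the projects below):

from nni.tuner import Tuner

class MyTuner(Tuner):
    def update_search_space(self, search_space):
        # Called with the experiment's search space, and again whenever it is updated.
        self.search_space = search_space

    def generate_parameters(self, parameter_id, **kwargs):
        # Return a concrete configuration for the trial identified by parameter_id.
        return {'x': parameter_id}

    def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
        # value carries the trial's final metric; use it to guide future suggestions.
        pass
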
github microsoft / nni / test / async_sharing_test / simple_tuner.py View on Github external
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.

"""
SimpleTuner for Weight Sharing
"""

import logging

from threading import Event, Lock
from nni.tuner import Tuner

_logger = logging.getLogger('WeightSharingTuner')


class SimpleTuner(Tuner):
    """
    simple tuner, test for weight sharing
    """

    def __init__(self):
        super(SimpleTuner, self).__init__()
        self.trial_meta = {}
        self.f_id = None  # parameter id of the parent (father) trial
        self.sig_event = Event()
        self.thread_lock = Lock()

    def generate_parameters(self, parameter_id, **kwargs):
        if self.f_id is None:
            self.thread_lock.acquire()
            self.f_id = parameter_id
            self.trial_meta[parameter_id] = {
github microsoft / nni / test / config_test / multi_thread / multi_thread_tuner.py View on Github external
import time
from nni.tuner import Tuner


class MultiThreadTuner(Tuner):
    def __init__(self):
        self.parent_done = False

    def generate_parameters(self, parameter_id, **kwargs):
        if parameter_id == 0:
            return {'x': 0}
        else:
            while not self.parent_done:
                time.sleep(2)
            return {'x': 1}

    def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
        if parameter_id == 0:
            self.parent_done = True

    def update_search_space(self, search_space):
github microsoft / nni / test / naive_test / naive_tuner.py View on Github external
# Licensed under the MIT license.

import json
import logging
import os

from nni.tuner import Tuner
from nni.utils import extract_scalar_reward

_logger = logging.getLogger('NaiveTuner')
_logger.info('start')

_pwd = os.path.dirname(__file__)
_result = open(os.path.join(_pwd, 'tuner_result.txt'), 'w')

class NaiveTuner(Tuner):
    def __init__(self, optimize_mode):
        self.cur = 0
        _logger.info('init')

    def generate_parameters(self, parameter_id, **kwargs):
        self.cur += 1
        _logger.info('generate parameters: %s' % self.cur)
        return { 'x': self.cur }

    def receive_trial_result(self, parameter_id, parameters, value, **kwargs):
        reward = extract_scalar_reward(value)
        _logger.info('receive trial result: %s, %s, %s' % (parameter_id, parameters, reward))
        _result.write('%d %d\n' % (parameters['x'], reward))
        _result.flush()

    def update_search_space(self, search_space):
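
The tuner API is symmetric: NNI asks for a configuration through generate_parameters and later reports the trial's final metric back through receive_trial_result. A tuner such as the NaiveTuner above can therefore be exercised outside NNI with a small driver like this hypothetical helper (exercise_tuner is not part of NNI):

def exercise_tuner(tuner, evaluate, n_trials=5):
    # Ask the tuner for a configuration, pretend to run the trial, report the result.
    for pid in range(n_trials):
        params = tuner.generate_parameters(pid)
        metric = evaluate(params)
        tuner.receive_trial_result(pid, params, metric)

# e.g. exercise_tuner(NaiveTuner('maximize'), lambda p: p['x'] ** 2)
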
github microsoft / nni / src / sdk / pynni / nni / gp_tuner / gp_tuner.py View on Github external
import logging
import numpy as np

from sklearn.gaussian_process.kernels import Matern
from sklearn.gaussian_process import GaussianProcessRegressor

from nni.tuner import Tuner
from nni.utils import OptimizeMode, extract_scalar_reward

from .target_space import TargetSpace
from .util import UtilityFunction, acq_max

logger = logging.getLogger("GP_Tuner_AutoML")


class GPTuner(Tuner):
    """
    GPTuner is a Bayesian optimization method in which a Gaussian Process is used to model loss functions.

    Parameters
    ----------
    optimize_mode : str
        optimize mode, 'maximize' or 'minimize', by default 'maximize'
    utility : str
        utility function (also called 'acquisition function') to use, which can be 'ei', 'ucb' or 'poi'. By default 'ei'.
    kappa : float
        value used by the 'ucb' utility function. The larger kappa is, the more exploratory the tuner will be. By default 5.
    xi : float
        used by the 'ei' and 'poi' utility functions. The larger xi is, the more exploratory the tuner will be. By default 0.
    nu : float
        used to specify the Matern kernel. The smaller nu is, the less smooth the approximated function is. By default 2.5.
    alpha : float
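
Going by the constructor arguments documented above, a GPTuner tuned toward exploration might be created roughly like this (a sketch; the module path simply mirrors the file location shown in the header):

from nni.gp_tuner.gp_tuner import GPTuner

# 'ucb' acquisition with a larger kappa biases the search toward exploration.
tuner = GPTuner(optimize_mode='maximize', utility='ucb', kappa=5.0, nu=2.5)
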
github microsoft / nni / src / sdk / pynni / nni / smac_tuner / smac_tuner.py View on Github external
from smac.facade.roar_facade import ROAR
from smac.facade.smac_facade import SMAC
from smac.scenario.scenario import Scenario
from smac.utils.io.cmd_reader import CMDReader

from ConfigSpaceNNI import Configuration

import nni
from nni.tuner import Tuner
from nni.utils import OptimizeMode, extract_scalar_reward

from .convert_ss_to_scenario import generate_scenario

logger = logging.getLogger('smac_AutoML')

class SMACTuner(Tuner):
    """
    This is a wrapper of [SMAC](https://github.com/automl/SMAC3) following the NNI tuner interface.
    It only supports ``SMAC`` mode and does not support the multiple-instance feature of SMAC3 (i.e.,
    running the same configuration multiple times).
    """
    def __init__(self, optimize_mode="maximize", config_dedup=False):
        """
        Parameters
        ----------
        optimize_mode : str
            Optimize mode, 'maximize' or 'minimize', by default 'maximize'
        config_dedup : bool
            If True, the tuner will not generate a configuration that has already been generated.
            If False, a configuration may be generated twice, but this is rare for a relatively large search space.
        """
        self.logger = logger
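
With only the two documented arguments, instantiating the wrapper could look like the following sketch (SMAC3 and ConfigSpaceNNI must be installed for the imports at the top of the file to resolve):

from nni.smac_tuner.smac_tuner import SMACTuner

# config_dedup=True asks the tuner never to emit a configuration it has already generated.
tuner = SMACTuner(optimize_mode='maximize', config_dedup=True)
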
github microsoft / nni / src / sdk / pynni / nni / metis_tuner / metis_tuner.py View on Github external
import nni.metis_tuner.Regression_GP.CreateModel as gp_create_model
import nni.metis_tuner.Regression_GP.OutlierDetection as gp_outlier_detection
import nni.metis_tuner.Regression_GP.Prediction as gp_prediction
import nni.metis_tuner.Regression_GP.Selection as gp_selection
from nni.tuner import Tuner
from nni.utils import OptimizeMode, extract_scalar_reward

logger = logging.getLogger("Metis_Tuner_AutoML")

NONE_TYPE = ''
CONSTRAINT_LOWERBOUND = None
CONSTRAINT_UPPERBOUND = None
CONSTRAINT_PARAMS_IDX = []


class MetisTuner(Tuner):
    """
    Metis Tuner

    More information about the algorithm can be found here:
    https://www.microsoft.com/en-us/research/publication/metis-robustly-tuning-tail-latencies-cloud-systems/

    Attributes
    ----------
        optimize_mode : str
            optimize_mode is a string with two possible modes: "maximize" and "minimize"

        no_resampling : bool
            True or False.
            Should Metis consider re-sampling as part of the search strategy?
            If you are confident that the training dataset is noise-free,
            then you do not need re-sampling.
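
Based on the attributes described above, a MetisTuner for a noise-free training setup could plausibly be configured as follows (a sketch; only the arguments named in the docstring are used):

from nni.metis_tuner.metis_tuner import MetisTuner

# Skip re-sampling when the training data is assumed to be noise-free.
tuner = MetisTuner(optimize_mode='maximize', no_resampling=True)
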
github microsoft / nni / examples / tuners / weight_sharing / ga_customer_tuner / customer_tuner.py View on Github external
    def __str__(self):
        return "info: " + str(self.info) + ", config :" + str(self.config) + ", result: " + str(self.result)

    def mutation(self, indiv_id: int, graph_cfg: Graph = None, info=None):
        self.result = None
        if graph_cfg is not None:
            self.config = graph_cfg
        self.config.mutation()
        self.info = info
        self.parent_id = self.indiv_id
        self.indiv_id = indiv_id
        self.shared_ids.intersection_update({layer.hash_id for layer in self.config.layers if layer.is_delete is False})


class CustomerTuner(Tuner):
    """
    NAS Tuner using Evolution Algorithm, with weight sharing enabled
    """
    def __init__(self, optimize_mode, save_dir_root, population_size=32, graph_max_layer=6, graph_min_layer=3):
        self.optimize_mode = OptimizeMode(optimize_mode)
        self.indiv_counter = 0
        self.events = []
        self.thread_lock = Lock()
        self.save_dir_root = save_dir_root
        self.population = self.init_population(population_size, graph_max_layer, graph_min_layer)
        assert len(self.population) == population_size
        logger.debug('init population done.')
        return

    def generate_new_id(self):
        """
github SpongebBob / tabular_automl_NNI / beamsearch_tunner.py View on Github external
import numpy as np
from enum import Enum, unique

from nni.tuner import Tuner
from nni.utils import extract_scalar_reward

logger = logging.getLogger('autofe-tunner')



class OptimizeMode(Enum):
    Minimize = 'minimize'
    Maximize = 'maximize'


class BeamTuner(Tuner):
    def __init__(self, optimize_mode, feature_percent = 0.9, topk = 120):
        '''
        Beam-search tuner that uses the given default search_space to get the top N features.
        '''
        self.count = -1
        self.optimize_mode = OptimizeMode(optimize_mode)
        # feature_importance from the first trial
        self.search_space = None
        # deleted search_space
        self.deleta_feature = set([])
        # default search_space
        self.default_space = None 
        self.topk = topk
        self.feature_percent = feature_percent
        logger.debug('init aufo-fe done.')
        return
github microsoft / nni / src / sdk / pynni / nni / batch_tuner / batch_tuner.py View on Github external
batch_tuner.py including:
    class BatchTuner
"""

import logging

import nni
from nni.tuner import Tuner

TYPE = '_type'
CHOICE = 'choice'
VALUE = '_value'

LOGGER = logging.getLogger('batch_tuner_AutoML')

class BatchTuner(Tuner):
    """
    BatchTuner is a tuner that runs all the configurations the user wants to try, as a batch.

    Examples
    --------
    The search space is only accepted in the following form:

        ::

            {'combine_params':
                { '_type': 'choice',
                            '_value': '[{...}, {...}, {...}]',
                }
            }

    """
github microsoft / nni / src / sdk / pynni / nni / hyperopt_tuner / hyperopt_tuner.py View on Github external
if choice_key == choice_name:
                        return {
                            NodeType.INDEX: pos,
                            NodeType.VALUE: [
                                choice_name,
                                _add_index(choice_value_format, parameter[1])
                            ]
                        }
                elif choice_name == item:
                    return {NodeType.INDEX: pos, NodeType.VALUE: item}
        else:
            return parameter
    return None  # note: this is not written by original author, feel free to modify if you think it's incorrect


class HyperoptTuner(Tuner):
    """
    HyperoptTuner is a tuner that uses the hyperopt algorithm.
    """

    def __init__(self, algorithm_name, optimize_mode='minimize',
                 parallel_optimize=False, constant_liar_type='min'):
        """
        Parameters
        ----------
        algorithm_name : str
            algorithm_name is one of "tpe", "random_search" and "anneal".
        optimize_mode : str
        parallel_optimize : bool
            For more detail, see: docs/en_US/Tuner/HyperoptTuner.md
        constant_liar_type : str
            constant_liar_type is one of "min", "max" and "mean"
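
Putting the documented arguments together, a TPE-based instance might be created as in this sketch (the constant-liar settings only matter when several trials are generated concurrently):

from nni.hyperopt_tuner.hyperopt_tuner import HyperoptTuner

# algorithm_name is one of 'tpe', 'random_search' or 'anneal'.
tuner = HyperoptTuner('tpe', optimize_mode='minimize',
                      parallel_optimize=True, constant_liar_type='mean')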