How to use the hyperactive.SimulatedAnnealingOptimizer class

To help you get started, we’ve selected a few Hyperactive examples based on popular ways SimulatedAnnealingOptimizer is used in public projects.


From SimonBlanke/Hyperactive, tests/test_multiprocessing.py — compares a short single-process run against a longer run spread over several jobs:
def test_SimulatedAnnealingOptimizer():
    from hyperactive import SimulatedAnnealingOptimizer

    # search_config, n_iter_0, n_iter_1, random_state, cv, n_jobs, X and y
    # are module-level fixtures defined earlier in the test file
    opt0 = SimulatedAnnealingOptimizer(
        search_config, n_iter_0, random_state=random_state, verbosity=0, cv=cv, n_jobs=1
    )
    opt0.fit(X, y)

    opt1 = SimulatedAnnealingOptimizer(
        search_config,
        n_iter_1,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=n_jobs,
    )
    opt1.fit(X, y)

    # the longer, parallelized search is expected to find a strictly better score
    assert opt0.score_best < opt1.score_best
From SimonBlanke/Hyperactive, tests/test_classes.py — verifies that every optimizer class, SimulatedAnnealingOptimizer included, can be constructed with the same (search_config, n_iter) arguments:
    # the import below is reconstructed from the classes used in this excerpt;
    # the enclosing test function's signature is omitted on the source page
    from hyperactive import (
        HillClimbingOptimizer,
        StochasticHillClimbingOptimizer,
        TabuOptimizer,
        RandomSearchOptimizer,
        RandomRestartHillClimbingOptimizer,
        RandomAnnealingOptimizer,
        SimulatedAnnealingOptimizer,
        StochasticTunnelingOptimizer,
        ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
    )

    _ = HillClimbingOptimizer(search_config, 1)
    _ = StochasticHillClimbingOptimizer(search_config, 1)
    _ = TabuOptimizer(search_config, 1)
    _ = RandomSearchOptimizer(search_config, 1)
    _ = RandomRestartHillClimbingOptimizer(search_config, 1)
    _ = RandomAnnealingOptimizer(search_config, 1)
    _ = SimulatedAnnealingOptimizer(search_config, 1)
    _ = StochasticTunnelingOptimizer(search_config, 1)
    _ = ParallelTemperingOptimizer(search_config, 1)
    _ = ParticleSwarmOptimizer(search_config, 1)
    _ = EvolutionStrategyOptimizer(search_config, 1)
    _ = BayesianOptimizer(search_config, 1)
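Because every optimizer class accepts the same positional arguments, SimulatedAnnealingOptimizer is interchangeable with the alternatives above. A short sketch, reusing search_config and the training data from the other snippets on this page:

from hyperactive import SimulatedAnnealingOptimizer, ParticleSwarmOptimizer

# swap the annealing optimizer for any other optimizer class without
# further code changes
for Optimizer in (SimulatedAnnealingOptimizer, ParticleSwarmOptimizer):
    opt = Optimizer(search_config, 10)
    opt.fit(X_train, y_train)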
From SimonBlanke/Hyperactive, examples/advanced_features/transfer_learning.py — tunes the dense head of a pretrained MobileNet:
"keras.applications.MobileNet.1": {
        "weights": ["imagenet"],
        "input_shape": [(32, 32, 3)],
        "include_top": [False],
    },
    "keras.layers.Flatten.2": {},
    "keras.layers.Dense.3": {
        "units": range(5, 15),
        "activation": ["relu"],
        "kernel_initializer": ["uniform"],
    },
    "keras.layers.Dense.4": {"units": [10], "activation": ["sigmoid"]},
}


opt = SimulatedAnnealingOptimizer(search_config, n_iter=3, warm_start=False)

# search for the best hyperparameters on the training data
opt.fit(X_train, y_train)

# predict on the test data
prediction = opt.predict(X_test)

# compute the test score
score = opt.score(X_test, y_test)
From SimonBlanke/Hyperactive, examples/advanced_features/warm_start_keras.py — resumes a CNN search from a predefined start point:
"keras.layers.MaxPooling2D.4": {"pool_size": [(2, 2)]},
    "keras.layers.Conv2D.5": {
        "filters": [32],
        "kernel_size": [3],
        "activation": ["relu"],
        "input_shape": [(28, 28, 1)],
    },
    "keras.layers.MaxPooling2D.6": {"pool_size": [(2, 2)]},
    "keras.layers.Flatten.7": {},
    "keras.layers.Dense.8": {"units": [50], "activation": ["softmax"]},
    "keras.layers.Dropout.9": {"rate": [0.4]},
    "keras.layers.Dense.10": {"units": [10], "activation": ["softmax"]},
}


# start_point (defined earlier in the example) seeds the search
# with a known parameter configuration
opt = SimulatedAnnealingOptimizer(search_config, n_iter=3, warm_start=start_point)

# search for the best hyperparameters on the training data
opt.fit(X_train, y_train)

# predict on the test data
prediction = opt.predict(X_test)

# compute the accuracy score
score = opt.score(X_test, y_test)
From SimonBlanke/Hyperactive, examples/advanced_features/warm_start_sklearn.py — warm-starts a RandomForestClassifier search from a known configuration:
"min_samples_split": range(2, 21),
        "min_samples_leaf": range(2, 21),
    }
}

# a known configuration used to seed (warm-start) the search
start_point = {
    "sklearn.ensemble.RandomForestClassifier.0": {
        "n_estimators": [30],
        "max_depth": [6],
        "criterion": ["entropy"],
        "min_samples_split": [12],
        "min_samples_leaf": [16],
    }
}

opt = SimulatedAnnealingOptimizer(
    search_config, n_iter=100, n_jobs=4, warm_start=start_point, verbosity=0
)

# search for the best hyperparameters on the training data
opt.fit(X_train, y_train)

# predict on the test data
prediction = opt.predict(X_test)

# compute the accuracy score
score = opt.score(X_test, y_test)
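After fitting, the multiprocessing test at the top of this page reads the best score found from the optimizer's score_best attribute; the same attribute can be inspected here (a sketch, assuming score_best is part of the public API rather than test-only):

# best score found during the search, as used in test_multiprocessing.py
print("best score:", opt.score_best)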