How to use the hyperactive.EvolutionStrategyOptimizer class in hyperactive

To help you get started, we’ve selected a few hyperactive examples based on popular ways the library is used in public projects.

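All of the examples below follow the same basic pattern: build a search_config dict that maps a model's "module.Class" import path to its hyperparameter search space, construct the optimizer with that config and an iteration budget, then call fit. A minimal sketch of that pattern (the dataset and parameter ranges here are illustrative, not from the project):

from sklearn.datasets import load_iris
from hyperactive import EvolutionStrategyOptimizer

X, y = load_iris(return_X_y=True)

# keys are "module.Class" import paths, values are parameter search spaces
search_config = {
    "sklearn.tree.DecisionTreeClassifier": {
        "max_depth": range(1, 11),
        "min_samples_split": range(2, 21),
    }
}

opt = EvolutionStrategyOptimizer(search_config, n_iter=20)
opt.fit(X, y)              # run the evolutionary hyperparameter search
print(opt.score(X, y))     # score of the best model found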

From SimonBlanke/Hyperactive, tests/test_multiprocessing.py:
def test_EvolutionStrategyOptimizer():
    from hyperactive import EvolutionStrategyOptimizer

    # search_config, n_iter_0, n_iter_1, random_state, cv, n_jobs, X and y
    # are module-level fixtures defined elsewhere in the test file
    # (a sketch of plausible definitions follows this snippet)

    # single-process run with the smaller iteration budget
    opt0 = EvolutionStrategyOptimizer(
        search_config, n_iter_0, random_state=random_state, verbosity=0, cv=cv, n_jobs=1
    )
    opt0.fit(X, y)

    # multi-process run with the larger iteration budget
    opt1 = EvolutionStrategyOptimizer(
        search_config,
        n_iter_1,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=n_jobs,
    )
    opt1.fit(X, y)

    # the run with more iterations should find a strictly better score
    assert opt0.score_best < opt1.score_best
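The fixture names in this test (search_config, n_iter_0, n_iter_1, random_state, cv, n_jobs, X, y) are defined at module level in the original test file and are not shown on this page. A plausible set of definitions, reusing the illustrative search space from the sketch at the top (all values are assumptions):

from sklearn.datasets import load_iris

X, y = load_iris(return_X_y=True)

search_config = {
    "sklearn.tree.DecisionTreeClassifier": {
        "max_depth": range(1, 11),
        "min_samples_split": range(2, 21),
    }
}

n_iter_0 = 3       # small budget for the single-process run
n_iter_1 = 30      # larger budget, expected to reach a better score
random_state = 1   # fixed seed so the two runs are comparable
cv = 3             # cross-validation folds used to score each candidate
n_jobs = 2         # worker processes for the second run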
From SimonBlanke/Hyperactive, tests/test_classes.py:
from hyperactive import (
    HillClimbingOptimizer,
    StochasticHillClimbingOptimizer,
    TabuOptimizer,
    RandomSearchOptimizer,
    RandomRestartHillClimbingOptimizer,
    RandomAnnealingOptimizer,
    SimulatedAnnealingOptimizer,
    StochasticTunnelingOptimizer,
    ParallelTemperingOptimizer,
    ParticleSwarmOptimizer,
    EvolutionStrategyOptimizer,
    BayesianOptimizer,
)

# search_config is a module-level fixture in the original test file;
# every optimizer class shares the same (search_config, n_iter) constructor
_ = HillClimbingOptimizer(search_config, 1)
_ = StochasticHillClimbingOptimizer(search_config, 1)
_ = TabuOptimizer(search_config, 1)
_ = RandomSearchOptimizer(search_config, 1)
_ = RandomRestartHillClimbingOptimizer(search_config, 1)
_ = RandomAnnealingOptimizer(search_config, 1)
_ = SimulatedAnnealingOptimizer(search_config, 1)
_ = StochasticTunnelingOptimizer(search_config, 1)
_ = ParallelTemperingOptimizer(search_config, 1)
_ = ParticleSwarmOptimizer(search_config, 1)
_ = EvolutionStrategyOptimizer(search_config, 1)
_ = BayesianOptimizer(search_config, 1)
From SimonBlanke/Hyperactive, examples/machine_learning/xgboost_.py:
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from hyperactive import EvolutionStrategyOptimizer

# the dataset choice here is illustrative; the original example loads
# its data further up in the file
X, y = load_breast_cancer(return_X_y=True)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)

# this defines the model and the hyperparameter search space
search_config = {
    "xgboost.XGBClassifier": {
        "n_estimators": range(30, 200, 10),
        "max_depth": range(1, 11),
        "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
        "subsample": np.arange(0.05, 1.01, 0.05),
        "min_child_weight": range(1, 21),
        "nthread": [1],
    }
}

opt = EvolutionStrategyOptimizer(search_config, n_iter=10, n_jobs=4)

# search for the best hyperparameters on the training data
opt.fit(X_train, y_train)

# predict on the test data with the best model found
prediction = opt.predict(X_test)

# calculate the test score
score = opt.score(X_test, y_test)

print("\ntest score of best model:", score)
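The search_config convention generalizes beyond xgboost: any estimator importable by its "module.Class" path can be tuned the same way. A hedged sketch swapping in scikit-learn's GradientBoostingClassifier (the parameter ranges are illustrative, not from the project):

from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from hyperactive import EvolutionStrategyOptimizer

X, y = load_breast_cancer(return_X_y=True)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.20)

# same dict convention as the xgboost example above
search_config = {
    "sklearn.ensemble.GradientBoostingClassifier": {
        "n_estimators": range(30, 200, 10),
        "max_depth": range(1, 11),
        "learning_rate": [1e-3, 1e-2, 1e-1, 0.5, 1.0],
    }
}

opt = EvolutionStrategyOptimizer(search_config, n_iter=10, n_jobs=2)
opt.fit(X_train, y_train)

print("test score of best model:", opt.score(X_test, y_test))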