How to use the hyperactive.ParticleSwarmOptimizer class in Hyperactive

To help you get started, we’ve selected a few Hyperactive examples based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

Source: github.com/SimonBlanke/Hyperactive — tests/test_multiprocessing.py (view on GitHub)
def test_ParticleSwarmOptimizer():
    """Smoke-test ParticleSwarmOptimizer in single- and multi-process modes.

    Fits a baseline optimizer on one worker, then a second optimizer with
    more iterations across several workers, and checks that the latter
    reaches a better best score.
    """
    from hyperactive import ParticleSwarmOptimizer

    # Baseline run: fewer iterations, single worker.
    single_job_opt = ParticleSwarmOptimizer(
        search_config,
        n_iter_0,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=1,
    )
    single_job_opt.fit(X, y)

    # Comparison run: more iterations spread over multiple workers.
    multi_job_opt = ParticleSwarmOptimizer(
        search_config, n_iter_1, random_state=random_state, verbosity=0, cv=cv, n_jobs=n_jobs
    )
    multi_job_opt.fit(X, y)

    # NOTE(review): strict '<' assumes more iterations always strictly
    # improve the best score — confirm this is intended (could be flaky).
    assert single_job_opt.score_best < multi_job_opt.score_best
Source: github.com/SimonBlanke/Hyperactive — tests/test_multiprocessing.py (view on GitHub)
def test_ParticleSwarmOptimizer():
    """Compare sequential vs. parallel ParticleSwarmOptimizer runs.

    The sequential run uses ``n_iter_0`` iterations on one job; the
    parallel run uses ``n_iter_1`` iterations on ``n_jobs`` jobs. The
    parallel run is expected to end with the higher best score.
    """
    from hyperactive import ParticleSwarmOptimizer

    sequential = ParticleSwarmOptimizer(
        search_config,
        n_iter_0,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=1,
    )
    parallel = ParticleSwarmOptimizer(
        search_config,
        n_iter_1,
        random_state=random_state,
        verbosity=0,
        cv=cv,
        n_jobs=n_jobs,
    )

    # Fit both on the same data before comparing their best scores.
    sequential.fit(X, y)
    parallel.fit(X, y)

    # NOTE(review): assumes the longer/parallel search strictly beats the
    # shorter one — verify determinism under random_state.
    assert sequential.score_best < parallel.score_best
Source: github.com/SimonBlanke/Hyperactive — tests/test_classes.py (view on GitHub)
ParallelTemperingOptimizer,
        ParticleSwarmOptimizer,
        EvolutionStrategyOptimizer,
        BayesianOptimizer,
    )

    # Smoke-check: every optimizer class can be instantiated with a minimal
    # configuration (the shared search_config and a single iteration) without
    # raising. Results are discarded — only construction is exercised.
    _ = HillClimbingOptimizer(search_config, 1)
    _ = StochasticHillClimbingOptimizer(search_config, 1)
    _ = TabuOptimizer(search_config, 1)
    _ = RandomSearchOptimizer(search_config, 1)
    _ = RandomRestartHillClimbingOptimizer(search_config, 1)
    _ = RandomAnnealingOptimizer(search_config, 1)
    _ = SimulatedAnnealingOptimizer(search_config, 1)
    _ = StochasticTunnelingOptimizer(search_config, 1)
    _ = ParallelTemperingOptimizer(search_config, 1)
    _ = ParticleSwarmOptimizer(search_config, 1)
    _ = EvolutionStrategyOptimizer(search_config, 1)
    _ = BayesianOptimizer(search_config, 1)
Source: github.com/SimonBlanke/Hyperactive — examples/example_hyperband_keras.py (view on GitHub)
"input_shape": [(28, 28, 1)],
    },
    "keras.layers.MaxPooling2D.2": {"pool_size": [(2, 2)]},
    "keras.layers.Conv2D.3": {
        "filters": [32, 64, 128],
        "kernel_size": [3],
        "activation": ["relu"],
    },
    "keras.layers.MaxPooling2D.4": {"pool_size": [(2, 2)]},
    "keras.layers.Flatten.5": {},
    "keras.layers.Dense.6": {"units": range(30, 200, 10), "activation": ["softmax"]},
    "keras.layers.Dropout.7": {"rate": np.arange(0.4, 0.8, 0.1)},
    "keras.layers.Dense.8": {"units": [10], "activation": ["softmax"]},
}

# Run a particle-swarm hyperparameter search over the Keras search space
# defined above (3 swarm iterations, hyperband-style initialization).
# NOTE(review): metric="mean_squared_error" alongside softmax classification
# layers looks inconsistent — confirm the intended metric.
Optimizer = ParticleSwarmOptimizer(
    search_config, n_iter=3, metric="mean_squared_error", hyperband_init=10, verbosity=0
)

# search best hyperparameter for given data
Optimizer.fit(X_train, y_train)

# predict from test data
prediction = Optimizer.predict(X_test)

# calculate accuracy score
score = Optimizer.score(X_test, y_test)