How to use the pypesto.optimize module in pypesto

To help you get started, we’ve selected a few pypesto.optimize examples, based on popular ways it is used in public projects.

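If you are new to the library, a minimal end-to-end sketch of the pypesto.optimize workflow looks roughly like this (the Rosenbrock objective, bounds, and optimizer settings below are illustrative choices, not taken from any particular project):

import numpy as np
import scipy.optimize

import pypesto
import pypesto.optimize

# Illustrative objective: the Rosenbrock function and its gradient from SciPy.
objective = pypesto.Objective(fun=scipy.optimize.rosen,
                              grad=scipy.optimize.rosen_der)

# Box-constrained problem in two dimensions.
problem = pypesto.Problem(objective, lb=-5 * np.ones(2), ub=5 * np.ones(2))

# Local optimizer wrapping scipy.optimize.minimize.
optimizer = pypesto.optimize.ScipyOptimizer(method='L-BFGS-B')

# Multi-start optimization; returns a pypesto Result object.
result = pypesto.optimize.minimize(
    problem=problem, optimizer=optimizer, n_starts=10)

# Best objective value found across the 10 starts.
print(result.optimize_result.list[0]['fval'])

The examples below come from the pyPESTO test suite and exercise the same API together with engines, optimization histories, logging, priors, profiles, fixed parameters, sampling, and visualization.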

github ICB-DCM / pyPESTO / test / test_engine.py
import numpy as np

import pypesto
import pypesto.optimize


# test_objective is a helper module from the pyPESTO test suite that
# provides the Rosenbrock objective with sensitivities.
def _test_basic(engine):
    # set up problem
    objective = test_objective.rosen_for_sensi(max_sensi_order=2)['obj']
    lb = 0 * np.ones((1, 2))
    ub = 1 * np.ones((1, 2))
    problem = pypesto.Problem(objective, lb, ub)
    optimizer = pypesto.optimize.ScipyOptimizer(options={'maxiter': 10})
    result = pypesto.optimize.minimize(
        problem=problem, n_starts=5, engine=engine, optimizer=optimizer)
    assert len(result.optimize_result.as_list()) == 5

github ICB-DCM / pyPESTO / test / visualize / test_visualize.py
import pypesto
import pypesto.optimize as optimize
import pypesto.startpoint


# create_problem is a helper defined elsewhere in this test file.
def create_optimization_history():
    # create the pypesto problem
    problem = create_problem()

    # create optimizer
    optimizer_options = {'maxiter': 200}
    optimizer = optimize.ScipyOptimizer(
        method='TNC', options=optimizer_options)

    history_options = pypesto.HistoryOptions(
        trace_record=True, trace_save_iter=1)

    # run optimization
    optimize_options = optimize.OptimizeOptions(allow_failed_starts=True)
    result_with_trace = optimize.minimize(
        problem=problem,
        optimizer=optimizer,
        n_starts=5,
        startpoint_method=pypesto.startpoint.uniform,
        options=optimize_options,
        history_options=history_options
    )
    return result_with_trace

github ICB-DCM / pyPESTO / test / test_logging.py
    # Excerpt from inside a test function; assumes `import logging`, `import os`,
    # `import pypesto`, and `import pypesto.optimize`, and that `filename` is the
    # path of the log file, defined earlier in the test.
    logger = logging.getLogger('pypesto')
    if os.path.exists(filename):
        os.remove(filename)
    fh = logging.FileHandler(filename)
    fh.setLevel(logging.DEBUG)
    logger.addHandler(fh)
    logger.info("start test")

    # problem definition: an objective that always raises, so that every
    # optimizer start fails and the failure is logged
    def fun(_):
        raise Exception("This function cannot be called.")

    objective = pypesto.Objective(fun=fun)
    problem = pypesto.Problem(objective, -1, 1)

    optimizer = pypesto.optimize.ScipyOptimizer()
    options = {'allow_failed_starts': True}

    # optimization
    pypesto.optimize.minimize(problem, optimizer, 5, options=options)

    # assert logging worked
    assert os.path.exists(filename)
    with open(filename, 'rb') as f:
        content = str(f.read())

    # tidy up
    os.remove(filename)

    # check if error message got inserted
    assert "fail" in content

github ICB-DCM / pyPESTO / test / test_prior.py
    # Excerpt; assumes numpy as np, itertools, pypesto and pypesto.optimize are
    # imported, the prior helpers get_parameter_prior_dict and
    # NegLogParameterPriors are available, and that prior_types, scales and
    # lin_to_scaled are defined earlier in the test.
    problem_dict = {  # the beginning of this dict is truncated in the excerpt
        'log': {'lb': [-3], 'ub': [3], 'opt': [0]},
        'log10': {'lb': [-3], 'ub': [2], 'opt': [0]}}

    for prior_type, scale in itertools.product(prior_types, scales):

        prior_list = [get_parameter_prior_dict(
            0, prior_type, [1, 1], scale)]

        test_prior = NegLogParameterPriors(prior_list)
        test_problem = pypesto.Problem(test_prior,
                                       lb=problem_dict[scale]['lb'],
                                       ub=problem_dict[scale]['ub'],
                                       dim_full=1,
                                       x_scales=[scale])

        optimizer = pypesto.optimize.ScipyOptimizer(method='Nelder-Mead')

        result = pypesto.optimize.minimize(
            problem=test_problem, optimizer=optimizer, n_starts=10)

        assert np.isclose(result.optimize_result.list[0]['x'],
                          problem_dict[scale]['opt'], atol=1e-04)

    # test uniform distribution:
    for scale in scales:
        prior_dict = get_parameter_prior_dict(
            0, 'uniform', [1, 2], scale)

        # check inside and outside of interval
        assert abs(prior_dict['density_fun'](lin_to_scaled(.5, scale))
                   - 0) < 1e-8

github ICB-DCM / pyPESTO / test / test_profile.py
import numpy as np

import pypesto
import pypesto.optimize as optimize
import pypesto.profile as profile
import pypesto.visualize as visualize


# test_objective is a helper module from the pyPESTO test suite.
def test_profile_with_fixed_parameters():
    """Test using profiles with fixed parameters."""
    obj = test_objective.rosen_for_sensi(max_sensi_order=1)['obj']

    lb = -2 * np.ones(5)
    ub = 2 * np.ones(5)
    problem = pypesto.Problem(
        objective=obj, lb=lb, ub=ub,
        x_fixed_vals=[0.5, -1.8], x_fixed_indices=[0, 3])

    optimizer = optimize.ScipyOptimizer(options={'maxiter': 50})
    result = optimize.minimize(
        problem=problem, optimizer=optimizer, n_starts=2)

    for i_method, next_guess_method in enumerate([
            'fixed_step', 'adaptive_step_order_0',
            'adaptive_step_order_1', 'adaptive_step_regression']):
        print(next_guess_method)
        profile.parameter_profile(
            problem=problem, result=result, optimizer=optimizer,
            next_guess_method=next_guess_method)

        # standard plotting: one axis per free parameter (5 parameters, 2 fixed)
        axes = visualize.profiles(result, profile_list_ids=i_method)
        assert len(axes) == 3
        visualize.profile_cis(result, profile_list=i_method)

github ICB-DCM / pyPESTO / test / test_sample.py
import matplotlib.pyplot as plt

import pypesto.optimize as optimize
import pypesto.sample as sample
import pypesto.visualize as visualize


# `sampler` and `problem` are provided as pytest fixtures.
def test_pipeline(sampler, problem):
    """Check that a typical pipeline runs through."""
    # optimization
    optimizer = optimize.ScipyOptimizer(options={'maxiter': 10})
    result = optimize.minimize(
        problem, n_starts=3, optimizer=optimizer)

    # sample
    result = sample.sample(
        problem, sampler=sampler, n_samples=100, result=result)

    # some plot
    visualize.sampling_1d_marginals(result)
    plt.close()

github ICB-DCM / pyPESTO / test / test_x_fixed.py
import numpy as np

import pypesto
import pypesto.optimize


# create_problem is a helper defined elsewhere in this test file; it builds
# a problem with 5 parameters of which 2 are fixed.
def test_optimize():
    problem = create_problem()
    optimizer = pypesto.optimize.ScipyOptimizer()
    n_starts = 5
    result = pypesto.optimize.minimize(problem, optimizer, n_starts)

    optimizer_result = result.optimize_result.list[0]
    assert len(optimizer_result.x) == 5
    assert len(optimizer_result.grad) == 5

    # problem.ub only covers the 3 free parameters
    # (maybe not what we want, but that's how it is right now)
    assert len(problem.ub) == 3

    # nans written into unknown components
    assert np.isnan(optimizer_result.grad[1])

    # fixed values written into parameter vector
    assert optimizer_result.x[1] == 1

    lb_full = problem.get_full_vector(problem.lb)
    assert len(lb_full) == 5

github ICB-DCM / pyPESTO / test / visualize / test_visualize.py
import numpy as np

import pypesto.optimize as optimize


# create_optimization_result is a helper defined elsewhere in this test file.
def create_optimization_result_nan_inf():
    """Create a result object containing nan and inf function values."""
    # get result with only numbers
    result = create_optimization_result()

    # append nan and inf
    optimizer_result = optimize.OptimizerResult(
        fval=float('nan'), x=np.array([float('nan'), float('nan')]))
    result.optimize_result.append(optimizer_result=optimizer_result)
    optimizer_result = optimize.OptimizerResult(
        fval=-float('inf'), x=np.array([-float('inf'), -float('inf')]))
    result.optimize_result.append(optimizer_result=optimizer_result)

    return result