How to use the lale.lib.lale.Hyperopt operator in lale

To help you get started, we’ve selected a few lale examples based on popular ways Hyperopt is used in public projects.

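The common pattern is to wrap a planned pipeline (or a single operator) in Hyperopt, call fit to run the trials, and then either predict with the result or retrieve the best pipeline. Below is a minimal sketch of that pattern; the iris dataset and LogisticRegression operator are placeholder choices for illustration, not taken from the projects below.

from sklearn.datasets import load_iris
from lale.lib.sklearn import LogisticRegression
from lale.lib.lale import Hyperopt

X, y = load_iris(return_X_y=True)
# Hyperopt searches the hyperparameter space of the wrapped estimator;
# fit runs max_evals trials and keeps the best configuration found.
optimizer = Hyperopt(estimator=LogisticRegression(), max_evals=5, cv=3)
trained = optimizer.fit(X, y)
predictions = trained.predict(X)
best_pipeline = trained.get_pipeline()  # best trained pipeline, or None if no trial succeeded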

github IBM / lale / test / test_autoai_libs.py
def doTest(self, trainable, train_X, train_y, test_X, test_y):
    trained = trainable.fit(train_X, train_y)
    transformed = trained.transform(test_X)
    with self.assertWarns(DeprecationWarning):
        trainable.transform(train_X)
    trainable.to_json()
    trainable_pipeline = trainable >> float32_transform() >> LR()
    trained_pipeline = trainable_pipeline.fit(train_X, train_y)
    trained_pipeline.predict(test_X)
    hyperopt = Hyperopt(estimator=trainable_pipeline, max_evals=1)
    trained_hyperopt = hyperopt.fit(train_X, train_y)
    trained_hyperopt.predict(test_X)

github IBM / lale / test / test_core_operators.py
def test_trained_get_pipeline_success(self):
    from lale.lib.lale import Hyperopt
    from sklearn.datasets import load_iris
    iris_data = load_iris()
    op = Hyperopt(estimator=LogisticRegression(), max_evals=1)
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        op2 = op.fit(iris_data.data[10:], iris_data.target[10:])
        x = op2.get_pipeline

github IBM / lale / test / test_grammar.py
assert isinstance(sample, PlannedOperator)

# test getter for methods other than Nonterminal
if isinstance(generated, PlannedPipeline):
    assert (generated._name.startswith('pipeline'))

try:
    gtrainer = Hyperopt(estimator=generated, max_evals=3, scoring='r2')
    gtrained = gtrainer.fit(self.train_X, self.train_y)
    assert isinstance(gtrained.get_pipeline(), TrainedOperator)
except ValueError:
    # None of the trials succeeded
    pass

try:
    strainer = Hyperopt(estimator=sample, max_evals=3, scoring='r2')
    strained = strainer.fit(self.train_X, self.train_y)
    assert isinstance(strained.get_pipeline(), TrainedOperator)
except ValueError:
    # None of the trials succeeded
    pass

github IBM / lale / test / test_autoai_libs.py
def do1DTest(self, trainable, train_X, train_y, test_X, test_y):
    # Test for 1-D array as input to the transformers
    train_X = train_X[:, 0]
    test_X = test_X[:, 0]
    trainable_pipeline = (trainable & NoOp()) >> ConcatFeatures() >> float32_transform() >> LR()
    trained_pipeline = trainable_pipeline.fit(train_X, train_y)
    trained_pipeline.predict(test_X)
    hyperopt = Hyperopt(estimator=trainable_pipeline, max_evals=1)
    trained_hyperopt = hyperopt.fit(train_X, train_y)
    trained_hyperopt.predict(test_X)

github IBM / lale / test / test_optimizers.py
def test_runtime_limit_zero_time_hor(self):
    planned_pipeline = (MinMaxScaler | Normalizer) >> LinearRegression
    from sklearn.datasets import load_boston
    X, y = load_boston(return_X_y=True)

    hor = Hyperopt(
        estimator=planned_pipeline,
        max_evals=1,
        cv=3,
        max_opt_time=0.0,
        scoring='r2'
    )
    hor_fitted = hor.fit(X, y)
    assert hor_fitted.get_pipeline() is None

github IBM / lale / test / test_optimizers.py
def test_preprocessing_union(self):
    from lale.datasets import openml
    (train_X, train_y), (test_X, test_y) = openml.fetch(
        'credit-g', 'classification', preprocess=False)
    from lale.lib.lale import Project
    from lale.lib.sklearn import Normalizer, OneHotEncoder
    from lale.lib.lale import ConcatFeatures as Concat
    from lale.lib.sklearn import RandomForestClassifier as Forest
    prep_num = Project(columns={'type': 'number'}) >> Normalizer
    prep_cat = Project(columns={'not': {'type': 'number'}}) >> OneHotEncoder(sparse=False)
    planned = (prep_num & prep_cat) >> Concat >> Forest
    from lale.lib.lale import Hyperopt
    hyperopt_classifier = Hyperopt(estimator=planned, max_evals=1)
    best_found = hyperopt_classifier.fit(train_X, train_y)

github IBM / lale / test / test_optimizers.py
def test_runtime_limit_zero_time_hoc(self):
    planned_pipeline = (MinMaxScaler | Normalizer) >> (LogisticRegression | KNeighborsClassifier)
    from sklearn.datasets import load_iris
    X, y = load_iris(return_X_y=True)

    hoc = Hyperopt(
        estimator=planned_pipeline,
        max_evals=1,
        cv=3,
        scoring='accuracy',
        max_opt_time=0.0
    )
    hoc_fitted = hoc.fit(X, y)
    assert hoc_fitted.get_pipeline() is None

github IBM / lale / test / test_core_operators.py
def dont_test_planned_pipe_left(self):
    from lale.lib.lale import NoOp
    from lale.lib.sklearn import LogisticRegression
    from sklearn.decomposition import PCA
    from lale.lib.lale import Hyperopt
    iris = sklearn.datasets.load_iris()
    pipeline = NoOp() >> PCA >> LogisticRegression
    clf = Hyperopt(estimator=pipeline, max_evals=1)
    clf.fit(iris.data, iris.target)

github IBM / lale / test / test_grammar.py
g.transformer = g.union_tfm | g.union_tfm >> g.transformer
g.union_tfm   = g.prim_tfm | g.union_body >> Concat
g.union_body  = g.transformer | g.transformer & g.union_body

g.prim_est    = LR | KNN
g.prim_tfm    = PCA | Scaler
g.ensembler   = Boost

generated = g.unfold(7)
sample = g.sample(7)
assert isinstance(generated, PlannedOperator)
assert isinstance(sample, PlannedOperator)

# Train
try:
    gtrainer = Hyperopt(estimator=generated, max_evals=3, scoring='r2')
    gtrained = gtrainer.fit(self.train_X, self.train_y)
    assert isinstance(gtrained.get_pipeline(), TrainedOperator)
except ValueError:
    # None of the trials succeeded
    pass

try:
    strainer = Hyperopt(estimator=sample, max_evals=3, scoring='r2')
    strained = strainer.fit(self.train_X, self.train_y)
    assert isinstance(strained.get_pipeline(), TrainedOperator)
except ValueError:
    # None of the trials succeeded
    pass

github IBM / lale / test / test_optimizers.py
def test_with_Hyperopt_2(self):
    from lale.lib.sklearn import LogisticRegression as LR
    from lale.lib.sklearn import KNeighborsClassifier as KNN
    from lale.lib.lale import Hyperopt
    choice = LR | KNN
    best = choice.auto_configure(self.X_train, self.y_train,
                                 optimizer=Hyperopt, cv=3, max_evals=3)
    predictions = best.predict(self.X_test)