How to use the lale.type_checking module in lale

To help you get started, we’ve selected a few lale.type_checking examples, based on popular ways it is used in public projects.
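lale.type_checking is Lale's JSON Schema layer: validate_is_schema checks that a dictionary is a well-formed schema, is_schema tests whether a value is a schema at all, and is_subschema decides containment between two schemas. A minimal sketch of the basics (the schema below is made up for illustration):

import lale.type_checking

# A hand-written JSON schema for a 2-D numeric dataset (illustrative).
s_X = {
    'type': 'array',
    'items': {'type': 'array', 'items': {'type': 'number'}}}

# Raises an exception if s_X is not a well-formed JSON schema.
lale.type_checking.validate_is_schema(s_X)

# Subschema containment: every integer array is also a number array.
assert lale.type_checking.is_subschema(
    {'type': 'array', 'items': {'type': 'integer'}},
    {'type': 'array', 'items': {'type': 'number'}})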

github IBM / lale / test / test_interoperability.py
def test_resampler(self):
    from lale.lib.sklearn import PCA, Nystroem, LogisticRegression, RandomForestClassifier
    from lale.lib.lale import NoOp, ConcatFeatures
    import lale.type_checking
    X_train, y_train = self.X_train, self.y_train
    X_test, y_test = self.X_test, self.y_test
    # res_name holds a fully qualified operator name, e.g. 'lale.lib.imblearn.SMOTE'.
    import importlib
    module_name = ".".join(res_name.split('.')[:-1])
    class_name = res_name.split('.')[-1]
    module = importlib.import_module(module_name)

    class_ = getattr(module, class_name)
    # Constructing a resampler without an operator argument is an error.
    with self.assertRaises(ValueError):
        res = class_()

    # test_schemas_are_schemas
    lale.type_checking.validate_is_schema(class_.input_schema_fit())
    lale.type_checking.validate_is_schema(class_.input_schema_predict())
    lale.type_checking.validate_is_schema(class_.output_schema_predict())
    lale.type_checking.validate_is_schema(class_.hyperparam_schema())

    # test_init_fit_predict
    from lale.operators import make_pipeline
    pipeline1 = PCA() >> class_(operator=make_pipeline(LogisticRegression()))
    trained = pipeline1.fit(X_train, y_train)
    predictions = trained.predict(X_test)

    pipeline2 = class_(operator=make_pipeline(PCA(), LogisticRegression()))
    trained = pipeline2.fit(X_train, y_train)
    predictions = trained.predict(X_test)

    # test_with_hyperopt
    from lale.lib.lale import Hyperopt
    optimizer = Hyperopt(estimator=PCA >> class_(operator=make_pipeline(LogisticRegression())),
                         max_evals=1, show_progressbar=False)
    trained_optimizer = optimizer.fit(X_train, y_train)
    predictions = trained_optimizer.predict(X_test)
github IBM / lale / test / test_nlp_operators.py
def test_encoder(self):
    import importlib
    import lale.type_checking
    # encoder_name holds the fully qualified name of the encoder operator under test.
    module_name = ".".join(encoder_name.split('.')[:-1])
    class_name = encoder_name.split('.')[-1]
    module = importlib.import_module(module_name)

    class_ = getattr(module, class_name)
    encoder = class_()

    # test_schemas_are_schemas
    lale.type_checking.validate_is_schema(encoder.input_schema_fit())
    lale.type_checking.validate_is_schema(encoder.input_schema_transform())
    lale.type_checking.validate_is_schema(encoder.output_schema_transform())
    lale.type_checking.validate_is_schema(encoder.hyperparam_schema())

    # test_init_fit_transform
    trained = encoder.fit(self.X_train, self.y_train)
    transformed = trained.transform(self.X_train)
github IBM / lale / test / test_core_operators.py
def test_classifier(self):
    import importlib
    import lale.type_checking
    X_train, y_train = self.X_train, self.y_train
    X_test, y_test = self.X_test, self.y_test
    # clf_name holds a fully qualified operator name, e.g. 'lale.lib.sklearn.LogisticRegression'.
    module_name = ".".join(clf_name.split('.')[:-1])
    class_name = clf_name.split('.')[-1]
    module = importlib.import_module(module_name)

    class_ = getattr(module, class_name)
    clf = class_()

    # test_schemas_are_schemas
    lale.type_checking.validate_is_schema(clf.input_schema_fit())
    lale.type_checking.validate_is_schema(clf.input_schema_predict())
    lale.type_checking.validate_is_schema(clf.output_schema_predict())
    lale.type_checking.validate_is_schema(clf.hyperparam_schema())

    # test_init_fit_predict
    trained = clf.fit(X_train, y_train)
    predictions = trained.predict(X_test)

    # test_with_hyperopt
    from lale.lib.lale import Hyperopt
    hyperopt = Hyperopt(estimator=clf, max_evals=1)
    trained = hyperopt.fit(X_train, y_train)
    predictions = trained.predict(X_test)

    # test_cross_validation
    from lale.helpers import cross_val_score
    cv_results = cross_val_score(clf, X_train, y_train, cv=2)
    self.assertEqual(len(cv_results), 2)
github IBM / lale / lale / lib / lale / project.py
def transform_schema(self, s_X):
    """Used internally by Lale for type-checking downstream operators."""
    if hasattr(self, '_col_tfm'):
        return self._transform_schema_col_tfm(s_X, self._col_tfm)
    columns = self._hyperparams['columns']
    if lale.type_checking.is_schema(columns):
        return self._transform_schema_schema(s_X, columns)
    if not lale.type_checking.is_schema(s_X):
        X = lale.datasets.data_schemas.add_schema(s_X)
        self.fit(X)
        return self._transform_schema_col_tfm(X.json_schema, self._col_tfm)
    return s_X
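Project's transform_schema leans on lale.type_checking.is_schema to decide whether it was handed a schema or actual data. A quick sketch of that distinction (illustrative values):

import lale.type_checking

# A dict built from JSON Schema keywords is recognized as a schema...
assert lale.type_checking.is_schema({'type': 'array', 'items': {'type': 'number'}})
# ...while plain data, such as a nested list, is not.
assert not lale.type_checking.is_schema([[1.0, 2.0], [3.0, 4.0]])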
github IBM / lale / lale / operators.py
# In the enclosing method, `arg` is the value passed for `arg_name` (such as X
# or y) and `method` names the operation whose input is being validated.
if not lale.helpers.is_empty_dict(arg):
    if method == 'fit' or method == 'partial_fit':
        schema = self.input_schema_fit()
    elif method == 'transform':
        schema = self.input_schema_transform()
    elif method == 'predict':
        schema = self.input_schema_predict()
    elif method == 'predict_proba':
        schema = self.input_schema_predict_proba()
    elif method == 'decision_function':
        schema = self.input_schema_decision_function()
    if 'properties' in schema and arg_name in schema['properties']:
        arg = lale.datasets.data_schemas.add_schema(arg)
        try:
            sup = schema['properties'][arg_name]
            lale.type_checking.validate_schema_or_subschema(arg, sup)
        except Exception as e:
            raise ValueError(f'{self.name()}.{method}() invalid {arg_name}: {e}') from e
return arg
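The check itself is lale.type_checking.validate_schema_or_subschema, which takes either raw data or a schema as its first argument. A minimal sketch of both outcomes (assuming raw lists are acceptable input, as in the excerpt above):

import lale.type_checking

s_number_array = {'type': 'array', 'items': {'type': 'number'}}
# Conforming data passes silently...
lale.type_checking.validate_schema_or_subschema([1.0, 2.5], s_number_array)
# ...while non-conforming data raises an exception.
try:
    lale.type_checking.validate_schema_or_subschema(['a', 'b'], s_number_array)
except Exception as e:
    print('rejected:', e)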
github IBM / lale / lale / operators.py
def is_supervised(self, default_if_missing=True) -> bool:
    if hasattr(self._impl, 'fit'):
        schema_fit = self.input_schema_fit()
        return lale.type_checking.is_subschema(schema_fit, _is_supervised_schema)
    return default_if_missing
github IBM / lale / lale / operators.py
    name : String
        Name of the operator.
    impl :
        An instance of the operator implementation class, i.e., a class that
        provides fit and predict/transform methods implementing the underlying
        algorithm.
    schemas : dict
        A dictionary of JSON schemas for the operator.
    """
    self._impl = impl
    self._name = name
    self._enum_attributes = None
    if schemas:
        self._schemas = schemas
    else:
        self._schemas = lale.type_checking.get_default_schema(impl)
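The else branch is where lale.type_checking.get_default_schema comes in: an operator registered without hand-written schemas gets a permissive default derived from the implementation object. A hedged sketch, with a made-up minimal impl class (the exact contents of the returned schema depend on which methods the impl defines):

import lale.type_checking

class _MyTransformerImpl:
    # Hypothetical bare-bones implementation with the usual fit/transform pair.
    def fit(self, X, y=None):
        return self
    def transform(self, X):
        return X

schemas = lale.type_checking.get_default_schema(_MyTransformerImpl())
# Expect a combined schema object with per-method sub-schemas under 'properties'.
print(sorted(schemas['properties'].keys()))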
github IBM / lale / lale / datasets / data_schemas.py
def shape_and_dtype_to_schema(shape, dtype):
    result = dtype_to_schema(dtype)
    for dim in reversed(shape):
        result = {
            'type': 'array',
            'minItems': dim,
            'maxItems': dim,
            'items': result}
    lale.type_checking.validate_is_schema(result)
    return result
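Tracing the loop for a concrete shape shows how the nesting builds up from the innermost dimension outward. A sketch, assuming dtype_to_schema maps a float dtype to {'type': 'number'}:

import numpy as np
from lale.datasets.data_schemas import shape_and_dtype_to_schema

s = shape_and_dtype_to_schema((10, 3), np.dtype('float64'))
# 10 rows, each an array of exactly 3 numbers.
assert s == {
    'type': 'array', 'minItems': 10, 'maxItems': 10,
    'items': {'type': 'array', 'minItems': 3, 'maxItems': 3,
              'items': {'type': 'number'}}}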
github IBM / lale / lale / lib / lale / concat_features.py
"""Used internally by Lale for type-checking downstream operators."""
        min_cols, max_cols, elem_schema = 0, 0, None
        def add_ranges(min_a, max_a, min_b, max_b):
            min_ab = min_a + min_b
            if max_a == 'unbounded' or max_b == 'unbounded':
                max_ab = 'unbounded'
            else:
                max_ab = max_a + max_b
            return min_ab, max_ab
        for s_dataset in s_X['items']:
            if s_dataset.get('laleType', None) == 'Any':
                return {'laleType': 'Any'}
            arr_1d_num = {'type': 'array', 'items': {'type': 'number'}}
            arr_2d_num = {'type': 'array', 'items': arr_1d_num}
            s_decision_func = {'anyOf': [arr_1d_num, arr_2d_num]}
            if lale.type_checking.is_subschema(s_decision_func, s_dataset):
                s_dataset = arr_2d_num
            assert 'items' in s_dataset, lale.pretty_print.to_string(s_dataset)
            s_rows = s_dataset['items']
            if 'type' in s_rows and 'array' == s_rows['type']:
                s_cols = s_rows['items']
                if isinstance(s_cols, dict):
                    min_c = s_rows['minItems'] if 'minItems' in s_rows else 1
                    max_c = s_rows['maxItems'] if 'maxItems' in s_rows else 'unbounded'
                    elem_schema = lale.type_checking.join_schemas(elem_schema, s_cols)
                else:
                    min_c, max_c = len(s_cols), len(s_cols)
                    for s_col in s_cols:
                        elem_schema = lale.type_checking.join_schemas(elem_schema, s_col)
                min_cols, max_cols = add_ranges(min_cols,max_cols,min_c,max_c)
            else:
                elem_schema = lale.type_checking.join_schemas(elem_schema, s_rows)
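Throughout this loop, lale.type_checking.join_schemas computes a least upper bound of two schemas, which is what lets ConcatFeatures summarize heterogeneous columns with a single element schema. A minimal sketch (the exact joined form is up to the library):

import lale.type_checking

s_int = {'type': 'integer'}
s_num = {'type': 'number'}
joined = lale.type_checking.join_schemas(s_int, s_num)
# Each input is contained in the join.
assert lale.type_checking.is_subschema(s_int, joined)
assert lale.type_checking.is_subschema(s_num, joined)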