How to use the onnxmltools.convert_sklearn function in onnxmltools

To help you get started, we’ve selected a few onnxmltools examples based on popular ways the library is used in public projects. Each snippet below comes from the onnxmltools test suite and shows convert_sklearn turning a fitted scikit-learn model into an ONNX model.

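If you want to see the call in isolation first, here is a minimal, self-contained sketch (not taken from the projects below) that converts a fitted scikit-learn model and saves the result. It assumes a version of onnxmltools where FloatTensorType can be imported from onnxmltools.convert.common.data_types; the LogisticRegression estimator and the output file name are just placeholders.

import numpy
from sklearn.linear_model import LogisticRegression

import onnxmltools
from onnxmltools.convert.common.data_types import FloatTensorType

# Fit any scikit-learn estimator as usual.
X = numpy.array([[0., 1.], [1., 1.], [2., 0.], [1., 0.]], dtype=numpy.float32)
y = numpy.array([0, 1, 0, 1])
model = LogisticRegression().fit(X, y)

# convert_sklearn takes the fitted model, a free-form model name, and the
# declared input types; [1, 2] means one row of two float features.
model_onnx = onnxmltools.convert_sklearn(
    model, 'logistic regression', [('input', FloatTensorType([1, 2]))])

# Persist the converted ONNX model to disk.
onnxmltools.utils.save_model(model_onnx, 'logistic_regression.onnx')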

From onnx/onnxmltools, tests/sklearn/test_TreeEnsembleConverters.py:
def _test_single_output_core(self, model):
        X = [[0, 1], [1, 1], [2, 0]]
        y = [100, -10, 50]
        model.fit(X, y)
        model_onnx = convert_sklearn(model, 'tree-based regressor', [('input', Int64TensorType([1, 2]))])
        self.assertTrue(model_onnx is not None)

From onnx/onnxmltools, tests/sklearn/test_SklearnPipeline.py:
def test_pipeline(self):
        from sklearn.preprocessing import StandardScaler
        from sklearn.pipeline import Pipeline

        data = numpy.array([[0, 0], [0, 0], [1, 1], [1, 1]], dtype=numpy.float32)
        scaler = StandardScaler()
        scaler.fit(data)
        model = Pipeline([('scaler1', scaler), ('scaler2', scaler)])

        model_onnx = convert_sklearn(model, 'pipeline', [('input', FloatTensorType([1, 2]))])
        self.assertTrue(model_onnx is not None)
        dump_data_and_model(data, model, model_onnx, basename="SklearnPipelineScaler")

From onnx/onnxmltools, tests/sklearn/test_OneHotEncoderConverter.py:
def test_one_hot_encoder_mixed_float_int(self):
        # categorical_features will be removed in 0.22 (this test will fail by then).
        model = OneHotEncoder()
        model.fit([[0.4, 0.2, 3], [1.4, 1.2, 0], [0.2, 2.2, 1]])
        model_onnx = convert_sklearn(model, 'one-hot encoder mixed-type inputs',
                               [('input1', FloatTensorType([1, 2])), ('input2', Int64TensorType([1, 1]))])
        self.assertTrue(model_onnx is not None)

From onnx/onnxmltools, tests/sklearn/test_TreeEnsembleConverters.py:
def _test_binary_classification_core(self, model):
        X = [[0, 1], [1, 1], [2, 0]]
        y = ['A', 'B', 'A']
        model.fit(X, y)
        model_onnx = convert_sklearn(model, 'tree-based binary classifier', [('input', Int64TensorType([1, 2]))])
        self.assertTrue(model_onnx is not None)

From onnx/onnxmltools, tests/sklearn/test_SklearnScalerConverter.py:
def test_min_max_scaler(self):
        model = MinMaxScaler()
        data = [[0., 0., 3.], [1., 1., 0.], [0., 2., 1.], [1., 0., 2.]]
        model.fit(data)
        model_onnx = convert_sklearn(model, 'scaler', [('input', FloatTensorType([1, 3]))])
        self.assertTrue(model_onnx is not None)
        dump_data_and_model(numpy.array(data, dtype=numpy.float32),
                            model, model_onnx, basename="SklearnMinMaxScaler")
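
The tests in this list hand the converted model to dump_data_and_model, which (roughly speaking) serializes the test inputs and compares scikit-learn's predictions against onnxruntime's output. Below is a minimal sketch of doing that comparison by hand, assuming onnxruntime is installed; the explicit CPUExecutionProvider argument is only needed on newer onnxruntime releases.

import numpy
import onnxruntime
from sklearn.preprocessing import MinMaxScaler

from onnxmltools import convert_sklearn
from onnxmltools.convert.common.data_types import FloatTensorType

data = numpy.array([[0., 0., 3.], [1., 1., 0.], [0., 2., 1.], [1., 0., 2.]],
                   dtype=numpy.float32)
model = MinMaxScaler().fit(data)
model_onnx = convert_sklearn(model, 'scaler', [('input', FloatTensorType([1, 3]))])

# Run one row (matching the declared [1, 3] input shape) through onnxruntime
# and check it against the scikit-learn output.
sess = onnxruntime.InferenceSession(model_onnx.SerializeToString(),
                                    providers=['CPUExecutionProvider'])
input_name = sess.get_inputs()[0].name
onnx_pred = sess.run(None, {input_name: data[:1]})[0]
numpy.testing.assert_allclose(onnx_pred, model.transform(data[:1]), rtol=1e-5)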

From onnx/onnxmltools, tests/sklearn/test_SklearnPipeline.py:
def test_combine_inputs_floats_ints(self):
        from sklearn.preprocessing import StandardScaler
        from sklearn.pipeline import Pipeline

        data = [[0, 0.], [0, 0.], [1, 1.], [1, 1.]]
        scaler = StandardScaler()
        scaler.fit(data)
        model = Pipeline([('scaler1', scaler), ('scaler2', scaler)])

        model_onnx = convert_sklearn(model, 'pipeline',
                                     [('input1', Int64TensorType([1, 1])),
                                      ('input2', FloatTensorType([1, 1]))])
        self.assertTrue(len(model_onnx.graph.node[-1].output) == 1)
        self.assertTrue(model_onnx is not None)
        data = numpy.array(data)
        data = {'input1': data[:, 0].astype(numpy.int64), 
                'input2': data[:, 1].astype(numpy.float32)}
        dump_data_and_model(data, PipeConcatenateInput(model), model_onnx,
                            basename="SklearnPipelineScalerMixed-OneOff")

From onnx/onnxmltools, tests/sklearn/test_SklearnNaiveBayesConverter.py:
def test_model_multinomial_nb_binary_classification(self):
        model, X = self._fit_model_binary_classification(MultinomialNB())
        model_onnx = convert_sklearn(model, 'multinomial naive bayes', [('input', FloatTensorType([1, 4]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(X, model, model_onnx, basename="SklearnBinMultinomialNB-OneOff",
                            allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.3')")

From onnx/onnxmltools, tests/sklearn/test_SklearnSVMConverters.py:
def test_registration_convert_svc_model(self):
        model, X = self._fit_binary_classification(SVC(kernel='linear', probability=True))
        model_onnx = onnxmltools.convert_sklearn(model, 'SVC', [('input', FloatTensorType([1, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(X, model, model_onnx, basename="SklearnBinNuSVCPT")

From onnx/onnxmltools, tests/sklearn/test_SklearnNaiveBayesConverter.py:
def test_model_multinomial_nb_multiclass(self):
        model, X = self._fit_model_multiclass_classification(MultinomialNB())
        model_onnx = convert_sklearn(model, 'multinomial naive bayes', [('input', FloatTensorType([1, 4]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(X, model, model_onnx, basename="SklearnMclMultinomialNB-OneOff",
                            allow_failure="StrictVersion(onnxruntime.__version__) <= StrictVersion('0.1.3')")

From onnx/onnxmltools, tests/sklearn/test_SklearnGLMRegressorConverter.py:
def test_model_knn_regressor(self):
        model, X = self._fit_model(KNeighborsRegressor(n_neighbors=2))
        model_onnx = convert_sklearn(model, 'KNN regressor', [('input', FloatTensorType([1, 4]))])
        self.assertIsNotNone(model_onnx)
        # dump_data_and_model(X.astype(numpy.float32), model, model_onnx, basename="KNeighborsRegressor")