How to use the mleap.sklearn.preprocessing.data.MinMaxScaler class in mleap

To help you get started, we’ve selected a few MinMaxScaler examples, drawn from popular public projects that use mleap.

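All of the snippets below come from the same unittest test class and rely on a shared fixture that this page does not show: a pandas DataFrame self.df with numeric columns 'a' and 'b', plus a scratch directory self.tmp_dir that receives the serialized bundle. A minimal sketch of that fixture, assuming random data and the import path from the page title (the class name and data shape are placeholders):

import shutil
import tempfile
import unittest

import numpy as np
import pandas as pd

# Importing from mleap.sklearn.preprocessing.data yields sklearn's
# transformers extended with mlinit(), serialize_to_bundle() and
# deserialize_from_bundle().
from mleap.sklearn.preprocessing.data import FeatureExtractor, MinMaxScaler


class TransformerTests(unittest.TestCase):

    def setUp(self):
        # Random numeric data; the tests below only touch columns 'a' and 'b'.
        self.df = pd.DataFrame(np.random.randn(10, 2), columns=['a', 'b'])
        # Scratch directory that will hold the serialized MLeap bundle.
        self.tmp_dir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tmp_dir)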

From combust/mleap: python/mleap/sklearn/preprocessing/tests.py (view on GitHub)
    def test_min_max_scaler_deserializer(self):

        extract_features = ['a']
        feature_extractor = FeatureExtractor(input_scalars=['a'],
                                             output_vector='extracted_a_output',
                                             output_vector_items=["{}_out".format(x) for x in extract_features])

        scaler = MinMaxScaler()
        scaler.mlinit(prior_tf=feature_extractor,
                      output_features='a_scaled')

        scaler.fit(self.df[['a']])

        scaler.serialize_to_bundle(self.tmp_dir, scaler.name)

        # Deserialize the MinMaxScaler
        node_name = "{}.node".format(scaler.name)
        min_max_scaler_tf = MinMaxScaler()
        min_max_scaler_tf.deserialize_from_bundle(self.tmp_dir, node_name)

        # Transform some sample data
        res_a = scaler.transform(self.df[['a']])
        res_b = min_max_scaler_tf.transform(self.df[['a']])

        self.assertEqual(res_a[0], res_b[0])

        self.assertEqual(scaler.name, min_max_scaler_tf.name)
        self.assertEqual(scaler.op, min_max_scaler_tf.op)
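
The assertions compare the in-memory scaler against its deserialized copy. Exact float equality is safe here because both transformers apply identical fitted parameters to identical inputs; if you adapt this pattern to anything involving recomputation, numpy.testing.assert_array_almost_equal is the more robust comparison.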

From combust/mleap: python/mleap/sklearn/preprocessing/tests.py (view on GitHub)
    def test_min_max_scaler_multi_deserializer(self):

        extract_features = ['a', 'b']
        feature_extractor = FeatureExtractor(input_scalars=['a', 'b'],
                                             output_vector='extracted_multi_outputs',
                                             output_vector_items=["{}_out".format(x) for x in extract_features])

        scaler = MinMaxScaler()
        scaler.mlinit(prior_tf=feature_extractor,
                      output_features=['a_scaled', 'b_scaled'])

        scaler.fit(self.df[['a', 'b']])  # fit on the same columns that are transformed below

        scaler.serialize_to_bundle(self.tmp_dir, scaler.name)

        # Deserialize the MinMaxScaler
        node_name = "{}.node".format(scaler.name)
        min_max_scaler_tf = MinMaxScaler()
        min_max_scaler_tf.deserialize_from_bundle(self.tmp_dir, node_name)

        # Transform some sample data
        res_a = scaler.transform(self.df[['a', 'b']])
        res_b = min_max_scaler_tf.transform(self.df[['a', 'b']])

        self.assertEqual(res_a[0][0], res_b[0][0])
        self.assertEqual(res_a[0][1], res_b[0][1])

        self.assertEqual(scaler.name, min_max_scaler_tf.name)
        self.assertEqual(scaler.op, min_max_scaler_tf.op)
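
With several input scalars, output_features takes a list with one name per column, and the scaler has to be fit on the same set of columns it later transforms.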

From combust/mleap: python/mleap/sklearn/preprocessing/tests.py (view on GitHub)
    def test_min_max_scaler_serializer(self):

        extract_features = ['a']
        feature_extractor = FeatureExtractor(input_scalars=['a'],
                                             output_vector='extracted_a_output',
                                             output_vector_items=["{}_out".format(x) for x in extract_features])

        scaler = MinMaxScaler()
        scaler.mlinit(prior_tf=feature_extractor,
                      output_features='a_scaled')

        scaler.fit(self.df[['a']])

        scaler.serialize_to_bundle(self.tmp_dir, scaler.name)

        expected_min = self.df.a.min()
        expected_max = self.df.a.max()

        expected_model = {
            "op": "min_max_scaler",
            "attributes": {
                "min": {
                    "double": [expected_min],
                    "shape": {
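
Finally, the same round trip outside the test harness: a minimal, self-contained sketch that uses only the calls demonstrated above (the DataFrame contents, bundle directory, and output names are placeholder choices):

import tempfile

import pandas as pd

from mleap.sklearn.preprocessing.data import FeatureExtractor, MinMaxScaler

df = pd.DataFrame({'a': [1.0, 2.0, 3.0], 'b': [10.0, 20.0, 30.0]})
tmp_dir = tempfile.mkdtemp()

# Wire the scaler to its upstream feature extractor and name its outputs.
feature_extractor = FeatureExtractor(input_scalars=['a', 'b'],
                                     output_vector='extracted_ab',
                                     output_vector_items=['a_out', 'b_out'])
scaler = MinMaxScaler()
scaler.mlinit(prior_tf=feature_extractor,
              output_features=['a_scaled', 'b_scaled'])

# Fit, write the MLeap bundle, then read it back into a fresh transformer.
scaler.fit(df[['a', 'b']])
scaler.serialize_to_bundle(tmp_dir, scaler.name)

restored = MinMaxScaler()
restored.deserialize_from_bundle(tmp_dir, '{}.node'.format(scaler.name))

# Both transformers should produce identical scaled values.
print(scaler.transform(df[['a', 'b']]))
print(restored.transform(df[['a', 'b']]))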