How to use the coremltools.proto.Model_pb2.Model class in coremltools

To help you get started, we’ve selected a few coremltools examples, based on popular ways it is used in public projects.

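Under the hood, Model_pb2.Model is the protobuf message that backs every .mlmodel file. The common pattern in the snippets below is to create an instance, set specificationVersion, fill in the description, and populate exactly one model-type field before wrapping or saving it. A minimal sketch of that pattern (the feature names and version number are illustrative, not taken from the projects below):

import coremltools
from coremltools.proto import FeatureTypes_pb2, Model_pb2

spec = Model_pb2.Model()
spec.specificationVersion = 3  # illustrative; use the version your features require

# Declare one multi-array input on the model description
inp = spec.description.input.add()
inp.name = 'features'  # hypothetical feature name
inp.type.multiArrayType.dataType = FeatureTypes_pb2.ArrayFeatureType.FLOAT32
inp.type.multiArrayType.shape.append(4)

# Declare one plain double output
out = spec.description.output.add()
out.name = 'prediction'  # hypothetical feature name
out.type.doubleType.MergeFromString(b'')

# A real model also needs one model-type field populated, e.g. spec.pipeline,
# spec.neuralNetwork, or spec.treeEnsembleRegressor, as the examples below show.
coremltools.utils.save_spec(spec, 'my_model.mlmodel')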

github ANRGUSC / Jupiter / app_specific_files / demotest_backup_circe / scripts / utils / onnx2coreml.py
        spec.neuralNetwork.preprocessing[0].featureName = '0'

        yolov3_model.save(name + '.mlmodel')
        # yolov3_model.visualize_spec()
        print(spec.description)

        # 2.5. Try to Predict:
        from PIL import Image
        img = Image.open('../yolov3/data/samples/zidane_416.jpg')
        out = yolov3_model.predict({'0': img}, useCPUOnly=True)
        print(out[name_out0].shape, out[name_out1].shape)

        # 3. Create NMS protobuf
        import numpy as np

        nms_spec = coremltools.proto.Model_pb2.Model()
        nms_spec.specificationVersion = 3

        for i in range(2):
            decoder_output = yolov3_model._spec.description.output[i].SerializeToString()

            nms_spec.description.input.add()
            nms_spec.description.input[i].ParseFromString(decoder_output)

            nms_spec.description.output.add()
            nms_spec.description.output[i].ParseFromString(decoder_output)

        nms_spec.description.output[0].name = 'confidence'
        nms_spec.description.output[1].name = 'coordinates'

        output_sizes = [num_classes, 4]
        for i in range(2):

github yulingtianxia / AudioEmotion / Source / Turicreate_Fix / sound_classifier / sound_classifier.py
output_name=output_name)
                elif type(cur_layer) == _mx.gluon.nn.basic_layers.Dropout:
                    continue
                input_name = output_name

            last_output = builder.spec.neuralNetworkClassifier.layers[-1].output[0]
            builder.add_softmax('softmax', last_output, self.target)

            builder.set_class_labels(self.classes)
            builder.set_input([input_name], [(input_length,)])
            builder.set_output([self.target], [(self.num_classes,)])

            return builder.spec


        top_level_spec = coremltools.proto.Model_pb2.Model()
        top_level_spec.specificationVersion = 3

        # Set input
        desc = top_level_spec.description
        input = desc.input.add()
        input.name = self.feature
        input.type.multiArrayType.dataType = ArrayFeatureType.ArrayDataType.Value('FLOAT32')
        input.type.multiArrayType.shape.append(15600)

        # Set outputs
        prob_output = desc.output.add()
        prob_output.name = prob_name
        label_output = desc.output.add()
        label_output.name = 'classLabel'
        desc.predictedFeatureName = 'classLabel'
        desc.predictedProbabilitiesName = prob_name

github apple / coremltools / coremltools / models / tree_ensemble.py
    def __init__(self):
        """
        High level Python API to build a tree ensemble model for Core ML.
        """
        # Set inputs and outputs
        spec = _Model_pb2.Model()
        spec.specificationVersion = SPECIFICATION_VERSION

        # Save the spec in the protobuf
        self.spec = spec
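
The same constructor underpins the tree builders that coremltools documents (TreeEnsembleRegressor / TreeEnsembleClassifier). A hedged sketch of a single decision stump built with the regressor variant; the add_branch_node / add_leaf_node signatures are assumed from the coremltools documentation:

import coremltools
from coremltools.models.tree_ensemble import TreeEnsembleRegressor

# Predict 10.0 when feature 'a' <= 0.5, otherwise 20.0 (tree 0, nodes 0/1/2)
tm = TreeEnsembleRegressor(features=['a', 'b'], target='y')
tm.set_default_prediction_value(0.0)
tm.add_branch_node(0, 0, 0, 0.5, 'BranchOnValueLessThanEqual',
                   true_child_id=1, false_child_id=2)
tm.add_leaf_node(0, 1, 10.0)
tm.add_leaf_node(0, 2, 20.0)

# The builder keeps the raw spec (self.spec above), so it can be wrapped directly
mlmodel = coremltools.models.MLModel(tm.spec)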

github apple / coremltools / coremltools / models / pipeline.py
predictedFeatureName

        output_features: [list]
            A string or a list of two strings specifying the names of the two
            output features: the first is a class label corresponding to the
            class with the highest predicted score, and the second is a
            dictionary mapping each class to its score. If `output_features`
            is a string, it specifies the predicted class label, and the class
            scores output is set to the default name `"classProbability"`.
 
        """

        output_features = _feature_management.process_or_validate_classifier_output_features(
                output_features, class_labels)

        spec = _Model_pb2.Model()
        spec.specificationVersion = SPECIFICATION_VERSION
        spec = set_classifier_interface_params(spec, input_features,
                class_labels, 'pipelineClassifier', output_features, training_features)

        # Access this to declare it as a pipeline
        spec.pipelineClassifier

        # Save as a member variable
        self.spec = spec
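
From the outside, this constructor is what PipelineClassifier runs when you build a multi-stage classifier and append sub-model specs with add_model (as the one-hot-encoder converter below does). A hedged sketch with hypothetical feature names and labels:

from coremltools.models import datatypes
from coremltools.models.pipeline import PipelineClassifier

input_features = [('features', datatypes.Array(4))]  # hypothetical input
class_labels = ['cat', 'dog']                         # hypothetical labels

pipeline = PipelineClassifier(input_features, class_labels)

# Append each stage's spec in execution order, e.g. a transform then a classifier:
# pipeline.add_model(transform_spec)
# pipeline.add_model(classifier_spec)

print(pipeline.spec.WhichOneof('Type'))  # -> 'pipelineClassifier'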

github apple / coremltools / coremltools / models / utils.py
-------
    model_spec: Model_pb
        Protobuf representation of the model

    Examples
    --------
    .. sourcecode:: python

        >>> spec = coremltools.utils.load_spec('HousePricer.mlmodel')

    See Also
    --------
    save_spec
    """
    from ..proto import Model_pb2
    spec = Model_pb2.Model()

    with open(filename, 'rb') as f:
        contents = f.read()
        spec.ParseFromString(contents)
        return spec
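
The companion save_spec (listed under See Also) closes the loop, so a spec can be loaded, edited at the protobuf level, and written back out; a short round-trip sketch reusing the docstring's example file name:

import coremltools

spec = coremltools.utils.load_spec('HousePricer.mlmodel')
print(spec.specificationVersion)
print(spec.description)  # inputs, outputs, and metadata

spec.description.metadata.shortDescription = 'House price regressor'
coremltools.utils.save_spec(spec, 'HousePricer_edited.mlmodel')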

github apple / coremltools / coremltools / converters / sklearn / _one_hot_encoder.py
        if idx in _categorical_features:

            # This input column is one hot encoded
            feature_extractor_spec = create_array_feature_extractor(
                input_features, f_name, idx, output_type = 'Int64')

            pline.add_model(feature_extractor_spec)

            _cat_feature_idx = _cat_feature_idx_mapping[idx]

            ohe_input_features = [(f_name, datatypes.Int64())]
            ohe_output_features = [(f_name, datatypes.Dictionary('Int64'))]

            # Create a one hot encoder per column
            o_spec = _Model_pb2.Model()
            o_spec.specificationVersion = SPECIFICATION_VERSION
            o_spec = set_transform_interface_params(o_spec, ohe_input_features, ohe_output_features)

            ohe_spec = o_spec.oneHotEncoder
            ohe_spec.outputSparse = True

            if model.handle_unknown == 'error':
                ohe_spec.handleUnknown = _OHE_pb2.OneHotEncoder.HandleUnknown.Value('ErrorOnUnknown')
            else:
                ohe_spec.handleUnknown = _OHE_pb2.OneHotEncoder.HandleUnknown.Value('IgnoreUnknown')
            
            # Need to do a quick search to find the part of the active_features_ mask 
            # that represents the categorical variables in our part.  Could do this 
            # with binary search, but we probably don't need speed so much here.
            def bs_find(a, i):
                lb, k = 0, len(a)
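
Outside the sklearn converter, the same proto-level pattern builds a standalone one-hot-encoder spec; a minimal sketch that uses only fields appearing above (the column name is hypothetical, and no category list is filled in):

from coremltools.proto import Model_pb2, OneHotEncoder_pb2

o_spec = Model_pb2.Model()
o_spec.specificationVersion = 1  # SPECIFICATION_VERSION in coremltools

# An Int64 input column expands into a dictionary output keyed by category
inp = o_spec.description.input.add()
inp.name = 'color'
inp.type.int64Type.MergeFromString(b'')

out = o_spec.description.output.add()
out.name = 'color'
out.type.dictionaryType.int64KeyType.MergeFromString(b'')

ohe = o_spec.oneHotEncoder  # touching the field selects the model type
ohe.outputSparse = True
ohe.handleUnknown = OneHotEncoder_pb2.OneHotEncoder.HandleUnknown.Value('IgnoreUnknown')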

github apple / coremltools / coremltools / models / model.py
    def __init__(self, model, useInputAndOutputShapes=True):

        from ..libcoremlpython import _NeuralNetworkShaperProxy

        path = ''
        if isinstance(model, str):
            self._spec = _load_spec(model)
            path = model
        elif isinstance(model, _Model_pb2.Model):
            self._spec = model
            filename = _tempfile.mktemp(suffix='.mlmodel')
            _save_spec(model, filename)
            path = filename
        else:
            raise TypeError("Expected argument to be a path to a .mlmodel file or a Model_pb2.Model object")

        self._shaper = _NeuralNetworkShaperProxy(path, useInputAndOutputShapes)
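
Both accepted argument types in use, assuming the class shown here is exposed as NeuralNetworkShaper in coremltools.models.model (the .mlmodel path is hypothetical):

from coremltools.models.model import NeuralNetworkShaper
from coremltools.models.utils import load_spec

# From a path on disk
shaper = NeuralNetworkShaper('MyNetwork.mlmodel')

# Or from an already-loaded Model_pb2.Model spec
spec = load_spec('MyNetwork.mlmodel')
shaper_from_spec = NeuralNetworkShaper(spec, useInputAndOutputShapes=False)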

github apple / coremltools / coremltools / models / feature_vectorizer.py
        Feature indices in the final array are counted sequentially from 0
        through the total number of features.


    output_feature_name: str
        The name of the output feature.  The type of the output is an Array
        that concatenates the listed input features of the network.

    known_size_map: dict
        A dictionary mapping the feature name to the expanded size in the final 
        array.  This is most useful for specifying the size of sparse vectors 
        given as dictionaries of index to value.

    """

    spec = _Model_pb2.Model()
    spec.specificationVersion = SPECIFICATION_VERSION

    input_features = process_or_validate_features(input_features)

    feature_vectorizer = spec.featureVectorizer

    num_output_dimensions = 0
    
    for n, ft in input_features:
        if n in known_size_map:
            dim = known_size_map[n]

            if ft.num_elements is not None:
                if dim != ft.num_elements:
                    raise ValueError(("In feature %s, override size (%d) not "
                            "compatible with inherent value size (%d).") 

github apple / coremltools / coremltools / models / nearest_neighbors / builder.py
    def __init__(self, input_name, output_name, number_of_dimensions, default_class_label, **kwargs):
        """
        Create a KNearestNeighborsClassifierBuilder object.
        :param input_name: Name of the model input
        :param output_name: Name of the output
        :param number_of_dimensions: Number of dimensions of the input data
        :param default_class_label: The default class label to use for predictions. Must be either an int64 or a string.
        :param number_of_neighbors: Number of neighbors to use for predictions. Default = 5 with allowed values between 1-1000.
        :param weighting_scheme: Weight function used in prediction. One of 'uniform' (default) or 'inverse_distance'
        :param index_type: Algorithm to compute nearest neighbors. One of 'linear' (default), or 'kd_tree'.
        :param leaf_size: Leaf size for the kd-tree. Ignored if index type is 'linear'. Default = 30.
        """
        super(KNearestNeighborsClassifierBuilder, self).__init__()

        self.spec = coremltools.proto.Model_pb2.Model()
        self.spec.specificationVersion = coremltools._MINIMUM_NEAREST_NEIGHBORS_SPEC_VERSION

        # the model is initially empty - assume it's updatable
        self.is_updatable = True

        if number_of_dimensions <= 0:
            raise ValueError('number_of_dimensions must be greater than 0')
        self.spec.kNearestNeighborsClassifier.nearestNeighborsIndex.numberOfDimensions = number_of_dimensions

        input_type = kwargs.get(self._PARAMETER_KEY_INPUT_TYPE, self._PARAMETER_DEFAULT_INPUT_TYPE)
        input_feature_type = FeatureTypes_pb2.ArrayFeatureType.FLOAT32
        if input_type == datatypes.Double:
            input_feature_type = FeatureTypes_pb2.ArrayFeatureType.DOUBLE

        input_feature = self.spec.description.input.add()
        input_feature.name = input_name
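
Putting the documented parameters together, a hedged end-to-end sketch; the dimension, labels, and sample points are made up, and add_samples is the builder method assumed for populating the index:

import coremltools
from coremltools.models.nearest_neighbors import KNearestNeighborsClassifierBuilder

builder = KNearestNeighborsClassifierBuilder(input_name='embedding',
                                             output_name='label',
                                             number_of_dimensions=4,
                                             default_class_label='unknown',
                                             number_of_neighbors=3,
                                             weighting_scheme='inverse_distance',
                                             index_type='linear')

# A few reference points and their labels (assumed add_samples API)
builder.add_samples([[0.0, 0.0, 0.0, 0.0], [1.0, 1.0, 1.0, 1.0]],
                    ['cat', 'dog'])

mlmodel = coremltools.models.MLModel(builder.spec)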