How to use the coremltools.models module in coremltools

To help you get started, we’ve selected a few coremltools.models examples based on popular ways it is used in public projects.
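Before the project snippets, here is a minimal, generic sketch of the coremltools.models.MLModel wrapper; the file names are placeholders, not taken from the examples below:

import coremltools

# Wrap an existing Core ML model file and inspect its protobuf spec.
model = coremltools.models.MLModel('MyModel.mlmodel')   # placeholder path
spec = model.get_spec()
print(spec.WhichOneof('Type'))                           # e.g. 'neuralNetwork'

# Edit metadata through the wrapper and save a copy.
model.short_description = 'Example model'
model.save('MyModel_copy.mlmodel')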


github apache/incubator-tvm: tests/python/frontend/coreml/test_forward.py
    a_np1 = np.random.uniform(size=input_dim).astype(dtype)
    a_np2 = np.random.uniform(size=input_dim).astype(dtype)
    a_np3 = np.random.uniform(size=input_dim).astype(dtype)

    b_np = np.min((a_np1, a_np2, a_np3), axis=0)

    inputs = [('input1', datatypes.Array(*input_dim)),
              ('input2', datatypes.Array(*input_dim)),
              ('input3', datatypes.Array(*input_dim))]
    output = [('output', datatypes.Array(*b_np.shape))]
    builder = NeuralNetworkBuilder(inputs, output)
    builder.add_elementwise(name='Min',
                            input_names=['input1', 'input2', 'input3'],
                            output_name='output',
                            mode='MIN')
    model = cm.models.MLModel(builder.spec)
    for target, ctx in ctx_list():
        out = run_tvm_graph(model, target, ctx, [a_np1, a_np2, a_np3],
                            ['input1', 'input2', 'input3'], b_np.shape, dtype)
        tvm.testing.assert_allclose(out, b_np, rtol=1e-5)
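For comparison, the same MLModel could be evaluated directly through coremltools instead of TVM. This is a hedged sketch, not part of the original test, and prediction requires macOS with Core ML available:

# Evaluate the Core ML model directly (macOS only), bypassing TVM.
out_coreml = model.predict({'input1': a_np1, 'input2': a_np2, 'input3': a_np3})
np.testing.assert_allclose(out_coreml['output'], b_np, rtol=1e-5)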
github ANRGUSC/Jupiter: app_specific_files/demotest_backup_circe/scripts/utils/onnx2coreml.py
        pipeline.spec.description.output[1].ParseFromString(nms_model._spec.description.output[1].SerializeToString())

        # Update metadata
        pipeline.spec.description.metadata.versionString = 'yolov3-tiny.pt imported from PyTorch'
        pipeline.spec.description.metadata.shortDescription = 'https://github.com/ultralytics/yolov3'
        pipeline.spec.description.metadata.author = 'glenn.jocher@ultralytics.com'
        pipeline.spec.description.metadata.license = 'https://github.com/ultralytics/yolov3'

        user_defined_metadata = {'classes': ','.join(labels),
                                 'iou_threshold': str(nms.iouThreshold),
                                 'confidence_threshold': str(nms.confidenceThreshold)}
        pipeline.spec.description.metadata.userDefined.update(user_defined_metadata)

        # Save the model
        pipeline.spec.specificationVersion = 3
        final_model = coremltools.models.MLModel(pipeline.spec)
        final_model.save((name + '_pipelined.mlmodel'))
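After saving, the pipelined model can be reloaded to confirm the metadata written above; a small sketch (not part of the original script) following the same naming pattern:

# Reload the saved pipeline and print the metadata set earlier.
reloaded = coremltools.models.MLModel(name + '_pipelined.mlmodel')
meta = reloaded.get_spec().description.metadata
print(meta.shortDescription)
print(dict(meta.userDefined))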
github ANRGUSC/Jupiter: app_specific_files/demotest_backup_circe/scripts/utils/onnx2coreml.py
        nms = nms_spec.nonMaximumSuppression
        nms.confidenceInputFeatureName = name_out0  # 1x507x80
        nms.coordinatesInputFeatureName = name_out1  # 1x507x4
        nms.confidenceOutputFeatureName = 'confidence'
        nms.coordinatesOutputFeatureName = 'coordinates'
        nms.iouThresholdInputFeatureName = 'iouThreshold'
        nms.confidenceThresholdInputFeatureName = 'confidenceThreshold'

        nms.iouThreshold = 0.4
        nms.confidenceThreshold = 0.5
        nms.pickTop.perClass = True

        labels = np.loadtxt('../yolov3/data/coco.names', dtype=str, delimiter='\n')
        nms.stringClassLabels.vector.extend(labels)

        nms_model = coremltools.models.MLModel(nms_spec)
        nms_model.save(name + '_nms.mlmodel')

        # out_nms = nms_model.predict({
        #     '143': out['143'].squeeze().reshape((80, 507)),
        #     '144': out['144'].squeeze().reshape((4, 507))
        # })
        # print(out_nms['confidence'].shape, out_nms['coordinates'].shape)

        # # # 3.5 Add Softmax model
        # from coremltools.models import datatypes
        # from coremltools.models import neural_network
        #
        # input_features = [
        #     ("141", datatypes.Array(num_anchors, num_classes, 1)),
        #     ("143", datatypes.Array(num_anchors, 4, 1))
        # ]
github dmlc/nnvm: python/nnvm/frontend/coreml.py
        coremltools.models.MLModel of a NeuralNetworkClassifier

    Returns
    -------
    sym : nnvm.Symbol
        Compatible nnvm symbol

    params : dict of str to tvm.NDArray
        The parameter dict to be used by nnvm
    """
    try:
        import coremltools as cm
    except ImportError:
        raise ImportError('The coremltools package must be installed')

    assert isinstance(model, cm.models.MLModel)
    spec = model.get_spec()
    modeltype = spec.WhichOneof('Type')
    assert modeltype in ['neuralNetworkClassifier', 'neuralNetwork', 'neuralNetworkRegressor']
    cc = getattr(spec, modeltype)

    symtab = SymbolTable()
    for i in spec.description.input:
        symtab.get_var(i.name, must_contain=False)

    for pp in cc.preprocessing:
        whichpp = pp.WhichOneof('preprocessor')
        ppmethod = getattr(pp, whichpp)
        # the NeuralNetworkImageScalar doesn't seem to have a featureName?
        if whichpp == 'scaler':
            for i in spec.description.input:
                coreml_op_to_nnvm(ppmethod, i.name, i.name, symtab)
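This converter backs nnvm.frontend.from_coreml; a hedged usage sketch with a placeholder model path (note that nnvm has since been superseded by TVM's Relay frontend):

import coremltools
import nnvm.frontend

# Load a Core ML model and convert it to an NNVM symbol plus parameter dict.
mlmodel = coremltools.models.MLModel('model.mlmodel')   # placeholder path
sym, params = nnvm.frontend.from_coreml(mlmodel)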
github cloud-annotations/training: trainer/src/convert/build_decoder.py
        end_index=2)

    builder.add_elementwise(
        name="concat",
        input_names=["slice_x_output", "slice_y_output", "slice_w_output", "slice_h_output"],
        output_name="concat_output",
        mode="CONCAT")

    # (4, num_anchors, 1) --> (1, num_anchors, 4)
    builder.add_permute(
        name="permute_output",
        dim=(0, 3, 2, 1),
        input_name="concat_output",
        output_name="raw_coordinates")

    return coremltools.models.MLModel(builder.spec)
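The returned MLModel's declared interface can be read back from its spec; a short sketch, where decoder_model is just a local name for the value returned above:

# Inspect the decoder model's declared outputs, e.g. 'raw_coordinates'.
decoder_model = coremltools.models.MLModel(builder.spec)
for out in decoder_model.get_spec().description.output:
    print(out.name, out.type.WhichOneof('Type'))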
github apple/turicreate: src/python/turicreate/toolkits/activity_classifier/_activity_classifier.py
        prob_name = self.target + 'Probability'
        label_name = self.target

        input_features = [
            ('features', _cmt.models.datatypes.Array(*(1, self.prediction_window, self.num_features)))
        ]
        output_features = [
            (prob_name, _cmt.models.datatypes.Array(*(self.num_classes,)))
        ]

        model_params = self._pred_model.get_params()
        weights = {k: v.asnumpy() for k, v in model_params[0].items()}
        weights = _mx.rnn.LSTMCell(num_hidden=_net_params['lstm_h']).unpack_weights(weights)
        moving_weights = {k: v.asnumpy() for k, v in model_params[1].items()}

        builder = _cmt.models.neural_network.NeuralNetworkBuilder(
            input_features,
            output_features,
            mode='classifier'
        )

        # Conv
        # (1,1,W,C) -> (1,C,1,W)
        builder.add_permute(name='permute_layer', dim=(0, 3, 1, 2),
                            input_name='features', output_name='conv_in')
        W = _np.expand_dims(weights['conv_weight'], axis=0).transpose((2, 3, 1, 0))
        builder.add_convolution(name='conv_layer',
                                kernel_channels=self.num_features,
                                output_channels=_net_params['conv_h'],
                                height=1, width=self.prediction_window,
                                stride_height=1, stride_width=self.prediction_window,
                                border_mode='valid', groups=1,
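The excerpt stops partway through the add_convolution call. As a hedged sketch (not the original turicreate code), the usual closing steps for a NeuralNetworkBuilder created with mode='classifier' look roughly like this; class_labels and the file name are assumptions:

# Attach class labels, wrap the spec, and save the classifier.
builder.set_class_labels(class_labels)            # class_labels is an assumed list
mlmodel = _cmt.models.MLModel(builder.spec)
mlmodel.save('ActivityClassifier.mlmodel')        # placeholder file name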
github apple/turicreate: src/unity/python/turicreate/toolkits/image_classifier/image_classifier.py
                del nn_spec.int64ClassLabels.vector[:]
                for c in class_labels:
                    nn_spec.int64ClassLabels.vector.append(c)
            else:
                nn_spec.ClearField('stringClassLabels')
                probOutput.type.dictionaryType.stringKeyType.MergeFromString(b'')
                classLabel.type.stringType.MergeFromString(b'')
                del nn_spec.stringClassLabels.vector[:]
                for c in class_labels:
                    nn_spec.stringClassLabels.vector.append(c)

            prob_name = self.target + 'Probability'
            label_name = self.target
            old_output_name = nn_spec.layers[-1].name
            coremltools.models.utils.rename_feature(spec, 'classLabel', label_name)
            coremltools.models.utils.rename_feature(spec, old_output_name, prob_name)
            if nn_spec.layers[-1].name == old_output_name:
                nn_spec.layers[-1].name = prob_name
            if nn_spec.labelProbabilityLayerName == old_output_name:
                nn_spec.labelProbabilityLayerName = prob_name
            coremltools.models.utils.rename_feature(spec, 'data', self.feature)
            if len(nn_spec.preprocessing) > 0:
                nn_spec.preprocessing[0].featureName = self.feature

            mlmodel = coremltools.models.MLModel(spec)
            model_type = 'image classifier (%s)' % self.model
            mlmodel.short_description = _coreml_utils._mlmodel_short_description(model_type)
            mlmodel.input_description[self.feature] = u'Input image'
            mlmodel.output_description[prob_name] = 'Prediction probabilities'
            mlmodel.output_description[label_name] = 'Class label of top prediction'
            _coreml_utils._set_model_metadata(mlmodel, self.__class__.__name__, {
                'model': self.model,
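The excerpt is cut off inside the _set_model_metadata call. Separately, coremltools.models.utils.rename_feature, used several times above, renames a feature consistently across a spec; a minimal standalone sketch with placeholder feature names:

from coremltools.models.utils import rename_feature

# Rename a feature in the spec, then rebuild the MLModel wrapper around it.
spec = mlmodel.get_spec()
rename_feature(spec, 'old_feature', 'new_feature')   # placeholder feature names
mlmodel = coremltools.models.MLModel(spec)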
github hollance/coreml-survival-guide: NeuralNetworkBuilder/convert_to_coreml.py
input_name="ip1_output",
                          output_name="ip2_output")

builder.add_softmax(name="softmax",
                    input_name="ip2_output",
                    output_name="labelProbs")


import caffe_pb2
mean_image = caffe_pb2.BlobProto()
mean_image.ParseFromString(open("mean.binaryproto", "rb").read())
mean_image = np.array(mean_image.data)
builder.spec.neuralNetworkClassifier.preprocessing[0].meanImage.meanImage.extend(mean_image)


mlmodel = coremltools.models.MLModel(builder.spec)

mlmodel.short_description = "cifar10_quick"
mlmodel.author = "https://github.com/BVLC/caffe/tree/master/examples/cifar10"
mlmodel.license = "https://github.com/BVLC/caffe/blob/master/LICENSE"

mlmodel.input_description["image"] = "The input image"
mlmodel.output_description["labelProbs"] = "The predicted probabilities"
mlmodel.output_description["label"] = "The class with the highest score"

mlmodel.save("CIFAR10.mlmodel")


import PIL
img = PIL.Image.open("boat.jpg")
img = img.resize((32, 32), PIL.Image.BILINEAR)
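A hedged continuation, not part of the excerpt: on macOS the resized PIL image can be passed straight to predict, using the input and output names declared above.

# Run the model on the prepared image and print the label and probabilities.
prediction = mlmodel.predict({"image": img})
print(prediction["label"])
print(prediction["labelProbs"])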